diff --git a/.github/PULL_REQUEST_TEMPLATE/fix-issue.md b/.github/PULL_REQUEST_TEMPLATE/fix-issue.md index 506996510c7e..f7cf22eb59c7 100644 --- a/.github/PULL_REQUEST_TEMPLATE/fix-issue.md +++ b/.github/PULL_REQUEST_TEMPLATE/fix-issue.md @@ -6,14 +6,14 @@ assignees: '' --- - ## Fix #XYZ - + diff --git a/.github/PULL_REQUEST_TEMPLATE/other-pr.md b/.github/PULL_REQUEST_TEMPLATE/other-pr.md index 4b69a80460af..fad49836df92 100644 --- a/.github/PULL_REQUEST_TEMPLATE/other-pr.md +++ b/.github/PULL_REQUEST_TEMPLATE/other-pr.md @@ -6,14 +6,14 @@ assignees: '' --- - ## Description - + diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 368766349bab..370b66854051 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -2,9 +2,24 @@ name: Dotty on: push: + ## Be careful if you add or remove something here! Quoting from + ## : + ## + ## > If you define only tags/tags-ignore or only branches/branches-ignore, the + ## > workflow won't run for events affecting the undefined Git ref. If you + ## > define neither tags/tags-ignore or branches/branches-ignore, the workflow + ## > will run for events affecting either branches or tags. + ## + ## We want the CI to run on both branches and tags, so we should either have: + ## - both (tags or tags-ignore) and (branches or branches-ignore), + ## - or neither of them. + ## But it's important to not have only one or the other. 
tags: - - '**' + - '*' + branches-ignore: + - 'gh-readonly-queue/**' pull_request: + merge_group: schedule: - cron: '0 3 * * *' # Every day at 3 AM workflow_dispatch: @@ -87,6 +102,7 @@ jobs: - ${{ github.workspace }}/../../cache/general:/root/.cache if: "github.event_name == 'schedule' && github.repository == 'lampepfl/dotty' || github.event_name == 'push' + || github.event_name == 'merge_group' || ( github.event_name == 'pull_request' && !contains(github.event.pull_request.body, '[skip ci]') @@ -132,6 +148,7 @@ jobs: github.event_name == 'push' && github.ref != 'refs/heads/main' ) + || github.event_name == 'merge_group' || ( github.event_name == 'pull_request' && !contains(github.event.pull_request.body, '[skip ci]') @@ -208,6 +225,7 @@ jobs: - ${{ github.workspace }}/../../cache/general:/root/.cache if: "github.event_name == 'schedule' && github.repository == 'lampepfl/dotty' || github.event_name == 'push' + || github.event_name == 'merge_group' || ( github.event_name == 'pull_request' && !contains(github.event.pull_request.body, '[skip ci]') @@ -248,6 +266,7 @@ jobs: - ${{ github.workspace }}/../../cache/general:/root/.cache if: "github.event_name == 'schedule' && github.repository == 'lampepfl/dotty' || github.event_name == 'push' + || github.event_name == 'merge_group' || ( github.event_name == 'pull_request' && !contains(github.event.pull_request.body, '[skip ci]') @@ -296,6 +315,7 @@ jobs: - ${{ github.workspace }}/../../cache/general:/root/.cache if: "github.event_name == 'schedule' && github.repository == 'lampepfl/dotty' || github.event_name == 'push' + || github.event_name == 'merge_group' || ( github.event_name == 'pull_request' && !contains(github.event.pull_request.body, '[skip ci]') @@ -344,6 +364,7 @@ jobs: - ${{ github.workspace }}/../../cache/general:/root/.cache if: "github.event_name == 'schedule' && github.repository == 'lampepfl/dotty' || github.event_name == 'push' + || github.event_name == 'merge_group' || ( github.event_name == 
'pull_request' && !contains(github.event.pull_request.body, '[skip ci]') @@ -575,45 +596,6 @@ jobs: external_repository: lampepfl/dotty-website publish_branch: gh-pages - nightly_unmanaged_community_build: - # Self-hosted runner is used only for getting current build version - runs-on: [self-hosted, Linux] - container: - image: lampepfl/dotty:2021-03-22 - options: --cpu-shares 4096 - volumes: - - ${{ github.workspace }}/../../cache/sbt:/root/.sbt - - ${{ github.workspace }}/../../cache/ivy:/root/.ivy2/cache - - ${{ github.workspace }}/../../cache/general:/root/.cache - needs: [publish_nightly] - if: "(github.event_name == 'schedule' || github.event_name == 'workflow_dispatch') && github.repository == 'lampepfl/dotty'" - env: - NIGHTLYBUILD: yes - steps: - - name: Reset existing repo - run: git -c "http.https://github.com/.extraheader=" fetch --recurse-submodules=no "https://github.com/lampepfl/dotty" && git reset --hard FETCH_HEAD || true - - - name: Checkout cleanup script - uses: actions/checkout@v3 - - - name: Cleanup - run: .github/workflows/cleanup.sh - - - name: Git Checkout - uses: actions/checkout@v3 - - - name: Add SBT proxy repositories - run: cp -vf .github/workflows/repositories /root/.sbt/ ; true - - - name: Get version string for this build - run: | - ver=$(./project/scripts/sbt "print scala3-compiler-bootstrapped/version" | tail -n1) - echo "This build version: $ver" - echo "THISBUILD_VERSION=$ver" >> $GITHUB_ENV - # Steps above are copy-pasted from publish_nightly, needed only to resolve THISBUILD_VERSION - - name: Trigger unmanaged community build - run: .github/workflows/scripts/triggerUnmanagedCommunityBuild.sh "${{ secrets.BUILD_TOKEN }}" "$THISBUILD_VERSION" - publish_release: permissions: contents: write # for actions/create-release to create a release diff --git a/.github/workflows/cla.yml b/.github/workflows/cla.yml index 3ac31b0994f7..bb1aec1290c0 100644 --- a/.github/workflows/cla.yml +++ b/.github/workflows/cla.yml @@ -6,6 +6,7 @@ on: 
push: branches: - 'language-reference-stable' + merge_group: permissions: contents: write pull-requests: write diff --git a/.github/workflows/releases.yml b/.github/workflows/releases.yml index 7415759078ac..ba4bae0456d0 100644 --- a/.github/workflows/releases.yml +++ b/.github/workflows/releases.yml @@ -13,9 +13,9 @@ jobs: options: --cpu-shares 4096 env: - SDKMAN_KEY: ${{ secrets.SDKMAN_KEY }} + SDKMAN_KEY: ${{ secrets.SDKMAN_KEY }} SDKMAN_TOKEN: ${{ secrets.SDKMAN_TOKEN }} - + steps: - name: Reset existing repo run: git -c "http.https://github.com/.extraheader=" fetch --recurse-submodules=no "https://github.com/lampepfl/dotty" && git reset --hard FETCH_HEAD || true diff --git a/.github/workflows/scaladoc.yaml b/.github/workflows/scaladoc.yaml index 9ccbe34788ce..3108f2b94562 100644 --- a/.github/workflows/scaladoc.yaml +++ b/.github/workflows/scaladoc.yaml @@ -4,9 +4,11 @@ on: push: branches-ignore: - 'language-reference-stable' + - 'gh-readonly-queue/**' pull_request: branches-ignore: - 'language-reference-stable' + merge_group: permissions: contents: read @@ -15,7 +17,8 @@ jobs: env: AZURE_STORAGE_SAS_TOKEN: ${{ secrets.AZURE_STORAGE_SAS_TOKEN }} runs-on: ubuntu-latest - if: "( github.event_name == 'pull_request' + if: "github.event_name == 'merge_group' + || ( github.event_name == 'pull_request' && !contains(github.event.pull_request.body, '[skip ci]') && !contains(github.event.pull_request.body, '[skip docs]') ) diff --git a/.github/workflows/scripts/publish-sdkman.sh b/.github/workflows/scripts/publish-sdkman.sh index 07d35a72a65e..70987bff175b 100755 --- a/.github/workflows/scripts/publish-sdkman.sh +++ b/.github/workflows/scripts/publish-sdkman.sh @@ -9,11 +9,11 @@ set -u -# latest stable dotty version +# latest stable dotty version DOTTY_VERSION=$(curl -s https://api.github.com/repos/lampepfl/dotty/releases/latest | grep '"tag_name":' | sed -E 's/.*"([^"]+)".*/\1/') 
DOTTY_URL="https://github.com/lampepfl/dotty/releases/download/$DOTTY_VERSION/scala3-$DOTTY_VERSION.zip" -# checking if dotty version is available +# checking if dotty version is available if ! curl --output /dev/null --silent --head --fail "$DOTTY_URL"; then echo "URL doesn't exist: $DOTTY_URL" exit 1 diff --git a/.github/workflows/scripts/triggerUnmanagedCommunityBuild.sh b/.github/workflows/scripts/triggerUnmanagedCommunityBuild.sh deleted file mode 100755 index 694428e29bb5..000000000000 --- a/.github/workflows/scripts/triggerUnmanagedCommunityBuild.sh +++ /dev/null @@ -1,37 +0,0 @@ -#!/usr/bin/env bash - -# This is script for triggering unamanged community build upon releasing nightly version. -# Script sends request to CB Jenkins instance to start the build for given released Scala version -# Prints url of created job to stdout -# -# Requirement: -# - the latest (nightly) version of scala should be published - -set -u - -if [ $# -ne 2 ]; then - echo "Wrong number of script arguments, expected , got $#: $@" - exit 1 -fi - -CB_ENDPOINT=https://scala3.westeurope.cloudapp.azure.com -CB_BUILD_TOKEN="$1" -SCALA_VERSION="$2" - -startRunResponse=$(curl "${CB_ENDPOINT}/job/runBuild/buildWithParameters?token=${CB_BUILD_TOKEN}&publishedScalaVersion=${SCALA_VERSION}" -v 2>&1) -echo "${startRunResponse}" -queueItem=$(echo "${startRunResponse}" | grep -oP "< Location: \K[\w\d:/.//]+") -# Wait until Jenkins does acknowledge the build (max 1 min ) -for i in {1..12}; do - buildUrl=$(curl -s "${queueItem}/api/json?tree=executable[url]" | jq .executable.url) - if [[ "null" == "${buildUrl}" ]]; then - echo "Waiting for build start..." 
- sleep 5 - else - echo "Created build url: ${buildUrl}" - exit 0 - fi -done - -# Set error if failed to resolve build url -exit 1 diff --git a/.gitignore b/.gitignore index eb9541428302..5240662741bb 100644 --- a/.gitignore +++ b/.gitignore @@ -35,6 +35,9 @@ metals.sbt .idea_modules /.worksheet/ +# scala-cli +.scala-build + # Partest dotty.jar dotty-lib.jar @@ -90,3 +93,5 @@ compiler/test-coursier/run/*.jar # docs related contributors.js content-contributors.css +docs/_spec/_site/ +docs/_spec/.jekyll-metadata diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 3267d1f02700..90496bcd0c0a 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,60 +1,5 @@ -# Dotty Developer Guidelines +# Contributing to Dotty -These guidelines are meant to be a living document that should be changed and adapted as needed. We encourage changes that make it easier to achieve our goals in an efficient way. - -## General Workflow - -This is the process for committing code to the Scala project. There are of course exceptions to these rules, for example minor changes to comments and documentation, fixing a broken build etc. - -1. Make sure you have signed the [Scala CLA](https://www.lightbend.com/contribute/cla/scala), if not, sign it. -2. Before starting to work on a feature or a fix, it's good practice to ensure that: - 1. There is a ticket for your work in the project's [issue tracker](https://github.com/lampepfl/dotty/issues); - 2. The ticket has been discussed and prioritized by the team. -3. You should always perform your work in its own Git branch. The branch should be given a descriptive name that explains its intent. Some teams also like adding the ticket number and/or the [GitHub](http://github.com) user ID to the branch name, these details is up to each of the individual teams. (See below for more details on branch naming.) -4. When the feature or fix is completed you should open a [Pull Request](https://help.github.com/articles/using-pull-requests) on GitHub. -5. 
The Pull Request should be reviewed by other maintainers (as many as feasible/practical). Note that a reviewer can also be an outside contributor—members of Typesafe or VirtusLab and independent contributors are encouraged to participate in the review process. It is not a closed process. Please try to avoid conflict of interest—the spirit of the review process is to evenly distribute the understanding of our code base across its maintainers as well as to load balance quality assurance. Assigning a review to a "sure win" reviewer is not a good long-term solution. -6. After the review, you should resolve issues brought up by the reviewers as needed (pushing a new commit to address reviewers' comments), iterating until the reviewers give their thumbs up, the "LGTM" (acronym for "Looks Good To Me"). -7. Once the code has passed review the Pull Request can be merged into the distribution. - -## Pull Request Requirements - -In order for a Pull Request to be considered, it has to meet these requirements: - -1. Live up to the current code standard: - - Not violate [DRY](https://www.oreilly.com/library/view/97-things-every/9780596809515/ch30.html). - - [Boy Scout Rule](https://www.oreilly.com/library/view/97-things-every/9780596809515/ch08.html) should be applied. -2. Tests are of paramount importance. -3. The code must be well documented in the project's standard documentation format (see the ‘Documentation’ section below). - -If *all* of these requirements are not met then the code should **not** be merged into the distribution, and need not even be reviewed. - -## Documentation - -All contributed code should come accompanied by documentation. Pull requests containing undocumented code will not be accepted. Both user-facing Scaladoc comments, as well as committer-facing internal documentation (i.e. essential design decisions that other maintainers should know about should be placed inline with line comments `//`) should be accompanying all contributed code where possible. 
- - -## Work In Progress - -It is ok to work on a public feature branch in the GitHub repository. Something that can sometimes be useful for early feedback etc. If so, then it is preferable to name the branch accordingly. This can be done by either prefixing the name with ``wip-`` as in ‘Work In Progress’, or use hierarchical names like ``wip/..``, ``feature/..`` or ``topic/..``. Either way is fine as long as it is clear that it is work in progress and not ready for merge. This work can temporarily have a lower standard. However, to be merged into master it will have to go through the regular process outlined above, with Pull Request, review etc.. - -Also, to facilitate both well-formed commits and working together, the ``wip`` and ``feature``/``topic`` identifiers also have special meaning. Any branch labeled with ``wip`` is considered “git-unstable” and may be rebased and have its history rewritten. Any branch with ``feature``/``topic`` in the name is considered “stable” enough for others to depend on when a group is working on a feature. - -## Creating Commits And Writing Commit Messages - -Follow these guidelines when creating public commits and writing commit messages. - -1. If your work spans multiple local commits (for example; if you do safe point commits while working in a feature branch or work in a branch for long time doing merges/rebases etc.) then please do not commit it all but rewrite the history by squashing the commits into one large commit which is accompanied by a detailed commit message for (as discussed in the following sections). For more info, see the article: [Git Workflow](http://sandofsky.com/blog/git-workflow.html). Additionally, every commit should be able to be used in isolation—that is, each commit must build and pass all tests. -2. The first line should be a descriptive sentence about what the commit is doing. It should be possible to fully understand what the commit does by just reading this single line. 
It is **not ok** to only list the ticket number, type "minor fix" or similar. If the commit has a corresponding ticket, include a reference to the ticket number, prefixed with "Closes #", at the beginning of the first line followed by the title of the ticket, assuming that it aptly and concisely summarizes the commit in a single line. If the commit is a small fix, then you are done. If not, go to 3. -3. Following the single line description (ideally no more than 70 characters long) should be a blank line followed by an enumerated list with the details of the commit. -4. Add keywords for your commit (depending on the degree of automation we reach, the list may change over time): - * ``Review by @githubuser`` - will notify the reviewer via GitHub. Everyone is encouraged to give feedback, however. (Remember that @-mentions will result in notifications also when pushing to a WIP branch, so please only include this in your commit message when you're ready for your pull request to be reviewed. Alternatively, you may request a review in the pull request's description.) - * ``Fix/Fixing/Fixes/Close/Closing/Refs #ticket`` - if you want to mark the ticket as fixed in the issue tracker (Assembla understands this). - * ``backport to _branch name_`` - if the fix needs to be cherry-picked to another branch (like 2.9.x, 2.10.x, etc) - -Example: - - Closes #2 Fixes the build - - - Details 1 - - Details 2 - - Details 3 +Firstly, thanks for being willing to contribute to Dotty! Head on over the +[Scala 3 Contributing +Guide](https://dotty.epfl.ch/docs/contributing/index.html), which should have all the info you're looking for. diff --git a/MAINTENANCE.md b/MAINTENANCE.md index 7bde90839724..54e74f7cb7ca 100644 --- a/MAINTENANCE.md +++ b/MAINTENANCE.md @@ -1,9 +1,12 @@ # Issue Supervisor Role -This document formally defines the _Issue Supervisor_ role. This is a repository maintenance role that is assigned to core contributors on rotating basis. 
+ +This document formally defines the _Issue Supervisor_ role. This is a repository maintenance role that is assigned to core contributors on a rotating basis. ## Responsibilities -Issue supervisor is responsible for: -- Health of the CI, nightly releases and benchmark infrastructure. + +The issue supervisor is responsible for: + +- The health of the CI, nightly releases and benchmark infrastructure. - PRs of external contributors: assigning someone to review, or handling themselves. - Triaging issues (especially new): - Each issue needs to be assigned an `itype` and 1 or more `area` labels. @@ -12,33 +15,39 @@ Issue supervisor is responsible for: - Modifying issue labels to best capture information about the issues - Attempting to reproduce the issue (or label “stat:cannot reproduce”) - Further minimizing the issue or asking the reporter of the issue to minimize it correctly (or label “stat:needs minimization”) + - Identifying which issues are of considerable importance and bringing them to the attention of the team during the Dotty meeting, where they can be filtered and added to the [Future Versions](https://github.com/lampepfl/dotty/milestone/46) milestone. Other core teammates are responsible for providing information to the issue supervisor in a timely manner when it is requested if they have that information. ## Assignment -Issue supervisor is appointed for 7 days and is responsible for what is specified in the “Responsibilities” section during those 7 days. Their assumption of the role starts from the Dotty Meeting on Monday and ends on the next Dotty Meeting on Monday. + +The issue supervisor is appointed for 7 days and is responsible for what is specified in the “Responsibilities” section during those 7 days. Their assumption of the role starts from the Dotty Meeting on Monday and ends on the next Dotty Meeting on Monday. During the Dotty Meeting, an issue supervisor is assigned for the current week and for the week after that. 
-Issue supervisor schedule is maintained in the [Issue Supervisor Statistics spreadsheet](https://docs.google.com/spreadsheets/d/19IAqNzHfJ9rsii3EsjIGwPz5BLTFJs_byGM3FprmX3E/edit?usp=sharing). So, someone who knows their availability several weeks ahead into the future can assign themselves to be an issue supervisor well ahead of time. +The issue supervisor schedule is maintained in the [Issue Supervisor Statistics spreadsheet](https://docs.google.com/spreadsheets/d/19IAqNzHfJ9rsii3EsjIGwPz5BLTFJs_byGM3FprmX3E/edit?usp=sharing). So, someone who knows their availability several weeks ahead into the future can assign themselves to be an issue supervisor well ahead of time. ## Prerequisites + An issue supervisor needs to have all the accesses and privileges required to get their job done. This might include: + - Admin rights in lampepfl/dotty repository - Admin rights in lampepfl/dotty-feature-requests repository -- Permissions to create new repositories in lampepfl organization (needed to fork repositories for the community build) +- Permission to create new repositories in lampepfl organization (needed to fork repositories for the community build) - Access to the LAMP slack to be able to ask for help with the infrastructure, triaging and such ## Procedures -To ensure proper health of the infrastructure, the supervisor regularly monitors its proper operation. If a malfunction is detected, the supervisor's job is to ensure that someone is working on it (or solve it on their own). + +To ensure the proper health of the infrastructure, the supervisor regularly monitors its proper operation. If a malfunction is detected, the supervisor's job is to ensure that someone is working on it (or solve it on their own). If it is unclear what area an issue belongs to, the supervisor asks for advice from other team members on Slack or GitHub. 
If, after asking for advice, it turns out that nobody in the team knows how to classify it, the issue must be classified with a “stat:needs triage” label. If it is unclear who should review an external PR, the supervisor asks for advice from the rest of the core team. If after asking for advice, it is still unclear who should do it, the reviewer for such a PR will be decided at the next Dotty meeting. -In general, if anything else is unclear for proper fulfillment of responsibilities, the supervisor must proactively seek advice from other team members on Slack or other channels. +In general, if anything else is unclear for the proper fulfillment of responsibilities, the supervisor must proactively seek advice from other team members on Slack or other channels. ## Reporting + At the end of their supervision period, the supervisor reports to the team during the Dotty meeting on the following points: - Whether there were any incidents with the CI, nightlies and benchmarks, how they were resolved and what steps were taken to prevent them from happening in the future. @@ -46,32 +55,42 @@ At the end of their supervision period, the supervisor reports to the team durin - How many new issues were opened during their supervision period? Were there any areas that got a lot of issues? How many regressions from a prior Scala 3 release were there? Which were designated for an MSc project or an Issue Spree? - If new labels were created or old ones were removed, or there is any other feedback on how to improve the issue supervision, mention that. - Unassigned PRs and issues that the team failed to classify: bring them one by one so that the team can make a decision on them. +- Issues of importance – candidates for the Future Versions milestone. 
+ +## Maintenance List -# Maintenance List The following is the list of all the principal areas of the compiler and the core team members who are responsible for their maintenance: +### Compiler - Parser: @odersky - Typer: @odersky, @smarter, (@dwijnand) - Erasure: @smarter, @odersky - Enums: @bishabosha +- Derivation & Mirrors: @bishabosha, (@dwijnand) - Export: @bishabosha, @odersky - Pattern Matching: @dwijnand, (@liufengyun), @sjrd - Inline: @nicolasstucki, @odersky -- Metaprogramming (Quotes, Reflect, Staging): @nicolasstucki, @aherlihy -- Match types: @OlivierBlanvillain, @dwijnand -- GADT: @abgruszecki, @dwijnand -- Scaladoc: @KacperFKorban, @BarkingBad, @pikinier20 -- Initialization checker: @olhotak, @liufengyun, @anatoliykmetyuk +- Metaprogramming (Quotes, Reflect, Staging): @nicolasstucki, @jchyb +- Match types: @sjrd, @dwijnand, @Decel +- GADT: @dwijnand, @Linyxus +- Initialization checker: @olhotak, @liufengyun - Safe nulls: @noti0na1, @olhotak +- Lazy vals: @szymon-rd, @sjrd - tailrec: @sjrd, @mbovel - JS backend: @sjrd -- forward compat (-scala-release): @prolativ, @Kordyjan, (@nicolasstucki) -- Benchmarks: @anatoliykmetyuk, @mbovel -- REPL: @dwijnand, @anatoliykmetyuk, @prolativ +- JVM backend: @sjrd +- Java-compat: @smarter + +### Tooling +- REPL: @dwijnand, @prolativ +- Scaladoc: @Florian3k +- SemanticDB: @tanishiking +- Coverage: @TheElectronWill +- Linting (especially unused warnings) / Reporting UX: @szymon-rd + +### Infrastructure - CI: @anatoliykmetyuk - Community Build: @anatoliykmetyuk +- Open Community Build: @WojciechMazur - Vulpix: @dwijnand, @prolativ -- JVM backend: @Kordyjan, (@sjrd) -- Derivation & Mirrors: @bishabosha, (@dwijnand) -- Linting (especially unused warnings) / Reporting UX : VirtusLab TBD? 
-- Java-compat: @Kordyjan +- Benchmarks: @mbovel diff --git a/NOTICE.md b/NOTICE.md index 64ebae49efe5..f4d0e6ed2b5a 100644 --- a/NOTICE.md +++ b/NOTICE.md @@ -1,6 +1,6 @@ Dotty (https://dotty.epfl.ch) -Copyright 2012-2020 EPFL -Copyright 2012-2020 Lightbend, Inc. +Copyright 2012-2023 EPFL +Copyright 2012-2023 Lightbend, Inc. Licensed under the Apache License, Version 2.0 (the "License"): http://www.apache.org/licenses/LICENSE-2.0 diff --git a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccess.scala b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccess.scala index a2aaf3e88570..d413458d0049 100644 --- a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccess.scala +++ b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccess.scala @@ -18,7 +18,7 @@ class InitializedAccess { @Setup def prepare: Unit = { - holder = new LazyHolder + holder = new LazyHolder holder.value } diff --git a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessAny.scala b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessAny.scala index 5a6b4ae1686d..8c75f6bb11a2 100644 --- a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessAny.scala +++ b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessAny.scala @@ -18,7 +18,7 @@ class InitializedAccessAny { @Setup def prepare: Unit = { - holder = new LazyAnyHolder + holder = new LazyAnyHolder holder.value } diff --git a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessGeneric.scala b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessGeneric.scala index a95cb1de2980..a9fecae6281e 100644 --- a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessGeneric.scala +++ b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessGeneric.scala @@ -18,7 +18,7 @@ class 
InitializedAccessGeneric { @Setup def prepare: Unit = { - holder = new LazyGenericHolder[String]("foo") + holder = new LazyGenericHolder[String]("foo") holder.value } diff --git a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessInt.scala b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessInt.scala new file mode 100644 index 000000000000..2a115ad63496 --- /dev/null +++ b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessInt.scala @@ -0,0 +1,30 @@ +package dotty.tools.benchmarks.lazyvals + +import org.openjdk.jmh.annotations.* +import org.openjdk.jmh.infra.Blackhole +import LazyVals.LazyIntHolder +import java.util.concurrent.TimeUnit + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 5) +@Measurement(iterations = 5) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class InitializedAccessInt { + + var holder: LazyIntHolder = _ + + @Setup + def prepare: Unit = { + holder = new LazyIntHolder + holder.value + } + + @Benchmark + def measureInitialized(bh: Blackhole) = { + bh.consume(holder) + bh.consume(holder.value) + } +} diff --git a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessString.scala b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessString.scala index 25cc0f9b288d..e6c6cd5eb2e3 100644 --- a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessString.scala +++ b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessString.scala @@ -18,7 +18,7 @@ class InitializedAccessString { @Setup def prepare: Unit = { - holder = new LazyStringHolder + holder = new LazyStringHolder holder.value } diff --git a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedObject.scala b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedObject.scala new file mode 100644 index 000000000000..672cc4bf6544 --- /dev/null 
+++ b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedObject.scala @@ -0,0 +1,22 @@ +package dotty.tools.benchmarks.lazyvals + +import org.openjdk.jmh.annotations.* +import org.openjdk.jmh.infra.Blackhole +import LazyVals.ObjectHolder +import java.util.concurrent.TimeUnit + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 5) +@Measurement(iterations = 5) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class InitializedObject { + + @Benchmark + def measureInitialized(bh: Blackhole) = { + bh.consume(ObjectHolder) + bh.consume(ObjectHolder.value) + } +} diff --git a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/LazyVals.scala b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/LazyVals.scala index 0afd93d086be..26ebb7b9d356 100644 --- a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/LazyVals.scala +++ b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/LazyVals.scala @@ -21,7 +21,7 @@ object LazyVals { } } } - + class LazyHolder { lazy val value: List[Int] = { @@ -50,4 +50,22 @@ object LazyVals { } } } + + class LazyIntHolder { + lazy val value: Int = { + (System.nanoTime() % 1000).toInt + } + } + + object ObjectHolder { + lazy val value: String = { + System.nanoTime() % 5 match { + case 0 => "abc" + case 1 => "def" + case 2 => "ghi" + case 3 => "jkl" + case 4 => "mno" + } + } + } } diff --git a/changelogs/3.3.0-RC1.md b/changelogs/3.3.0-RC1.md new file mode 100644 index 000000000000..1d632e49032a --- /dev/null +++ b/changelogs/3.3.0-RC1.md @@ -0,0 +1,225 @@ +# Highlights of the release + +- Stabilize new lazy vals [#16614](https://github.com/lampepfl/dotty/pull/16614) +- Experimental Macro annotations [#16392](https://github.com/lampepfl/dotty/pull/16392) [#16454](https://github.com/lampepfl/dotty/pull/16454) [#16534](https://github.com/lampepfl/dotty/pull/16534) +- Fix stability check for inline parameters 
[#15511](https://github.com/lampepfl/dotty/pull/15511) +- Make `fewerBraces` a standard feature [#16297](https://github.com/lampepfl/dotty/pull/16297) +- Add new front-end phase for unused entities and add support for unused imports [#16157](https://github.com/lampepfl/dotty/pull/16157) +- Implement -Wvalue-discard warning [#15975](https://github.com/lampepfl/dotty/pull/15975) +- Introduce boundary/break control abstraction. [#16612](https://github.com/lampepfl/dotty/pull/16612) + +# Other changes and fixes + +## Annotations + +- Support use-site meta-annotations [#16445](https://github.com/lampepfl/dotty/pull/16445) + +## Desugaring + +- Reuse typed prefix for `applyDynamic` and `applyDynamicNamed` [#16552](https://github.com/lampepfl/dotty/pull/16552) +- Fix object selftype match error [#16441](https://github.com/lampepfl/dotty/pull/16441) + +## Erasure + +- Dealias before checking for outer references in types [#16525](https://github.com/lampepfl/dotty/pull/16525) +- Fix generic signature for type params bounded by primitive [#16442](https://github.com/lampepfl/dotty/pull/16442) +- Avoid EmptyScope.cloneScope crashing, eg on missing references [#16314](https://github.com/lampepfl/dotty/pull/16314) + +## GADTs + +- Inline GADT state restoring in TypeComparer [#16564](https://github.com/lampepfl/dotty/pull/16564) +- Add extension/conversion to GADT selection healing [#16638](https://github.com/lampepfl/dotty/pull/16638) + +## Incremental compilation + +- Unpickle arguments of parent constructors in Templates lazily [#16688](https://github.com/lampepfl/dotty/pull/16688) + +## Initialization + +- Fix #16438: Supply dummy args for erroneous parent call in init check [#16448](https://github.com/lampepfl/dotty/pull/16448) + +## Inline + +- Dealias in ConstantValue, for inline if cond [#16652](https://github.com/lampepfl/dotty/pull/16652) +- Set Span for top level annotations generated in PostTyper [#16378](https://github.com/lampepfl/dotty/pull/16378) +- Interpolate 
any type vars from comparing against SelectionProto [#16348](https://github.com/lampepfl/dotty/pull/16348) +- Handle binding of beta reduced inlined lambdas [#16377](https://github.com/lampepfl/dotty/pull/16377) +- Do not add dummy RHS to abstract inline methods [#16510](https://github.com/lampepfl/dotty/pull/16510) +- Warn on inline given aliases with functions as RHS [#16499](https://github.com/lampepfl/dotty/pull/16499) +- Support inline overrides in value classes [#16523](https://github.com/lampepfl/dotty/pull/16523) + +## Java interop + +- Represent Java annotations as interfaces so they can be extended, and disallow various misuses of them [#16260](https://github.com/lampepfl/dotty/pull/16260) + +## Opaque Types + +- Delay opaque alias checking until PostTyper [#16644](https://github.com/lampepfl/dotty/pull/16644) + +## Overloading + +- Handle context function arguments in overloading resolution [#16511](https://github.com/lampepfl/dotty/pull/16511) + +## Parser + +- Improve support for Unicode supplementary characters in identifiers and string interpolation (as in Scala 2) [#16278](https://github.com/lampepfl/dotty/pull/16278) +- Require indent after colon at EOL [#16466](https://github.com/lampepfl/dotty/pull/16466) +- Help givens return refined types [#16293](https://github.com/lampepfl/dotty/pull/16293) + +## Pattern Matching + +- Tweak AvoidMap's derivedSelect [#16563](https://github.com/lampepfl/dotty/pull/16563) +- Space: Use RHS of & when refining subtypes [#16573](https://github.com/lampepfl/dotty/pull/16573) +- Freeze constraints in a condition check of maximiseType [#16526](https://github.com/lampepfl/dotty/pull/16526) +- Restrict syntax of typed patterns [#16150](https://github.com/lampepfl/dotty/pull/16150) +- Test case to show that #16252 works with transparent [#16262](https://github.com/lampepfl/dotty/pull/16262) +- Support inline unapplySeq and with leading given parameters [#16358](https://github.com/lampepfl/dotty/pull/16358) +- Handle 
sealed prefixes in exh checking [#16621](https://github.com/lampepfl/dotty/pull/16621) +- Detect irrefutable quoted patterns [#16674](https://github.com/lampepfl/dotty/pull/16674) + +## Pickling + +- Allow case classes with up to 254 parameters [#16501](https://github.com/lampepfl/dotty/pull/16501) +- Correctly unpickle Scala 2 private case classes in traits [#16519](https://github.com/lampepfl/dotty/pull/16519) + +## Polyfunctions + +- Fix #9996: Crash with function accepting polymorphic function type with singleton result [#16327](https://github.com/lampepfl/dotty/pull/16327) + +## Quotes + +- Remove contents of inline methods [#16345](https://github.com/lampepfl/dotty/pull/16345) +- Fix errors in explicit type annotations in inline match cases [#16257](https://github.com/lampepfl/dotty/pull/16257) +- Handle macro annotation suspends and crashes [#16509](https://github.com/lampepfl/dotty/pull/16509) +- Fix macro annotations `spliceOwner` [#16513](https://github.com/lampepfl/dotty/pull/16513) + +## REPL + +- REPL: Fix crash when printing instances of value classes [#16393](https://github.com/lampepfl/dotty/pull/16393) +- Attempt to fix completion crash [#16267](https://github.com/lampepfl/dotty/pull/16267) +- Fix REPL shadowing bug [#16389](https://github.com/lampepfl/dotty/pull/16389) +- Open up for extensibility [#16276](https://github.com/lampepfl/dotty/pull/16276) +- Don't crash if completions throw [#16687](https://github.com/lampepfl/dotty/pull/16687) + +## Reflection + +- Fix reflect typeMembers to return all members [#15033](https://github.com/lampepfl/dotty/pull/15033) +- Deprecate reflect Flags.Static [#16568](https://github.com/lampepfl/dotty/pull/16568) + +## Reporting + +- Suppress follow-on errors for erroneous import qualifiers [#16658](https://github.com/lampepfl/dotty/pull/16658) +- Fix order in which errors are reported for assignment to val [#16660](https://github.com/lampepfl/dotty/pull/16660) +- Fix class name in error message 
[#16635](https://github.com/lampepfl/dotty/pull/16635) +- Make refined type printing more source compatible [#16303](https://github.com/lampepfl/dotty/pull/16303) +- Add error hint on local inline def used in quotes [#16572](https://github.com/lampepfl/dotty/pull/16572) +- Fix Text wrapping [#16277](https://github.com/lampepfl/dotty/pull/16277) +- Fix -Wunused:import registering constructor `` instead of its owner (also fix false positive for enum) [#16661](https://github.com/lampepfl/dotty/pull/16661) +- Fix #16675 : -Wunused false positive on case class generated method, due to flags used to distinguish case accessors. [#16683](https://github.com/lampepfl/dotty/pull/16683) +- Fix #16680 by registering Ident not containing a symbol [#16689](https://github.com/lampepfl/dotty/pull/16689) +- Fix #16682: CheckUnused missed some used symbols [#16690](https://github.com/lampepfl/dotty/pull/16690) +- Fix the non-miniphase tree traverser [#16684](https://github.com/lampepfl/dotty/pull/16684) + +## Scala-JS + +- Fix #14289: Accept Ident refs to `js.native` in native member rhs. 
[#16185](https://github.com/lampepfl/dotty/pull/16185) + +## Standard Library + +- Add `CanEqual` instance for `Map` [#15886](https://github.com/lampepfl/dotty/pull/15886) +- Refine `Tuple.Append` return type [#16140](https://github.com/lampepfl/dotty/pull/16140) + +## TASTy format + +- Make it a fatal error if erasure cannot resolve a type [#16373](https://github.com/lampepfl/dotty/pull/16373) + +## Tooling + +- Add -Yimports compiler flag [#16218](https://github.com/lampepfl/dotty/pull/16218) +- Allow BooleanSettings to be set with a colon [#16425](https://github.com/lampepfl/dotty/pull/16425) + +## Transform + +- Avoid stackoverflow in ExplicitOuter [#16381](https://github.com/lampepfl/dotty/pull/16381) +- Make lazy vals run on non-fallback graal image - remove dynamic reflection [#16346](https://github.com/lampepfl/dotty/pull/16346) +- Patch to avoid crash in #16351 [#16354](https://github.com/lampepfl/dotty/pull/16354) +- Don't treat package object's `` methods as package members [#16667](https://github.com/lampepfl/dotty/pull/16667) +- Space: Refine isSubspace property & an example [#16574](https://github.com/lampepfl/dotty/pull/16574) + +## Typer + +- Drop requirement that self types are closed [#16648](https://github.com/lampepfl/dotty/pull/16648) +- Disallow constructor params from appearing in parent types for soundness [#16664](https://github.com/lampepfl/dotty/pull/16664) +- Don't search implicit arguments in singleton type prefix [#16490](https://github.com/lampepfl/dotty/pull/16490) +- Don't rely on isProvisional to determine whether atoms computed [#16489](https://github.com/lampepfl/dotty/pull/16489) +- Support signature polymorphic methods (`MethodHandle` and `VarHandle`) [#16225](https://github.com/lampepfl/dotty/pull/16225) +- Prefer parameterless alternatives during ambiguous overload resolution [#16315](https://github.com/lampepfl/dotty/pull/16315) +- Fix calculation to drop transparent classes 
[#16344](https://github.com/lampepfl/dotty/pull/16344) +- Test case for issue 16311 [#16317](https://github.com/lampepfl/dotty/pull/16317) +- Skip caching provisional OrType atoms [#16295](https://github.com/lampepfl/dotty/pull/16295) +- Avoid cyclic references due to experimental check when inlining [#16195](https://github.com/lampepfl/dotty/pull/16195) +- Track type variable dependencies to guide instantiation decisions [#16042](https://github.com/lampepfl/dotty/pull/16042) +- Two fixes to constraint solving [#16353](https://github.com/lampepfl/dotty/pull/16353) +- Fix regression in cyclic constraint handling [#16514](https://github.com/lampepfl/dotty/pull/16514) +- Sharpen range approximation for applied types with capture set ranges [#16261](https://github.com/lampepfl/dotty/pull/16261) +- Cut the Gordian Knot: Don't widen unions to transparent [#15642](https://github.com/lampepfl/dotty/pull/15642) +- Fix widening logic to keep instantiation within bounds [#16417](https://github.com/lampepfl/dotty/pull/16417) +- Skip ambiguous reference error when symbols are aliases [#16401](https://github.com/lampepfl/dotty/pull/16401) +- Avoid incorrect simplifications when updating bounds in the constraint [#16410](https://github.com/lampepfl/dotty/pull/16410) +- Take `@targetName` into account when resolving extension methods [#16487](https://github.com/lampepfl/dotty/pull/16487) +- Improve ClassTag handling to avoid invalid ClassTag generation and inference failure [#16492](https://github.com/lampepfl/dotty/pull/16492) +- Fix extracting the elemType of a union of arrays [#16569](https://github.com/lampepfl/dotty/pull/16569) +- Make sure annotations are typed in expression contexts [#16699](https://github.com/lampepfl/dotty/pull/16699) +- Throw a type error when using hk-types in unions or intersections [#16712](https://github.com/lampepfl/dotty/pull/16712) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog 
-sn --no-merges 3.2.2..3.3.0-RC1` these are: + +``` + 225 Martin Odersky + 73 Dale Wijnand + 58 Szymon Rodziewicz + 54 Nicolas Stucki + 48 Kamil Szewczyk + 48 Paul Coral + 30 Paweł Marks + 28 Florian3k + 28 Yichen Xu + 14 Guillaume Martres + 8 Fengyun Liu + 8 Michał Pałka + 7 Chris Birchall + 7 rochala + 6 Kacper Korban + 6 Sébastien Doeraene + 6 jdudrak + 5 Seth Tisue + 5 Som Snytt + 5 nizhikov + 4 Filip Zybała + 4 Jan Chyb + 4 Michael Pollmeier + 4 Natsu Kagami + 3 Jamie Thompson + 2 Alex + 2 Anatolii Kmetiuk + 2 Dmitrii Naumenko + 2 Lukas Rytz + 2 adampauls + 2 yoshinorin + 1 Alexander Slesarenko + 1 Chris Kipp + 1 Guillaume Raffin + 1 Jakub Kozłowski + 1 Jan-Pieter van den Heuvel + 1 Julien Richard-Foy + 1 Kenji Yoshida + 1 Philippus + 1 Szymon R + 1 Tim Spence + 1 s.bazarsadaev + +``` \ No newline at end of file diff --git a/changelogs/3.3.0-RC2.md b/changelogs/3.3.0-RC2.md new file mode 100644 index 000000000000..57d785816489 --- /dev/null +++ b/changelogs/3.3.0-RC2.md @@ -0,0 +1,229 @@ +This release is nearly identical to 3.3.0-RC1. The only difference is that 3.3.0-RC1 generated output with incorrect TASTy version. + +The following changelog is identical to the changelog of 3.3.0-RC1. + +# Highlights of the release + +- Stabilize new lazy vals [#16614](https://github.com/lampepfl/dotty/pull/16614) +- Experimental Macro annotations [#16392](https://github.com/lampepfl/dotty/pull/16392) [#16454](https://github.com/lampepfl/dotty/pull/16454) [#16534](https://github.com/lampepfl/dotty/pull/16534) +- Fix stability check for inline parameters [#15511](https://github.com/lampepfl/dotty/pull/15511) +- Make `fewerBraces` a standard feature [#16297](https://github.com/lampepfl/dotty/pull/16297) +- Add new front-end phase for unused entities and add support for unused imports [#16157](https://github.com/lampepfl/dotty/pull/16157) +- Implement -Wvalue-discard warning [#15975](https://github.com/lampepfl/dotty/pull/15975) +- Introduce boundary/break control abstraction. 
[#16612](https://github.com/lampepfl/dotty/pull/16612) + +# Other changes and fixes + +## Annotations + +- Support use-site meta-annotations [#16445](https://github.com/lampepfl/dotty/pull/16445) + +## Desugaring + +- Reuse typed prefix for `applyDynamic` and `applyDynamicNamed` [#16552](https://github.com/lampepfl/dotty/pull/16552) +- Fix object selftype match error [#16441](https://github.com/lampepfl/dotty/pull/16441) + +## Erasure + +- Dealias before checking for outer references in types [#16525](https://github.com/lampepfl/dotty/pull/16525) +- Fix generic signature for type params bounded by primitive [#16442](https://github.com/lampepfl/dotty/pull/16442) +- Avoid EmptyScope.cloneScope crashing, eg on missing references [#16314](https://github.com/lampepfl/dotty/pull/16314) + +## GADTs + +- Inline GADT state restoring in TypeComparer [#16564](https://github.com/lampepfl/dotty/pull/16564) +- Add extension/conversion to GADT selection healing [#16638](https://github.com/lampepfl/dotty/pull/16638) + +## Incremental compilation + +- Unpickle arguments of parent constructors in Templates lazily [#16688](https://github.com/lampepfl/dotty/pull/16688) + +## Initialization + +- Fix #16438: Supply dummy args for erroneous parent call in init check [#16448](https://github.com/lampepfl/dotty/pull/16448) + +## Inline + +- Dealias in ConstantValue, for inline if cond [#16652](https://github.com/lampepfl/dotty/pull/16652) +- Set Span for top level annotations generated in PostTyper [#16378](https://github.com/lampepfl/dotty/pull/16378) +- Interpolate any type vars from comparing against SelectionProto [#16348](https://github.com/lampepfl/dotty/pull/16348) +- Handle binding of beta reduced inlined lambdas [#16377](https://github.com/lampepfl/dotty/pull/16377) +- Do not add dummy RHS to abstract inline methods [#16510](https://github.com/lampepfl/dotty/pull/16510) +- Warn on inline given aliases with functions as RHS [#16499](https://github.com/lampepfl/dotty/pull/16499) +- 
Support inline overrides in value classes [#16523](https://github.com/lampepfl/dotty/pull/16523) + +## Java interop + +- Represent Java annotations as interfaces so they can be extended, and disallow various misuses of them [#16260](https://github.com/lampepfl/dotty/pull/16260) + +## Opaque Types + +- Delay opaque alias checking until PostTyper [#16644](https://github.com/lampepfl/dotty/pull/16644) + +## Overloading + +- Handle context function arguments in overloading resolution [#16511](https://github.com/lampepfl/dotty/pull/16511) + +## Parser + +- Improve support for Unicode supplementary characters in identifiers and string interpolation (as in Scala 2) [#16278](https://github.com/lampepfl/dotty/pull/16278) +- Require indent after colon at EOL [#16466](https://github.com/lampepfl/dotty/pull/16466) +- Help givens return refined types [#16293](https://github.com/lampepfl/dotty/pull/16293) + +## Pattern Matching + +- Tweak AvoidMap's derivedSelect [#16563](https://github.com/lampepfl/dotty/pull/16563) +- Space: Use RHS of & when refining subtypes [#16573](https://github.com/lampepfl/dotty/pull/16573) +- Freeze constraints in a condition check of maximiseType [#16526](https://github.com/lampepfl/dotty/pull/16526) +- Restrict syntax of typed patterns [#16150](https://github.com/lampepfl/dotty/pull/16150) +- Test case to show that #16252 works with transparent [#16262](https://github.com/lampepfl/dotty/pull/16262) +- Support inline unapplySeq and with leading given parameters [#16358](https://github.com/lampepfl/dotty/pull/16358) +- Handle sealed prefixes in exh checking [#16621](https://github.com/lampepfl/dotty/pull/16621) +- Detect irrefutable quoted patterns [#16674](https://github.com/lampepfl/dotty/pull/16674) + +## Pickling + +- Allow case classes with up to 254 parameters [#16501](https://github.com/lampepfl/dotty/pull/16501) +- Correctly unpickle Scala 2 private case classes in traits [#16519](https://github.com/lampepfl/dotty/pull/16519) + +## 
Polyfunctions + +- Fix #9996: Crash with function accepting polymorphic function type with singleton result [#16327](https://github.com/lampepfl/dotty/pull/16327) + +## Quotes + +- Remove contents of inline methods [#16345](https://github.com/lampepfl/dotty/pull/16345) +- Fix errors in explicit type annotations in inline match cases [#16257](https://github.com/lampepfl/dotty/pull/16257) +- Handle macro annotation suspends and crashes [#16509](https://github.com/lampepfl/dotty/pull/16509) +- Fix macro annotations `spliceOwner` [#16513](https://github.com/lampepfl/dotty/pull/16513) + +## REPL + +- REPL: Fix crash when printing instances of value classes [#16393](https://github.com/lampepfl/dotty/pull/16393) +- Attempt to fix completion crash [#16267](https://github.com/lampepfl/dotty/pull/16267) +- Fix REPL shadowing bug [#16389](https://github.com/lampepfl/dotty/pull/16389) +- Open up for extensibility [#16276](https://github.com/lampepfl/dotty/pull/16276) +- Don't crash if completions throw [#16687](https://github.com/lampepfl/dotty/pull/16687) + +## Reflection + +- Fix reflect typeMembers to return all members [#15033](https://github.com/lampepfl/dotty/pull/15033) +- Deprecate reflect Flags.Static [#16568](https://github.com/lampepfl/dotty/pull/16568) + +## Reporting + +- Suppress follow-on errors for erroneous import qualifiers [#16658](https://github.com/lampepfl/dotty/pull/16658) +- Fix order in which errors are reported for assignment to val [#16660](https://github.com/lampepfl/dotty/pull/16660) +- Fix class name in error message [#16635](https://github.com/lampepfl/dotty/pull/16635) +- Make refined type printing more source compatible [#16303](https://github.com/lampepfl/dotty/pull/16303) +- Add error hint on local inline def used in quotes [#16572](https://github.com/lampepfl/dotty/pull/16572) +- Fix Text wrapping [#16277](https://github.com/lampepfl/dotty/pull/16277) +- Fix -Wunused:import registering constructor `` instead of its owner (also fix false 
positive for enum) [#16661](https://github.com/lampepfl/dotty/pull/16661) +- Fix #16675 : -Wunused false positive on case class generated method, due to flags used to distinguish case accessors. [#16683](https://github.com/lampepfl/dotty/pull/16683) +- Fix #16680 by registering Ident not containing a symbol [#16689](https://github.com/lampepfl/dotty/pull/16689) +- Fix #16682: CheckUnused missed some used symbols [#16690](https://github.com/lampepfl/dotty/pull/16690) +- Fix the non-miniphase tree traverser [#16684](https://github.com/lampepfl/dotty/pull/16684) + +## Scala-JS + +- Fix #14289: Accept Ident refs to `js.native` in native member rhs. [#16185](https://github.com/lampepfl/dotty/pull/16185) + +## Standard Library + +- Add `CanEqual` instance for `Map` [#15886](https://github.com/lampepfl/dotty/pull/15886) +- Refine `Tuple.Append` return type [#16140](https://github.com/lampepfl/dotty/pull/16140) + +## TASTy format + +- Make it a fatal error if erasure cannot resolve a type [#16373](https://github.com/lampepfl/dotty/pull/16373) + +## Tooling + +- Add -Yimports compiler flag [#16218](https://github.com/lampepfl/dotty/pull/16218) +- Allow BooleanSettings to be set with a colon [#16425](https://github.com/lampepfl/dotty/pull/16425) + +## Transform + +- Avoid stackoverflow in ExplicitOuter [#16381](https://github.com/lampepfl/dotty/pull/16381) +- Make lazy vals run on non-fallback graal image - remove dynamic reflection [#16346](https://github.com/lampepfl/dotty/pull/16346) +- Patch to avoid crash in #16351 [#16354](https://github.com/lampepfl/dotty/pull/16354) +- Don't treat package object's `` methods as package members [#16667](https://github.com/lampepfl/dotty/pull/16667) +- Space: Refine isSubspace property & an example [#16574](https://github.com/lampepfl/dotty/pull/16574) + +## Typer + +- Drop requirement that self types are closed [#16648](https://github.com/lampepfl/dotty/pull/16648) +- Disallow constructor params from appearing in parent types for 
soundness [#16664](https://github.com/lampepfl/dotty/pull/16664) +- Don't search implicit arguments in singleton type prefix [#16490](https://github.com/lampepfl/dotty/pull/16490) +- Don't rely on isProvisional to determine whether atoms computed [#16489](https://github.com/lampepfl/dotty/pull/16489) +- Support signature polymorphic methods (`MethodHandle` and `VarHandle`) [#16225](https://github.com/lampepfl/dotty/pull/16225) +- Prefer parameterless alternatives during ambiguous overload resolution [#16315](https://github.com/lampepfl/dotty/pull/16315) +- Fix calculation to drop transparent classes [#16344](https://github.com/lampepfl/dotty/pull/16344) +- Test case for issue 16311 [#16317](https://github.com/lampepfl/dotty/pull/16317) +- Skip caching provisional OrType atoms [#16295](https://github.com/lampepfl/dotty/pull/16295) +- Avoid cyclic references due to experimental check when inlining [#16195](https://github.com/lampepfl/dotty/pull/16195) +- Track type variable dependencies to guide instantiation decisions [#16042](https://github.com/lampepfl/dotty/pull/16042) +- Two fixes to constraint solving [#16353](https://github.com/lampepfl/dotty/pull/16353) +- Fix regression in cyclic constraint handling [#16514](https://github.com/lampepfl/dotty/pull/16514) +- Sharpen range approximation for applied types with capture set ranges [#16261](https://github.com/lampepfl/dotty/pull/16261) +- Cut the Gordian Knot: Don't widen unions to transparent [#15642](https://github.com/lampepfl/dotty/pull/15642) +- Fix widening logic to keep instantiation within bounds [#16417](https://github.com/lampepfl/dotty/pull/16417) +- Skip ambiguous reference error when symbols are aliases [#16401](https://github.com/lampepfl/dotty/pull/16401) +- Avoid incorrect simplifications when updating bounds in the constraint [#16410](https://github.com/lampepfl/dotty/pull/16410) +- Take `@targetName` into account when resolving extension methods 
[#16487](https://github.com/lampepfl/dotty/pull/16487) +- Improve ClassTag handling to avoid invalid ClassTag generation and inference failure [#16492](https://github.com/lampepfl/dotty/pull/16492) +- Fix extracting the elemType of a union of arrays [#16569](https://github.com/lampepfl/dotty/pull/16569) +- Make sure annotations are typed in expression contexts [#16699](https://github.com/lampepfl/dotty/pull/16699) +- Throw a type error when using hk-types in unions or intersections [#16712](https://github.com/lampepfl/dotty/pull/16712) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.2.2..3.3.0-RC1` these are: + +``` + 225 Martin Odersky + 73 Dale Wijnand + 58 Szymon Rodziewicz + 54 Nicolas Stucki + 48 Kamil Szewczyk + 48 Paul Coral + 30 Paweł Marks + 28 Florian3k + 28 Yichen Xu + 14 Guillaume Martres + 8 Fengyun Liu + 8 Michał Pałka + 7 Chris Birchall + 7 rochala + 6 Kacper Korban + 6 Sébastien Doeraene + 6 jdudrak + 5 Seth Tisue + 5 Som Snytt + 5 nizhikov + 4 Filip Zybała + 4 Jan Chyb + 4 Michael Pollmeier + 4 Natsu Kagami + 3 Jamie Thompson + 2 Alex + 2 Anatolii Kmetiuk + 2 Dmitrii Naumenko + 2 Lukas Rytz + 2 adampauls + 2 yoshinorin + 1 Alexander Slesarenko + 1 Chris Kipp + 1 Guillaume Raffin + 1 Jakub Kozłowski + 1 Jan-Pieter van den Heuvel + 1 Julien Richard-Foy + 1 Kenji Yoshida + 1 Philippus + 1 Szymon R + 1 Tim Spence + 1 s.bazarsadaev + +``` \ No newline at end of file diff --git a/changelogs/3.3.0-RC3.md b/changelogs/3.3.0-RC3.md new file mode 100644 index 000000000000..79a47fcf0bb9 --- /dev/null +++ b/changelogs/3.3.0-RC3.md @@ -0,0 +1,23 @@ +# Backported fixes + +- Added jpath check to `ClassLikeSupport` getParentsAsTreeSymbolTuples [#16759](https://github.com/lampepfl/dotty/pull/16759) +- Split out immutable GadtConstraint [#16602](https://github.com/lampepfl/dotty/pull/16602) +- Avoid bidirectional GADT typebounds from fullBounds 
[#15683](https://github.com/lampepfl/dotty/pull/15683) +- Fix static lazy field holder for GraalVM [#16800](https://github.com/lampepfl/dotty/pull/16800) +- Add support for disabling redirected output in the REPL driver for usage in worksheets in the Scala Plugin for IntelliJ IDEA [#16810](https://github.com/lampepfl/dotty/pull/16810) +- Add missing criterion to subtype check [#16889](https://github.com/lampepfl/dotty/pull/16889) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.3.0-RC2..3.3.0-RC3` these are: + +``` + 7 Dale Wijnand + 5 Szymon Rodziewicz + 2 Paweł Marks + 2 Vasil Vasilev + 1 Martin Odersky + 1 Mohammad Yousuf Minhaj Zia +``` diff --git a/changelogs/3.3.0-RC4.md b/changelogs/3.3.0-RC4.md new file mode 100644 index 000000000000..4c4a490237b6 --- /dev/null +++ b/changelogs/3.3.0-RC4.md @@ -0,0 +1,35 @@ +# Backported fixes + +- Fix HK quoted pattern type variables [#16907](https//github.com/lampepfl/dotty/pull/16907) +- Fix caching issue caused by incorrect isProvisional check [#16989](https://github.com/lampepfl/dotty/pull/16989) +- Fix race condition in new LazyVals [#16975](https://github.com/lampepfl/dotty/pull/16975) +- Fix "-Wunused: False positive on parameterless enum member" [#16927](https://github.com/lampepfl/dotty/pull/16927) +- Register usage of symbols in non-inferred type trees in CheckUnused [#16939](https://github.com/lampepfl/dotty/pull/16939) +- Traverse annotations instead of just registering in -W [#16956](https://github.com/lampepfl/dotty/pull/16956) +- Ignore parameter of accessors in -Wunused [#16957](https://github.com/lampepfl/dotty/pull/16957) +- Improve override detection in CheckUnused [#16965](https://github.com/lampepfl/dotty/pull/16965) +- WUnused: Fix unused warning in synthetic symbols [#17020](https://github.com/lampepfl/dotty/pull/17020) +- Fix WUnused with idents in derived code [#17095](https//github.com/lampepfl/dotty/pull/17095) 
+- WUnused: Fix for symbols with synthetic names and unused transparent inlines [#17061](https://github.com/lampepfl/dotty/pull/17061) +- Skip extension method params in WUnused [#17178](https://github.com/lampepfl/dotty/pull/17178) +- Fix wunused false positive when deriving alias type [#17157](https://github.com/lampepfl/dotty/pull/17157) +- Fix WUnused for accessible symbols that are renamed [#17177](https://github.com/lampepfl/dotty/pull/17177) +- Fix WUnused false positive in for [#17176](https://github.com/lampepfl/dotty/pull/17176) +- Make CheckUnused run both after Typer and Inlining [#17206](https://github.com/lampepfl/dotty/pull/17206) +- Disable WUnused for params of non-private defs [#17223](https://github.com/lampepfl/dotty/pull/17223) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.3.0-RC3..3.3.0-RC4` these are: + +``` + 41 Szymon Rodziewicz + 4 Paul Coral + 3 Paweł Marks + 1 Guillaume Martres + 1 Kacper Korban + 1 Nicolas Stucki + +``` diff --git a/changelogs/3.3.0-RC5.md b/changelogs/3.3.0-RC5.md new file mode 100644 index 000000000000..a9cc120ae39a --- /dev/null +++ b/changelogs/3.3.0-RC5.md @@ -0,0 +1,22 @@ +# Backported fixes + +- Remove experimental from `Mirror#fromProductTyped` [#16829](https://github.com/lampepfl/dotty/pull/16829) +- Wunused: Check if symbol exists before `isValidMemberDef` check [#17316](https://github.com/lampepfl/dotty/pull/17316) +- Wunused: Include import selector bounds in unused checks [#17323](https://github.com/lampepfl/dotty/pull/17323) +- Fix compiler crash in WUnused [#17340](https://github.com/lampepfl/dotty/pull/17340) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.3.0-RC4..3.3.0-RC5` these are: + +``` + 2 Kacper Korban + 2 Michael Pilquist + 2 Paweł Marks + 2 Szymon Rodziewicz + 1 Matt Bovel + + +``` diff --git a/changelogs/3.3.0-RC6.md
b/changelogs/3.3.0-RC6.md new file mode 100644 index 000000000000..ab98f0055974 --- /dev/null +++ b/changelogs/3.3.0-RC6.md @@ -0,0 +1,18 @@ +# Backported fixes + +- Patmat: Use less type variables in prefix inference [#16827](https//github.com/lampepfl/dotty/pull/16827) +- Just warn on type ascription on a pattern [#17454](https://github.com/lampepfl/dotty/pull/17454) +- Fix #17187: allow patches with same span [#17366](https://github.com/lampepfl/dotty/pull/17366) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.3.0-RC5..3.3.0-RC6` these are: + +``` + 2 Adrien Piquerez + 2 Michał Pałka + 2 Paweł Marks + 1 Dale Wijnand +``` diff --git a/changelogs/3.3.0.md b/changelogs/3.3.0.md new file mode 100644 index 000000000000..e3cc3703fadd --- /dev/null +++ b/changelogs/3.3.0.md @@ -0,0 +1,268 @@ +# Highlights of the release + +- Stabilize new lazy vals [#16614](https://github.com/lampepfl/dotty/pull/16614) +- Experimental Macro annotations [#16392](https://github.com/lampepfl/dotty/pull/16392) [#16454](https://github.com/lampepfl/dotty/pull/16454) [#16534](https://github.com/lampepfl/dotty/pull/16534) +- Fix stability check for inline parameters [#15511](https://github.com/lampepfl/dotty/pull/15511) +- Make `fewerBraces` a standard feature [#16297](https://github.com/lampepfl/dotty/pull/16297) +- Add new front-end phase for unused entities and add support for unused imports [#16157](https://github.com/lampepfl/dotty/pull/16157) +- Implement -Wvalue-discard warning [#15975](https://github.com/lampepfl/dotty/pull/15975) +- Introduce boundary/break control abstraction. 
[#16612](https://github.com/lampepfl/dotty/pull/16612) + +# Other changes and fixes + +## Annotations + +- Support use-site meta-annotations [#16445](https://github.com/lampepfl/dotty/pull/16445) + +## Desugaring + +- Reuse typed prefix for `applyDynamic` and `applyDynamicNamed` [#16552](https://github.com/lampepfl/dotty/pull/16552) +- Fix object selftype match error [#16441](https://github.com/lampepfl/dotty/pull/16441) + +## Erasure + +- Dealias before checking for outer references in types [#16525](https://github.com/lampepfl/dotty/pull/16525) +- Fix generic signature for type params bounded by primitive [#16442](https://github.com/lampepfl/dotty/pull/16442) +- Avoid EmptyScope.cloneScope crashing, eg on missing references [#16314](https://github.com/lampepfl/dotty/pull/16314) + +## GADTs + +- Inline GADT state restoring in TypeComparer [#16564](https://github.com/lampepfl/dotty/pull/16564) +- Add extension/conversion to GADT selection healing [#16638](https://github.com/lampepfl/dotty/pull/16638) +- Split out immutable GadtConstraint [#16602](https://github.com/lampepfl/dotty/pull/16602) +- Avoid bidirectional GADT typebounds from fullBounds [#15683](https://github.com/lampepfl/dotty/pull/15683) + +## Incremental compilation + +- Unpickle arguments of parent constructors in Templates lazily [#16688](https://github.com/lampepfl/dotty/pull/16688) + +## Initialization + +- Fix #16438: Supply dummy args for erroneous parent call in init check [#16448](https://github.com/lampepfl/dotty/pull/16448) + +## Inline + +- Dealias in ConstantValue, for inline if cond [#16652](https://github.com/lampepfl/dotty/pull/16652) +- Set Span for top level annotations generated in PostTyper [#16378](https://github.com/lampepfl/dotty/pull/16378) +- Interpolate any type vars from comparing against SelectionProto [#16348](https://github.com/lampepfl/dotty/pull/16348) +- Handle binding of beta reduced inlined lambdas [#16377](https://github.com/lampepfl/dotty/pull/16377) +- Do not add 
dummy RHS to abstract inline methods [#16510](https://github.com/lampepfl/dotty/pull/16510) +- Warn on inline given aliases with functions as RHS [#16499](https://github.com/lampepfl/dotty/pull/16499) +- Support inline overrides in value classes [#16523](https://github.com/lampepfl/dotty/pull/16523) + +## Java interop + +- Represent Java annotations as interfaces so they can be extended, and disallow various misuses of them [#16260](https://github.com/lampepfl/dotty/pull/16260) + +## Linting + +- Fix -Wunused:import registering constructor `<init>` instead of its owner (also fix false positive for enum) [#16661](https://github.com/lampepfl/dotty/pull/16661) +- Fix #16675 : -Wunused false positive on case class generated method, due to flags used to distinguish case accessors. [#16683](https://github.com/lampepfl/dotty/pull/16683) +- Fix #16682: CheckUnused missed some used symbols [#16690](https://github.com/lampepfl/dotty/pull/16690) +- Fix "-Wunused: False positive on parameterless enum member" [#16927](https://github.com/lampepfl/dotty/pull/16927) +- Register usage of symbols in non-inferred type trees in CheckUnused [#16939](https://github.com/lampepfl/dotty/pull/16939) +- Traverse annotations instead of just registering in -Wunused [#16956](https://github.com/lampepfl/dotty/pull/16956) +- Ignore parameter of accessors in -Wunused [#16957](https://github.com/lampepfl/dotty/pull/16957) +- Ignore parameter of accessors in -Wunused [#16957](https://github.com/lampepfl/dotty/pull/16957) +- Improve override detection in CheckUnused [#16965](https://github.com/lampepfl/dotty/pull/16965) +- WUnused: Fix unused warning in synthetic symbols [#17020](https://github.com/lampepfl/dotty/pull/17020) +- Fix WUnused with idents in derived code [#17095](https://github.com/lampepfl/dotty/pull/17095) +- WUnused: Fix for symbols with synthetic names and unused transparent inlines [#17061](https://github.com/lampepfl/dotty/pull/17061) +- Skip extension method params in WUnused
[#17178](https://github.com/lampepfl/dotty/pull/17178) +- Fix wunused false positive when deriving alias type [#17157](https://github.com/lampepfl/dotty/pull/17157) +- Fix WUnused for accessible symbols that are renamed [#17177](https://github.com/lampepfl/dotty/pull/17177) +- Fix WUnused false positive in for [#17176](https://github.com/lampepfl/dotty/pull/17176) +- Make CheckUnused run both after Typer and Inlining [#17206](https://github.com/lampepfl/dotty/pull/17206) +- Disable WUnused for params of non-private defs [#17223](https://github.com/lampepfl/dotty/pull/17223) +- Wunused: Check if symbol exists before `isValidMemberDef` check [#17316](https://github.com/lampepfl/dotty/pull/17316) +- Wunused: Include import selector bounds in unused checks [#17323](https://github.com/lampepfl/dotty/pull/17323) +- Fix compiler crash in WUnused [#17340](https://github.com/lampepfl/dotty/pull/17340) + +## Opaque Types + +- Delay opaque alias checking until PostTyper [#16644](https://github.com/lampepfl/dotty/pull/16644) + +## Overloading + +- Handle context function arguments in overloading resolution [#16511](https://github.com/lampepfl/dotty/pull/16511) + +## Parser + +- Improve support for Unicode supplementary characters in identifiers and string interpolation (as in Scala 2) [#16278](https://github.com/lampepfl/dotty/pull/16278) +- Require indent after colon at EOL [#16466](https://github.com/lampepfl/dotty/pull/16466) +- Help givens return refined types [#16293](https://github.com/lampepfl/dotty/pull/16293) + +## Pattern Matching + +- Tweak AvoidMap's derivedSelect [#16563](https://github.com/lampepfl/dotty/pull/16563) +- Space: Use RHS of & when refining subtypes [#16573](https://github.com/lampepfl/dotty/pull/16573) +- Freeze constraints in a condition check of maximiseType [#16526](https://github.com/lampepfl/dotty/pull/16526) +- Restrict syntax of typed patterns [#16150](https://github.com/lampepfl/dotty/pull/16150) +- Test case to show that #16252 works with 
transparent [#16262](https://github.com/lampepfl/dotty/pull/16262) +- Support inline unapplySeq and with leading given parameters [#16358](https://github.com/lampepfl/dotty/pull/16358) +- Handle sealed prefixes in exh checking [#16621](https://github.com/lampepfl/dotty/pull/16621) +- Detect irrefutable quoted patterns [#16674](https://github.com/lampepfl/dotty/pull/16674) +- Patmat: Use less type variables in prefix inference [#16827](https://github.com/lampepfl/dotty/pull/16827) + +## Pickling + +- Allow case classes with up to 254 parameters [#16501](https://github.com/lampepfl/dotty/pull/16501) +- Correctly unpickle Scala 2 private case classes in traits [#16519](https://github.com/lampepfl/dotty/pull/16519) + +## Polyfunctions + +- Fix #9996: Crash with function accepting polymorphic function type with singleton result [#16327](https://github.com/lampepfl/dotty/pull/16327) + +## Quotes + +- Remove contents of inline methods [#16345](https://github.com/lampepfl/dotty/pull/16345) +- Fix errors in explicit type annotations in inline match cases [#16257](https://github.com/lampepfl/dotty/pull/16257) +- Handle macro annotation suspends and crashes [#16509](https://github.com/lampepfl/dotty/pull/16509) +- Fix macro annotations `spliceOwner` [#16513](https://github.com/lampepfl/dotty/pull/16513) +- Fix HK quoted pattern type variables [#16907](https://github.com/lampepfl/dotty/pull/16907) + +## REPL + +- REPL: Fix crash when printing instances of value classes [#16393](https://github.com/lampepfl/dotty/pull/16393) +- Attempt to fix completion crash [#16267](https://github.com/lampepfl/dotty/pull/16267) +- Fix REPL shadowing bug [#16389](https://github.com/lampepfl/dotty/pull/16389) +- Open up for extensibility [#16276](https://github.com/lampepfl/dotty/pull/16276) +- Don't crash if completions throw [#16687](https://github.com/lampepfl/dotty/pull/16687) + +## Reflection + +- Fix reflect typeMembers to return all members 
[#15033](https://github.com/lampepfl/dotty/pull/15033) +- Deprecate reflect Flags.Static [#16568](https://github.com/lampepfl/dotty/pull/16568) + +## Reporting + +- Suppress follow-on errors for erroneous import qualifiers [#16658](https://github.com/lampepfl/dotty/pull/16658) +- Fix order in which errors are reported for assignment to val [#16660](https://github.com/lampepfl/dotty/pull/16660) +- Fix class name in error message [#16635](https://github.com/lampepfl/dotty/pull/16635) +- Make refined type printing more source compatible [#16303](https://github.com/lampepfl/dotty/pull/16303) +- Add error hint on local inline def used in quotes [#16572](https://github.com/lampepfl/dotty/pull/16572) +- Fix Text wrapping [#16277](https://github.com/lampepfl/dotty/pull/16277) +- Fix #16680 by registering Ident not containing a symbol [#16689](https://github.com/lampepfl/dotty/pull/16689) +- Fix the non-miniphase tree traverser [#16684](https://github.com/lampepfl/dotty/pull/16684) +- Just warn on type ascription on a pattern [#17454](https://github.com/lampepfl/dotty/pull/17454) + +## Scala-JS + +- Fix #14289: Accept Ident refs to `js.native` in native member rhs. 
[#16185](https://github.com/lampepfl/dotty/pull/16185) + +## Scaladoc + +- Added jpath check to `ClassLikeSupport` getParentsAsTreeSymbolTuples [#16759](https://github.com/lampepfl/dotty/pull/16759) + +## Standard Library + +- Add `CanEqual` instance for `Map` [#15886](https://github.com/lampepfl/dotty/pull/15886) +- Refine `Tuple.Append` return type [#16140](https://github.com/lampepfl/dotty/pull/16140) +- Remove experimental from `Mirror#fromProductTyped` [#16829](https://github.com/lampepfl/dotty/pull/16829) + +## TASTy format + +- Make it a fatal error if erasure cannot resolve a type [#16373](https://github.com/lampepfl/dotty/pull/16373) + +## Tooling + +- Add -Yimports compiler flag [#16218](https://github.com/lampepfl/dotty/pull/16218) +- Allow BooleanSettings to be set with a colon [#16425](https://github.com/lampepfl/dotty/pull/16425) +- Add support for disabling redirected output in the REPL driver for usage in worksheets in the Scala Plugin for IntelliJ IDEA [#16810](https://github.com/lampepfl/dotty/pull/16810) +- Fix #17187: allow patches with same span [#17366](https://github.com/lampepfl/dotty/pull/17366) + +## Transform + +- Avoid stackoverflow in ExplicitOuter [#16381](https://github.com/lampepfl/dotty/pull/16381) +- Make lazy vals run on non-fallback graal image - remove dynamic reflection [#16346](https://github.com/lampepfl/dotty/pull/16346) +- Patch to avoid crash in #16351 [#16354](https://github.com/lampepfl/dotty/pull/16354) +- Don't treat package object's `<init>` methods as package members [#16667](https://github.com/lampepfl/dotty/pull/16667) +- Space: Refine isSubspace property & an example [#16574](https://github.com/lampepfl/dotty/pull/16574) +- Fix static lazy field holder for GraalVM [#16800](https://github.com/lampepfl/dotty/pull/16800) +- Fix race condition in new LazyVals [#16975](https://github.com/lampepfl/dotty/pull/16975) + +## Typer + +- Drop requirement that self types are closed 
[#16648](https://github.com/lampepfl/dotty/pull/16648) +- Disallow constructor params from appearing in parent types for soundness [#16664](https://github.com/lampepfl/dotty/pull/16664) +- Don't search implicit arguments in singleton type prefix [#16490](https://github.com/lampepfl/dotty/pull/16490) +- Don't rely on isProvisional to determine whether atoms computed [#16489](https://github.com/lampepfl/dotty/pull/16489) +- Support signature polymorphic methods (`MethodHandle` and `VarHandle`) [#16225](https://github.com/lampepfl/dotty/pull/16225) +- Prefer parameterless alternatives during ambiguous overload resolution [#16315](https://github.com/lampepfl/dotty/pull/16315) +- Fix calculation to drop transparent classes [#16344](https://github.com/lampepfl/dotty/pull/16344) +- Test case for issue 16311 [#16317](https://github.com/lampepfl/dotty/pull/16317) +- Skip caching provisional OrType atoms [#16295](https://github.com/lampepfl/dotty/pull/16295) +- Avoid cyclic references due to experimental check when inlining [#16195](https://github.com/lampepfl/dotty/pull/16195) +- Track type variable dependencies to guide instantiation decisions [#16042](https://github.com/lampepfl/dotty/pull/16042) +- Two fixes to constraint solving [#16353](https://github.com/lampepfl/dotty/pull/16353) +- Fix regression in cyclic constraint handling [#16514](https://github.com/lampepfl/dotty/pull/16514) +- Sharpen range approximation for applied types with capture set ranges [#16261](https://github.com/lampepfl/dotty/pull/16261) +- Cut the Gordian Knot: Don't widen unions to transparent [#15642](https://github.com/lampepfl/dotty/pull/15642) +- Fix widening logic to keep instantiation within bounds [#16417](https://github.com/lampepfl/dotty/pull/16417) +- Skip ambiguous reference error when symbols are aliases [#16401](https://github.com/lampepfl/dotty/pull/16401) +- Avoid incorrect simplifications when updating bounds in the constraint [#16410](https://github.com/lampepfl/dotty/pull/16410) 
+- Take `@targetName` into account when resolving extension methods [#16487](https://github.com/lampepfl/dotty/pull/16487) +- Improve ClassTag handling to avoid invalid ClassTag generation and inference failure [#16492](https://github.com/lampepfl/dotty/pull/16492) +- Fix extracting the elemType of a union of arrays [#16569](https://github.com/lampepfl/dotty/pull/16569) +- Make sure annotations are typed in expression contexts [#16699](https://github.com/lampepfl/dotty/pull/16699) +- Throw a type error when using hk-types in unions or intersections [#16712](https://github.com/lampepfl/dotty/pull/16712) +- Add missing criterion to subtype check [#16889](https://github.com/lampepfl/dotty/pull/16889) +- Fix caching issue caused by incorrect isProvisional check [#16989](https://github.com/lampepfl/dotty/pull/16989) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.2.2..3.3.0` these are: + +``` + 226 Martin Odersky + 106 Szymon Rodziewicz + 81 Dale Wijnand + 56 Nicolas Stucki + 52 Paul Coral + 48 Kamil Szewczyk + 45 Paweł Marks + 28 Florian3k + 28 Yichen Xu + 15 Guillaume Martres + 10 Michał Pałka + 9 Kacper Korban + 8 Fengyun Liu + 7 Chris Birchall + 7 rochala + 6 Sébastien Doeraene + 6 jdudrak + 5 Seth Tisue + 5 Som Snytt + 5 nizhikov + 4 Filip Zybała + 4 Jan Chyb + 4 Michael Pollmeier + 4 Natsu Kagami + 3 Anatolii Kmetiuk + 3 Jamie Thompson + 2 Adrien Piquerez + 2 Alex + 2 Dmitrii Naumenko + 2 Lukas Rytz + 2 Michael Pilquist + 2 Vasil Vasilev + 2 adampauls + 2 yoshinorin + 1 Alexander Slesarenko + 1 Chris Kipp + 1 Guillaume Raffin + 1 Jakub Kozłowski + 1 Jan-Pieter van den Heuvel + 1 Julien Richard-Foy + 1 Kenji Yoshida + 1 Matt Bovel + 1 Mohammad Yousuf Minhaj Zia + 1 Philippus + 1 Szymon R + 1 Tim Spence + 1 s.bazarsadaev + + +``` \ No newline at end of file diff --git a/changelogs/3.3.1-RC1.md b/changelogs/3.3.1-RC1.md new file mode 100644 index 000000000000..e7d9f8f87ea9 --- 
/dev/null +++ b/changelogs/3.3.1-RC1.md @@ -0,0 +1,299 @@ +# Highlights of the release + +- Support records in JavaParsers [#16762](https://github.com/lampepfl/dotty/pull/16762) +- Port JVM backend refactor from Scala 2 [#15322](https://github.com/lampepfl/dotty/pull/15322) + +# Other changes and fixes + +## Backend + +- Disallow mixins where super calls bind to vals [#16908](https://github.com/lampepfl/dotty/pull/16908) +- Fix #15107: Avoid re-emitting a LineNumber after only LabelNodes. [#16813](https://github.com/lampepfl/dotty/pull/16813) + +## Coverage + +- Fix #17042: Preserve the shape of secondary ctors in instrumentCoverage. [#17111](https://github.com/lampepfl/dotty/pull/17111) + +## Default parameters + +- Dupe fix when finding default arg getters [#17058](https://github.com/lampepfl/dotty/pull/17058) + +## Documentation + +- Fix: ensure syntax blocks for ebnf are marked as such [#16837](https://github.com/lampepfl/dotty/pull/16837) + +## Erasure + +- Handle `@companionClass` and `@companionMethod` meta-annotations [#17091](https://github.com/lampepfl/dotty/pull/17091) + +## Extension Methods + +- Support extension methods imported from different objects [#17050](https://github.com/lampepfl/dotty/pull/17050) + +## GADTs + +- Fix tuple member selection so it works with GADT healing [#16766](https://github.com/lampepfl/dotty/pull/16766) +- Fix upper bound constraints, that are higher-kinded [#16744](https://github.com/lampepfl/dotty/pull/16744) +- Split out immutable GadtConstraint [#16602](https://github.com/lampepfl/dotty/pull/16602) + +## Implicits + +- Improve subtyping check for not yet eta-expanded higher kinded types [#17139](https://github.com/lampepfl/dotty/pull/17139) +- Harden tpd.Apply/TypeApply in case of errors [#16887](https://github.com/lampepfl/dotty/pull/16887) +- Try to be more subtle when inferring type parameters of class parents [#16896](https://github.com/lampepfl/dotty/pull/16896) +- Include `P` in the implicit scope of 
`P.this.type` [#17088](https://github.com/lampepfl/dotty/pull/17088) +- Do not compute `protoFormal` if `param.tpt` is empty [#18288](https://github.com/lampepfl/dotty/pull/18288) + +## Incremental Compilation + +- Fix under-compilation when the method type in a SAM changes [#16996](https://github.com/lampepfl/dotty/pull/16996) + +## Infrastructure + +- Set reference version to 3.3.0-RC6 [#17504](https://github.com/lampepfl/dotty/pull/17504) +- Fix #17119: Download Coursier from GitHub directly [#17141](https://github.com/lampepfl/dotty/pull/17141) + +## Inline + +- Remove NamedArg from inlined arguments [#17228](https://github.com/lampepfl/dotty/pull/17228) +- Don't generate a Select for a TermRef with NoPrefix [#16754](https://github.com/lampepfl/dotty/pull/16754) +- Prepare bodies of inline forwarders eagerly [#16757](https://github.com/lampepfl/dotty/pull/16757) +- Do not remove inline method implementations until PruneErasedDefs [#17408](https://github.com/lampepfl/dotty/pull/17408) + +## Java Interop + +- ClassfileParser: allow missing param names (for JDK 21) [#17536](https://github.com/lampepfl/dotty/pull/17536) + +## Linting + +- Improve -Wunused: locals, privates with unset vars warning #16639 [#17160](https://github.com/lampepfl/dotty/pull/17160) +- Fix wunused false positive when deriving alias type [#17157](https://github.com/lampepfl/dotty/pull/17157) +- Port `-Wnonunit-statement` setting for dotty [#16936](https://github.com/lampepfl/dotty/pull/16936) + +## Match Types + +- Fix #13757: Explicitly disallow higher-kinded scrutinees of match types. 
[#17322](https://github.com/lampepfl/dotty/pull/17322) +- Fix match type reduction with wildcard type arguments [#17065](https://github.com/lampepfl/dotty/pull/17065) +- Fix check whether classtag can be generated for match types [#16708](https://github.com/lampepfl/dotty/pull/16708) + +## Parser + +- Allow lines starting with `.` to fall outside previous indentation widths [#17056](https://github.com/lampepfl/dotty/pull/17056) + +## Pattern Matching + +- Fix #11541: Specialize ClassTag[T] in exhaustivity check [#17385](https://github.com/lampepfl/dotty/pull/17385) +- Check outer class prefixes in type projections when pattern matching [#17136](https://github.com/lampepfl/dotty/pull/17136) +- Make unchecked cases non-`@unchecked` and non-unreachable [#16958](https://github.com/lampepfl/dotty/pull/16958) +- Fix #16899: Better handle X instanceOf P where X is T1 | T2 [#17382](https://github.com/lampepfl/dotty/pull/17382) +- Fix regression in exhaustivity of HK types [#18303](https://github.com/lampepfl/dotty/pull/18303) + +## Pickling + +- ClassfileParser: Avoid cycle when accessing companion in inner class lookup [#16882](https://github.com/lampepfl/dotty/pull/16882) + +## Polyfunctions + +- Fix type aliases in beta-reduction of polyfunctions [#17054](https://github.com/lampepfl/dotty/pull/17054) + +## Quotes + +- Register `paramProxy` and `thisProxy` in `Quote` type [#17541](https://github.com/lampepfl/dotty/pull/17541) +- Only check newVal/newMethod privateWithin on -Xcheck-macros [#17437](https://github.com/lampepfl/dotty/pull/17437) +- Unencode quote and splice trees [#17342](https://github.com/lampepfl/dotty/pull/17342) +- Correctly type Expr.ofTupleFromSeq for arity > 22 [#17261](https://github.com/lampepfl/dotty/pull/17261) +- Use TermRef to distinguish distinct Type[T] instances [#17205](https://github.com/lampepfl/dotty/pull/17205) +- Check level consistency of SingletonTypeTree as a type [#17209](https://github.com/lampepfl/dotty/pull/17209) +- Fix splice 
type variable pattern detection [#17048](https://github.com/lampepfl/dotty/pull/17048) +- Avoid creation of `@SplicedType` quote local references [#17051](https://github.com/lampepfl/dotty/pull/17051) +- Dealias type references when healing types in quotes [#17049](https://github.com/lampepfl/dotty/pull/17049) +- Replace quoted type variables in signature of HOAS pattern result [#16951](https://github.com/lampepfl/dotty/pull/16951) +- Beta-reduce directly applied PolymorphicFunction [#16623](https://github.com/lampepfl/dotty/pull/16623) +- Use `Object.toString` for `quoted.{Expr, Type}` [#16663](https://github.com/lampepfl/dotty/pull/16663) +- Fix Splicer.isEscapedVariable [#16838](https://github.com/lampepfl/dotty/pull/16838) +- Fix references to class members defined in quotes [#17107](https://github.com/lampepfl/dotty/pull/17107) +- Handle pickled forward references in pickled expressions [#16855](https://github.com/lampepfl/dotty/pull/16855) +- Fix #16615 - crashes of path dependent types in spliced Type.of [#16773](https://github.com/lampepfl/dotty/pull/16773) +- Disallow local term references in staged types [#16362](https://github.com/lampepfl/dotty/pull/16362) +- Refactor level checking / type healing logic [#17082](https://github.com/lampepfl/dotty/pull/17082) +- Dealias quoted types when staging [#17059](https://github.com/lampepfl/dotty/pull/17059) +- Fix quotes with references to path dependent types [#17081](https://github.com/lampepfl/dotty/pull/17081) +- Make arguments order in quote hole deterministic [#17405](https://github.com/lampepfl/dotty/pull/17405) +- Only transform the body of the quote with QuoteTransformer [#17451](https://github.com/lampepfl/dotty/pull/17451) +- Place staged type captures in Quote AST [#17424](https://github.com/lampepfl/dotty/pull/17424) +- Add SplicePattern AST to parse and type quote pattern splices [#17396](https://github.com/lampepfl/dotty/pull/17396) +- Dealias types in `New` before matching quotes 
[#17615](https://github.com/lampepfl/dotty/pull/17615) + +## Reflection + +- -Xcheck-macros: add hint when a symbol is created twice [#16733](https://github.com/lampepfl/dotty/pull/16733) +- Assert that symbols created using reflect API have correct privateWithin symbols [#17352](https://github.com/lampepfl/dotty/pull/17352) +- Fix reflect.LambdaType type test [#16972](https://github.com/lampepfl/dotty/pull/16972) +- Improve `New`/`Select` -Ycheck message [#16746](https://github.com/lampepfl/dotty/pull/16746) +- Improve error message for CyclicReference in macros [#16749](https://github.com/lampepfl/dotty/pull/16749) + +## REPL + +- Always load REPL classes in macros including the output directory [#16866](https://github.com/lampepfl/dotty/pull/16866) + +## Reporting + +- Improve missing argument list error [#17126](https://github.com/lampepfl/dotty/pull/17126) +- Improve implicit parameter error message with aliases [#17125](https://github.com/lampepfl/dotty/pull/17125) +- Improve "constructor proxy shadows outer" handling [#17154](https://github.com/lampepfl/dotty/pull/17154) +- Clarify ambiguous reference error message [#16137](https://github.com/lampepfl/dotty/pull/16137) +- Hint about forbidden combination of implicit values and conversions [#16735](https://github.com/lampepfl/dotty/pull/16735) +- Attach explanation message to diagnostic message [#16787](https://github.com/lampepfl/dotty/pull/16787) +- Propagate implicit search errors from implicit macros [#16840](https://github.com/lampepfl/dotty/pull/16840) +- Detail UnapplyInvalidReturnType error message [#17167](https://github.com/lampepfl/dotty/pull/17167) +- Add way to debug -Xcheck-macros tree checking [#16973](https://github.com/lampepfl/dotty/pull/16973) +- Enrich and finesse compiler crash reporting [#17031](https://github.com/lampepfl/dotty/pull/17031) +- Allow @implicitNotFound messages as explanations [#16893](https://github.com/lampepfl/dotty/pull/16893) +- Include top-level symbols from same 
file in outer ambiguity error [#17033](https://github.com/lampepfl/dotty/pull/17033) +- Do not issue deprecation warnings when declaring deprecated case classes [#17165](https://github.com/lampepfl/dotty/pull/17165) + +## Scala-JS + +- Fix #17344: Make implicit references to this above dynamic imports explicit. [#17357](https://github.com/lampepfl/dotty/pull/17357) +- Fix #12621: Better error message for JS trait ctor param. [#16811](https://github.com/lampepfl/dotty/pull/16811) +- Fix #16801: Handle Closure's of s.r.FunctionXXL. [#16809](https://github.com/lampepfl/dotty/pull/16809) +- Fix #17549: Unify how Memoize and Constructors decide what fields need storing. [#17560](https://github.com/lampepfl/dotty/pull/17560) + +## Scaladoc + +- Feat: Add a blog configuration with yaml [#17214](https://github.com/lampepfl/dotty/pull/17214) +- Don't render the "$" for module [#17302](https://github.com/lampepfl/dotty/pull/17302) +- Fix: Add scrollbar to the sidebar [#17203](https://github.com/lampepfl/dotty/pull/17203) +- Scaladoc: fix crash when processing extends call [#17260](https://github.com/lampepfl/dotty/pull/17260) +- Fix: Modify the CSS so that the logo of the generated documentation is adaptive [#17172](https://github.com/lampepfl/dotty/pull/17172) +- Fix: Remove the duplicate parameter when generating the scaladoc. 
[#17097](https://github.com/lampepfl/dotty/pull/17097) +- Fix: padding top in mobile version [#17019](https://github.com/lampepfl/dotty/pull/17019) +- Fix: tap target of the menu in Mobile version [#17018](https://github.com/lampepfl/dotty/pull/17018) +- Scaladoc: Fix expand icon not changing on anchor link [#17053](https://github.com/lampepfl/dotty/pull/17053) +- Scaladoc: fix inkuire generation for PolyTypes [#17129](https://github.com/lampepfl/dotty/pull/17129) +- Re port scroll bar [#17463](https://github.com/lampepfl/dotty/pull/17463) +- Handle empty files and truncated YAML front matter [#17527](https://github.com/lampepfl/dotty/pull/17527) + +## SemanticDB + +- Make sure symbol exists before calling owner [#16860](https://github.com/lampepfl/dotty/pull/16860) +- Support LambdaType (convert from HKTypeLambda) [#16056](https://github.com/lampepfl/dotty/pull/16056) + +## Specification + +- Apply `class-shadowing.md` to the Spec [#16839](https://github.com/lampepfl/dotty/pull/16839) +- Adding base for future Spec into the compiler repo [#16825](https://github.com/lampepfl/dotty/pull/16825) + +## Standard Library + +- Optimization: avoid NotGiven allocations [#17090](https://github.com/lampepfl/dotty/pull/17090) + +## Tooling + +- Disable `ExtractSemanticDB` phase when writing to output directory defined as JAR. 
[#16790](https://github.com/lampepfl/dotty/pull/16790) +- Print owner of bind symbol with -Yprint-debug-owners [#16854](https://github.com/lampepfl/dotty/pull/16854) +- Small fixes to allow using Metals with scaladoc with sbt [#16816](https://github.com/lampepfl/dotty/pull/16816) + +## Transform + +- Move CrossVersionChecks before FirstTransform [#17301](https://github.com/lampepfl/dotty/pull/17301) +- Fix needsOuterIfReferenced [#17159](https://github.com/lampepfl/dotty/pull/17159) +- Drop incorrect super accessor in trait subclass [#17062](https://github.com/lampepfl/dotty/pull/17062) +- Generate toString only for synthetic companions of case classes [#16890](https://github.com/lampepfl/dotty/pull/16890) +- Check trait constructor for accessibility even if not called at Typer [#17094](https://github.com/lampepfl/dotty/pull/17094) +- Fix #17435: A simpler fix [#17436](https://github.com/lampepfl/dotty/pull/17436) + +## Typer + +- Preserve type bounds for inlined definitions in posttyper [#17190](https://github.com/lampepfl/dotty/pull/17190) +- Change logic to find members of recursive types [#17386](https://github.com/lampepfl/dotty/pull/17386) +- Recognize named arguments in isFunctionWithUnknownParamType [#17161](https://github.com/lampepfl/dotty/pull/17161) +- Better comparisons for type projections [#17092](https://github.com/lampepfl/dotty/pull/17092) +- Allow selectDynamic and applyDynamic to be extension methods [#17106](https://github.com/lampepfl/dotty/pull/17106) +- Fix use of accessibleFrom when finding default arg getters [#16977](https://github.com/lampepfl/dotty/pull/16977) +- Map class literal constant types [#16988](https://github.com/lampepfl/dotty/pull/16988) +- Always use adapted type in withDenotation [#16901](https://github.com/lampepfl/dotty/pull/16901) +- Restrict captureWildcards to only be used if needed [#16799](https://github.com/lampepfl/dotty/pull/16799) +- Don't capture wildcards if in closure or by-name 
[#16732](https://github.com/lampepfl/dotty/pull/16732) +- Infer: Don't minimise to Nothing if there's an upper bound [#16786](https://github.com/lampepfl/dotty/pull/16786) +- Perform Matchable check only if type test is needed [#16824](https://github.com/lampepfl/dotty/pull/16824) +- Don't eta expand unary varargs methods [#16892](https://github.com/lampepfl/dotty/pull/16892) +- Fix beta-reduction with `Nothing` and `null` args [#16938](https://github.com/lampepfl/dotty/pull/16938) +- Generate kind-correct wildcards when selecting from a wildcard [#17025](https://github.com/lampepfl/dotty/pull/17025) +- Fix #16405 ctd - wildcards prematurely resolving to Nothing [#16764](https://github.com/lampepfl/dotty/pull/16764) +- Test: add regression test for #7790 [#17473](https://github.com/lampepfl/dotty/pull/17473) +- Properly handle `AnyVal`s as refinement members of `Selectable`s [#16286](https://github.com/lampepfl/dotty/pull/16286) +- Fix `accessibleType` for package object prefixes [#18057](https://github.com/lampepfl/dotty/pull/18057) +- Add clause for protected visibility from package objects [#18134](https://github.com/lampepfl/dotty/pull/18134) +- Revert "Include top-level symbols from same file in outer ambiguity error" [#17438](https://github.com/lampepfl/dotty/pull/17438) +- Heal stage inconsistent prefixes of type projections [#18239](https://github.com/lampepfl/dotty/pull/18239) +- Fix regression #17245: Overloaded methods with ClassTags [#18286](https://github.com/lampepfl/dotty/pull/18286) +- Disallow taking singleton types of packages again [#18232](https://github.com/lampepfl/dotty/pull/18232) +- A slightly more conservative version of #14218 [#18352](https://github.com/lampepfl/dotty/pull/18352) +- Record failures to adapt application arguments [#18269](https://github.com/lampepfl/dotty/pull/18269) +- Refine `infoDependsOnPrefix` [#18204](https://github.com/lampepfl/dotty/pull/18204) +- Tweak selection from self types 
[#18467](https://github.com/lampepfl/dotty/pull/18467) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.3.0..3.3.1-RC1` these are: + +``` + 148 Nicolas Stucki + 65 Martin Odersky + 51 Szymon Rodziewicz + 49 Dale Wijnand + 49 Quentin Bernet + 38 Chris Kipp + 19 David Hua + 18 Lucas + 18 ysthakur + 15 Fengyun Liu + 15 Paweł Marks + 14 Guillaume Martres + 14 Jamie Thompson + 11 Sébastien Doeraene + 9 Timothée Andres + 8 Kacper Korban + 7 Matt Bovel + 7 Som Snytt + 6 Julien Richard-Foy + 6 Lucas Leblanc + 5 Michał Pałka + 4 Anatolii Kmetiuk + 4 Guillaume Raffin + 4 Paul Coral + 4 Wojciech Mazur + 4 Yichen Xu + 3 Decel + 3 Jan Chyb + 2 Adrien Piquerez + 2 Arman Bilge + 2 Carl + 2 Florian3k + 2 Kenji Yoshida + 2 Michael Pilquist + 2 Natsu Kagami + 2 Seth Tisue + 2 Tomasz Godzik + 2 Vasil Vasilev + 2 Yadu Krishnan + 1 Bersier + 1 Flavio Brasil + 1 Jan-Pieter van den Heuvel + 1 Lukas Rytz + 1 Miles Yucht + 1 Mohammad Yousuf Minhaj Zia + 1 Ondra Pelech + 1 Philippus + 1 Rikito Taniguchi + 1 Simon R + 1 brandonspark + 1 github-actions[bot] + 1 liang3zy22 + 1 s.bazarsadaev + 1 Łukasz Wroński + +``` diff --git a/changelogs/3.3.1-RC2.md b/changelogs/3.3.1-RC2.md new file mode 100644 index 000000000000..f21bfa074b66 --- /dev/null +++ b/changelogs/3.3.1-RC2.md @@ -0,0 +1,16 @@ +# Backported fixes + +- Dealias types in `New` before matching quotes [#17615](https://github.com/lampepfl/dotty/pull/17615) +- Fix `accessibleType` for package object prefixes [#18057](https://github.com/lampepfl/dotty/pull/18057) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.3.1-RC1..3.3.1-RC2` these are: + +``` + 2 Martin Odersky + 2 Paweł Marks + 1 Nicolas Stucki +``` diff --git a/changelogs/3.3.1-RC3.md b/changelogs/3.3.1-RC3.md new file mode 100644 index 000000000000..eb19f40b10dc --- /dev/null +++ b/changelogs/3.3.1-RC3.md 
@@ -0,0 +1,15 @@ +# Backported fixes + +- Add clause for protected visibility from package objects [#18134](https://github.com/lampepfl/dotty/pull/18134) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.3.1-RC2..3.3.1-RC3` these are: + +``` + 2 Paweł Marks + 1 Nicolas Stucki + +``` diff --git a/changelogs/3.3.1-RC4.md b/changelogs/3.3.1-RC4.md new file mode 100644 index 000000000000..7d95e0258fad --- /dev/null +++ b/changelogs/3.3.1-RC4.md @@ -0,0 +1,15 @@ +# Backported fixes + +- Revert "Include top-level symbols from same file in outer ambiguity error" [#17438](https://github.com/lampepfl/dotty/pull/17438) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.3.1-RC3..3.3.1-RC4` these are: + +``` + 2 Paweł Marks + 1 Nicolas Stucki + +``` diff --git a/changelogs/3.3.1-RC5.md b/changelogs/3.3.1-RC5.md new file mode 100644 index 000000000000..e0bfc2a7fea8 --- /dev/null +++ b/changelogs/3.3.1-RC5.md @@ -0,0 +1,22 @@ +# Backported fixes + +- Heal stage inconsistent prefixes of type projections [#18239](https://github.com/lampepfl/dotty/pull/18239) +- Fix regression #17245: Overloaded methods with ClassTags [#18286](https://github.com/lampepfl/dotty/pull/18286) +- Disallow taking singleton types of packages again [#18232](https://github.com/lampepfl/dotty/pull/18232) +- A slightly more conservative version of #14218 [#18352](https://github.com/lampepfl/dotty/pull/18352) +- Record failures to adapt application arguments [#18269](https://github.com/lampepfl/dotty/pull/18269) +- Fix regression in exhaustivity of HK types [#18303](https://github.com/lampepfl/dotty/pull/18303) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.3.1-RC4..3.3.1-RC5` these are: + +``` + 5 Dale Wijnand + 2 Martin Odersky + 2 Paweł Marks + 1 Jan Chyb + 
1 Nicolas Stucki +``` diff --git a/changelogs/3.3.1-RC6.md b/changelogs/3.3.1-RC6.md new file mode 100644 index 000000000000..96181855f1a0 --- /dev/null +++ b/changelogs/3.3.1-RC6.md @@ -0,0 +1,17 @@ +# Backported fixes + +- Refine `infoDependsOnPrefix` [#18204](https://github.com/lampepfl/dotty/pull/18204) +- Do not compute `protoFormal` if `param.tpt` is empty [#18288](https://github.com/lampepfl/dotty/pull/18288) +- Revert "Normalize match type usage during implicit lookup" [#18440](https://github.com/lampepfl/dotty/pull/18440) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.3.1-RC5..3.3.1-RC6` these are: + +``` + 3 Paweł Marks + 2 Martin Odersky + 1 Nicolas Stucki +``` diff --git a/changelogs/3.3.1-RC7.md b/changelogs/3.3.1-RC7.md new file mode 100644 index 000000000000..f8f093a18d11 --- /dev/null +++ b/changelogs/3.3.1-RC7.md @@ -0,0 +1,16 @@ +# Backported fixes + +- Tweak selection from self types [#18467](https://github.com/lampepfl/dotty/pull/18467) +- Revert "Add reflect `defn.FunctionClass` overloads" [#18473](https://github.com/lampepfl/dotty/pull/18473) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.3.1-RC6..3.3.1-RC7` these are: + +``` + 3 Paweł Marks + 1 Martin Odersky + +``` diff --git a/changelogs/3.3.1.md b/changelogs/3.3.1.md new file mode 100644 index 000000000000..5bbd6eb2861c --- /dev/null +++ b/changelogs/3.3.1.md @@ -0,0 +1,287 @@ +# Highlights of the release + +- Support records in JavaParsers [#16762](https://github.com/lampepfl/dotty/pull/16762) +- Port JVM backend refactor from Scala 2 [#15322](https://github.com/lampepfl/dotty/pull/15322) + +# Other changes and fixes + +## Backend + +- Disallow mixins where super calls bind to vals [#16908](https://github.com/lampepfl/dotty/pull/16908) +- Fix #15107: Avoid re-emitting a LineNumber after only LabelNodes. 
[#16813](https://github.com/lampepfl/dotty/pull/16813) + +## Coverage + +- Fix #17042: Preserve the shape of secondary ctors in instrumentCoverage. [#17111](https://github.com/lampepfl/dotty/pull/17111) + +## Default parameters + +- Dupe fix when finding default arg getters [#17058](https://github.com/lampepfl/dotty/pull/17058) + +## Documentation + +- Fix: ensure syntax blocks for ebnf are marked as such [#16837](https://github.com/lampepfl/dotty/pull/16837) + +## Erasure + +- Handle `@companionClass` and `@companionMethod` meta-annotations [#17091](https://github.com/lampepfl/dotty/pull/17091) + +## Extension Methods + +- Support extension methods imported from different objects [#17050](https://github.com/lampepfl/dotty/pull/17050) + +## GADTs + +- Fix tuple member selection so it works with GADT healing [#16766](https://github.com/lampepfl/dotty/pull/16766) +- Fix upper bound constraints, that are higher-kinded [#16744](https://github.com/lampepfl/dotty/pull/16744) +- Split out immutable GadtConstraint [#16602](https://github.com/lampepfl/dotty/pull/16602) + +## Implicits + +- Improve subtyping check for not yet eta-expanded higher kinded types [#17139](https://github.com/lampepfl/dotty/pull/17139) +- Harden tpd.Apply/TypeApply in case of errors [#16887](https://github.com/lampepfl/dotty/pull/16887) +- Try to be more subtle when inferring type parameters of class parents [#16896](https://github.com/lampepfl/dotty/pull/16896) +- Include `P` in the implicit scope of `P.this.type` [#17088](https://github.com/lampepfl/dotty/pull/17088) + +## Incremental Compilation + +- Fix under-compilation when the method type in a SAM changes [#16996](https://github.com/lampepfl/dotty/pull/16996) + +## Infrastructure + +- Set reference version to 3.3.0-RC6 [#17504](https://github.com/lampepfl/dotty/pull/17504) +- Fix #17119: Download Coursier from GitHub directly [#17141](https://github.com/lampepfl/dotty/pull/17141) + +## Inline + +- Remove NamedArg from inlined arguments 
[#17228](https://github.com/lampepfl/dotty/pull/17228) +- Don't generate a Select for a TermRef with NoPrefix [#16754](https://github.com/lampepfl/dotty/pull/16754) +- Prepare bodies of inline forwarders eagerly [#16757](https://github.com/lampepfl/dotty/pull/16757) +- Do not remove inline method implementations until PruneErasedDefs [#17408](https://github.com/lampepfl/dotty/pull/17408) + +## Java Interop + +- ClassfileParser: allow missing param names (for JDK 21) [#17536](https://github.com/lampepfl/dotty/pull/17536) + +## Linting + +- Improve -Wunused: locals, privates with unset vars warning #16639 [#17160](https://github.com/lampepfl/dotty/pull/17160) +- Fix wunused false positive when deriving alias type [#17157](https://github.com/lampepfl/dotty/pull/17157) +- Port `-Wnonunit-statement` setting for dotty [#16936](https://github.com/lampepfl/dotty/pull/16936) + +## Match Types + +- Normalize match type usage during implicit lookup [#17457](https://github.com/lampepfl/dotty/pull/17457) +- Fix #13757: Explicitly disallow higher-kinded scrutinees of match types. 
[#17322](https://github.com/lampepfl/dotty/pull/17322) +- Fix match type reduction with wildcard type arguments [#17065](https://github.com/lampepfl/dotty/pull/17065) +- Fix check whether classtag can be generated for match types [#16708](https://github.com/lampepfl/dotty/pull/16708) + +## Parser + +- Allow lines starting with `.` to fall outside previous indentation widths [#17056](https://github.com/lampepfl/dotty/pull/17056) + +## Pattern Matching + +- Fix #11541: Specialize ClassTag[T] in exhaustivity check [#17385](https://github.com/lampepfl/dotty/pull/17385) +- Check outer class prefixes in type projections when pattern matching [#17136](https://github.com/lampepfl/dotty/pull/17136) +- Make unchecked cases non-`@unchecked` and non-unreachable [#16958](https://github.com/lampepfl/dotty/pull/16958) +- Fix #16899: Better handle X instanceOf P where X is T1 | T2 [#17382](https://github.com/lampepfl/dotty/pull/17382) + +## Pickling + +- ClassfileParser: Avoid cycle when accessing companion in inner class lookup [#16882](https://github.com/lampepfl/dotty/pull/16882) + +## Polyfunctions + +- Fix type aliases in beta-reduction of polyfunctions [#17054](https://github.com/lampepfl/dotty/pull/17054) + +## Quotes + +- Register `paramProxy` and `thisProxy` in `Quote` type [#17541](https://github.com/lampepfl/dotty/pull/17541) +- Only check newVal/newMethod privateWithin on -Xcheck-macros [#17437](https://github.com/lampepfl/dotty/pull/17437) +- Unencode quote and splice trees [#17342](https://github.com/lampepfl/dotty/pull/17342) +- Correctly type Expr.ofTupleFromSeq for arity > 22 [#17261](https://github.com/lampepfl/dotty/pull/17261) +- Use TermRef to distinguish distinct Type[T] instances [#17205](https://github.com/lampepfl/dotty/pull/17205) +- Check level consistency of SingletonTypeTree as a type [#17209](https://github.com/lampepfl/dotty/pull/17209) +- Fix splice type variable pattern detection [#17048](https://github.com/lampepfl/dotty/pull/17048) +- Avoid 
creation of `@SplicedType` quote local references [#17051](https://github.com/lampepfl/dotty/pull/17051) +- Dealias type references when healing types in quotes [#17049](https://github.com/lampepfl/dotty/pull/17049) +- Replace quoted type variables in signature of HOAS pattern result [#16951](https://github.com/lampepfl/dotty/pull/16951) +- Beta-reduce directly applied PolymorphicFunction [#16623](https://github.com/lampepfl/dotty/pull/16623) +- Use `Object.toString` for `quoted.{Expr, Type}` [#16663](https://github.com/lampepfl/dotty/pull/16663) +- Fix Splicer.isEscapedVariable [#16838](https://github.com/lampepfl/dotty/pull/16838) +- Fix references to class members defined in quotes [#17107](https://github.com/lampepfl/dotty/pull/17107) +- Handle pickled forward references in pickled expressions [#16855](https://github.com/lampepfl/dotty/pull/16855) +- Fix #16615 - crashes of path dependent types in spliced Type.of [#16773](https://github.com/lampepfl/dotty/pull/16773) +- Disallow local term references in staged types [#16362](https://github.com/lampepfl/dotty/pull/16362) +- Refactor level checking / type healing logic [#17082](https://github.com/lampepfl/dotty/pull/17082) +- Dealias quoted types when staging [#17059](https://github.com/lampepfl/dotty/pull/17059) +- Fix quotes with references to path dependent types [#17081](https://github.com/lampepfl/dotty/pull/17081) +- Make arguments order in quote hole deterministic [#17405](https://github.com/lampepfl/dotty/pull/17405) +- Only transform the body of the quote with QuoteTransformer [#17451](https://github.com/lampepfl/dotty/pull/17451) +- Place staged type captures in Quote AST [#17424](https://github.com/lampepfl/dotty/pull/17424) +- Add SplicePattern AST to parse and type quote pattern splices [#17396](https://github.com/lampepfl/dotty/pull/17396) + +## Reflection + +- -Xcheck-macros: add hint when a symbol is created twice [#16733](https://github.com/lampepfl/dotty/pull/16733) +- Assert that symbols created 
using reflect API have correct privateWithin symbols [#17352](https://github.com/lampepfl/dotty/pull/17352) +- Fix reflect.LambdaType type test [#16972](https://github.com/lampepfl/dotty/pull/16972) +- Improve `New`/`Select` -Ycheck message [#16746](https://github.com/lampepfl/dotty/pull/16746) +- Improve error message for CyclicReference in macros [#16749](https://github.com/lampepfl/dotty/pull/16749) +- Add reflect `defn.FunctionClass` overloads [#16849](https://github.com/lampepfl/dotty/pull/16849) + +## REPL + +- Always load REPL classes in macros including the output directory [#16866](https://github.com/lampepfl/dotty/pull/16866) + +## Reporting + +- Improve missing argument list error [#17126](https://github.com/lampepfl/dotty/pull/17126) +- Improve implicit parameter error message with aliases [#17125](https://github.com/lampepfl/dotty/pull/17125) +- Improve "constructor proxy shadows outer" handling [#17154](https://github.com/lampepfl/dotty/pull/17154) +- Clarify ambiguous reference error message [#16137](https://github.com/lampepfl/dotty/pull/16137) +- Hint about forbidden combination of implicit values and conversions [#16735](https://github.com/lampepfl/dotty/pull/16735) +- Attach explanation message to diagnostic message [#16787](https://github.com/lampepfl/dotty/pull/16787) +- Propagate implicit search errors from implicit macros [#16840](https://github.com/lampepfl/dotty/pull/16840) +- Detail UnapplyInvalidReturnType error message [#17167](https://github.com/lampepfl/dotty/pull/17167) +- Add way to debug -Xcheck-macros tree checking [#16973](https://github.com/lampepfl/dotty/pull/16973) +- Enrich and finesse compiler crash reporting [#17031](https://github.com/lampepfl/dotty/pull/17031) +- Allow @implicitNotFound messages as explanations [#16893](https://github.com/lampepfl/dotty/pull/16893) +- Include top-level symbols from same file in outer ambiguity error [#17033](https://github.com/lampepfl/dotty/pull/17033) +- Do not issue deprecation warnings 
when declaring deprecated case classes [#17165](https://github.com/lampepfl/dotty/pull/17165) + +## Scala-JS + +- Fix #17344: Make implicit references to this above dynamic imports explicit. [#17357](https://github.com/lampepfl/dotty/pull/17357) +- Fix #12621: Better error message for JS trait ctor param. [#16811](https://github.com/lampepfl/dotty/pull/16811) +- Fix #16801: Handle Closure's of s.r.FunctionXXL. [#16809](https://github.com/lampepfl/dotty/pull/16809) +- Fix #17549: Unify how Memoize and Constructors decide what fields need storing. [#17560](https://github.com/lampepfl/dotty/pull/17560) + +## Scaladoc + +- Feat: Add a blog configuration with yaml [#17214](https://github.com/lampepfl/dotty/pull/17214) +- Don't render the "$" for module [#17302](https://github.com/lampepfl/dotty/pull/17302) +- Fix: Add scrollbar to the sidebar [#17203](https://github.com/lampepfl/dotty/pull/17203) +- Scaladoc: fix crash when processing extends call [#17260](https://github.com/lampepfl/dotty/pull/17260) +- Fix: Modify the CSS so that the logo of the generated documentation is adaptive [#17172](https://github.com/lampepfl/dotty/pull/17172) +- Fix: Remove the duplicate parameter when generating the scaladoc. 
[#17097](https://github.com/lampepfl/dotty/pull/17097) +- Fix: padding top in mobile version [#17019](https://github.com/lampepfl/dotty/pull/17019) +- Fix: tap target of the menu in Mobile version [#17018](https://github.com/lampepfl/dotty/pull/17018) +- Scaladoc: Fix expand icon not changing on anchor link [#17053](https://github.com/lampepfl/dotty/pull/17053) +- Scaladoc: fix inkuire generation for PolyTypes [#17129](https://github.com/lampepfl/dotty/pull/17129) +- Re port scroll bar [#17463](https://github.com/lampepfl/dotty/pull/17463) +- Handle empty files and truncated YAML front matter [#17527](https://github.com/lampepfl/dotty/pull/17527) + +## SemanticDB + +- Make sure symbol exists before calling owner [#16860](https://github.com/lampepfl/dotty/pull/16860) +- Support LambdaType (convert from HKTypeLambda) [#16056](https://github.com/lampepfl/dotty/pull/16056) + +## Specification + +- Apply `class-shadowing.md` to the Spec [#16839](https://github.com/lampepfl/dotty/pull/16839) +- Adding base for future Spec into the compiler repo [#16825](https://github.com/lampepfl/dotty/pull/16825) + +## Standard Library + +- Optimization: avoid NotGiven allocations [#17090](https://github.com/lampepfl/dotty/pull/17090) + +## Tooling + +- Disable `ExtractSemanticDB` phase when writing to output directory defined as JAR. 
[#16790](https://github.com/lampepfl/dotty/pull/16790) +- Print owner of bind symbol with -Yprint-debug-owners [#16854](https://github.com/lampepfl/dotty/pull/16854) +- Small fixes to allow using Metals with scaladoc with sbt [#16816](https://github.com/lampepfl/dotty/pull/16816) + +## Transform + +- Move CrossVersionChecks before FirstTransform [#17301](https://github.com/lampepfl/dotty/pull/17301) +- Fix needsOuterIfReferenced [#17159](https://github.com/lampepfl/dotty/pull/17159) +- Drop incorrect super accessor in trait subclass [#17062](https://github.com/lampepfl/dotty/pull/17062) +- Generate toString only for synthetic companions of case classes [#16890](https://github.com/lampepfl/dotty/pull/16890) +- Check trait constructor for accessibility even if not called at Typer [#17094](https://github.com/lampepfl/dotty/pull/17094) +- Fix #17435: A simpler fix [#17436](https://github.com/lampepfl/dotty/pull/17436) + +## Typer + +- Preserve type bounds for inlined definitions in posttyper [#17190](https://github.com/lampepfl/dotty/pull/17190) +- Change logic to find members of recursive types [#17386](https://github.com/lampepfl/dotty/pull/17386) +- Recognize named arguments in isFunctionWithUnknownParamType [#17161](https://github.com/lampepfl/dotty/pull/17161) +- Better comparisons for type projections [#17092](https://github.com/lampepfl/dotty/pull/17092) +- Allow selectDynamic and applyDynamic to be extension methods [#17106](https://github.com/lampepfl/dotty/pull/17106) +- Fix use of accessibleFrom when finding default arg getters [#16977](https://github.com/lampepfl/dotty/pull/16977) +- Map class literal constant types [#16988](https://github.com/lampepfl/dotty/pull/16988) +- Always use adapted type in withDenotation [#16901](https://github.com/lampepfl/dotty/pull/16901) +- Restrict captureWildcards to only be used if needed [#16799](https://github.com/lampepfl/dotty/pull/16799) +- Don't capture wildcards if in closure or by-name 
[#16732](https://github.com/lampepfl/dotty/pull/16732) +- Infer: Don't minimise to Nothing if there's an upper bound [#16786](https://github.com/lampepfl/dotty/pull/16786) +- Perform Matchable check only if type test is needed [#16824](https://github.com/lampepfl/dotty/pull/16824) +- Don't eta expand unary varargs methods [#16892](https://github.com/lampepfl/dotty/pull/16892) +- Fix beta-reduction with `Nothing` and `null` args [#16938](https://github.com/lampepfl/dotty/pull/16938) +- Generate kind-correct wildcards when selecting from a wildcard [#17025](https://github.com/lampepfl/dotty/pull/17025) +- Fix #16405 ctd - wildcards prematurely resolving to Nothing [#16764](https://github.com/lampepfl/dotty/pull/16764) +- Test: add regression test for #7790 [#17473](https://github.com/lampepfl/dotty/pull/17473) +- Properly handle `AnyVal`s as refinement members of `Selectable`s [#16286](https://github.com/lampepfl/dotty/pull/16286) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.3.0..3.3.1` these are: + +``` + 152 Nicolas Stucki + 73 Martin Odersky + 54 Dale Wijnand + 51 Szymon Rodziewicz + 49 Quentin Bernet + 38 Chris Kipp + 31 Paweł Marks + 19 David Hua + 18 Lucas + 18 ysthakur + 15 Fengyun Liu + 14 Guillaume Martres + 14 Jamie Thompson + 11 Sébastien Doeraene + 9 Timothée Andres + 8 Kacper Korban + 7 Matt Bovel + 7 Som Snytt + 6 Julien Richard-Foy + 6 Lucas Leblanc + 5 Michał Pałka + 4 Anatolii Kmetiuk + 4 Guillaume Raffin + 4 Jan Chyb + 4 Paul Coral + 4 Wojciech Mazur + 4 Yichen Xu + 3 Decel + 2 Adrien Piquerez + 2 Arman Bilge + 2 Carl + 2 Florian3k + 2 Kenji Yoshida + 2 Michael Pilquist + 2 Natsu Kagami + 2 Seth Tisue + 2 Tomasz Godzik + 2 Vasil Vasilev + 2 Yadu Krishnan + 1 Bersier + 1 Flavio Brasil + 1 Jan-Pieter van den Heuvel + 1 Lukas Rytz + 1 Miles Yucht + 1 Mohammad Yousuf Minhaj Zia + 1 Ondra Pelech + 1 Philippus + 1 Rikito Taniguchi + 1 Simon R + 1 brandonspark + 1 
github-actions[bot] + 1 liang3zy22 + 1 s.bazarsadaev + 1 Łukasz Wroński +``` diff --git a/community-build/community-projects/betterfiles b/community-build/community-projects/betterfiles index 0ab941360880..d098f2799092 160000 --- a/community-build/community-projects/betterfiles +++ b/community-build/community-projects/betterfiles @@ -1 +1 @@ -Subproject commit 0ab941360880095419183309b0b9b3363eb1ad00 +Subproject commit d098f279909246243643ba3b85f3520a24c377af diff --git a/community-build/community-projects/cats-effect-3 b/community-build/community-projects/cats-effect-3 index 3a32c0e5b7b6..1d425e6efdf8 160000 --- a/community-build/community-projects/cats-effect-3 +++ b/community-build/community-projects/cats-effect-3 @@ -1 +1 @@ -Subproject commit 3a32c0e5b7b61665e5bb94ccf0ed92beb66615dd +Subproject commit 1d425e6efdf8aee619a4a906e950473c51f78161 diff --git a/community-build/community-projects/cats-mtl b/community-build/community-projects/cats-mtl index 149f002c8774..0ab7aa1cc8a0 160000 --- a/community-build/community-projects/cats-mtl +++ b/community-build/community-projects/cats-mtl @@ -1 +1 @@ -Subproject commit 149f002c8774b61df87cb846455d94ae858b3b54 +Subproject commit 0ab7aa1cc8a087693b2b04c8a9cb63f69f4af54a diff --git a/community-build/community-projects/fs2 b/community-build/community-projects/fs2 index ac5275baf33b..6d7c6d6924cb 160000 --- a/community-build/community-projects/fs2 +++ b/community-build/community-projects/fs2 @@ -1 +1 @@ -Subproject commit ac5275baf33b03da0a461b5de735ee6a1f5a524e +Subproject commit 6d7c6d6924cb055028458ac8236622190acf66d1 diff --git a/community-build/community-projects/http4s b/community-build/community-projects/http4s index c3d46f561ed1..aa85f5f2e660 160000 --- a/community-build/community-projects/http4s +++ b/community-build/community-projects/http4s @@ -1 +1 @@ -Subproject commit c3d46f561ed1026ae54e1acbd5e4730f0498ea93 +Subproject commit aa85f5f2e660d1d4370d90316333718fd6517051 diff --git 
a/community-build/community-projects/play-json b/community-build/community-projects/play-json index 356b7044ed3e..b2b7f8b834a4 160000 --- a/community-build/community-projects/play-json +++ b/community-build/community-projects/play-json @@ -1 +1 @@ -Subproject commit 356b7044ed3efd6cf9350eb9930be6abd4906b6e +Subproject commit b2b7f8b834a405ec6ba5455dc345b754fab21e8f diff --git a/community-build/community-projects/protoquill b/community-build/community-projects/protoquill index 16d26fcb3072..494c2ddc06e7 160000 --- a/community-build/community-projects/protoquill +++ b/community-build/community-projects/protoquill @@ -1 +1 @@ -Subproject commit 16d26fcb30720b9aa81d29f08b9da10916e269a2 +Subproject commit 494c2ddc06e71f1c7f13b382675525130feee9a0 diff --git a/community-build/community-projects/requests-scala b/community-build/community-projects/requests-scala index 6d4a223bc33d..8e4a40588491 160000 --- a/community-build/community-projects/requests-scala +++ b/community-build/community-projects/requests-scala @@ -1 +1 @@ -Subproject commit 6d4a223bc33def14ae9a4def24a3f5c258451e8e +Subproject commit 8e4a40588491608aa40099f79c881d54a5094e75 diff --git a/community-build/community-projects/scala-parallel-collections b/community-build/community-projects/scala-parallel-collections index a6bd648bb188..7d0e41ae4d09 160000 --- a/community-build/community-projects/scala-parallel-collections +++ b/community-build/community-projects/scala-parallel-collections @@ -1 +1 @@ -Subproject commit a6bd648bb188a65ab36be07e956e52fe25f64d67 +Subproject commit 7d0e41ae4d09e1ddf063651e377921ec493fc5bf diff --git a/community-build/community-projects/scalaz b/community-build/community-projects/scalaz index ee85b0925809..6e7f3d9caf64 160000 --- a/community-build/community-projects/scalaz +++ b/community-build/community-projects/scalaz @@ -1 +1 @@ -Subproject commit ee85b0925809f6e04808a6124ae04dd89adba0d6 +Subproject commit 6e7f3d9caf64d8ad1c82804cf418882345f41930 diff --git 
a/community-build/community-projects/specs2 b/community-build/community-projects/specs2 index 2bfe446a4e91..789f23b75db1 160000 --- a/community-build/community-projects/specs2 +++ b/community-build/community-projects/specs2 @@ -1 +1 @@ -Subproject commit 2bfe446a4e9122b1122a7e13a3d100b3749b8630 +Subproject commit 789f23b75db1cf7961d04468b21a2cc0d7ba32d8 diff --git a/community-build/community-projects/spire b/community-build/community-projects/spire index 7f630c0209e3..bc524eeea735 160000 --- a/community-build/community-projects/spire +++ b/community-build/community-projects/spire @@ -1 +1 @@ -Subproject commit 7f630c0209e327bdc782ade2210d8e4b916fddcc +Subproject commit bc524eeea735a3cf4d5108039f95950b024a14e4 diff --git a/community-build/community-projects/stdLib213 b/community-build/community-projects/stdLib213 index 986dcc160aab..1a2521996bad 160000 --- a/community-build/community-projects/stdLib213 +++ b/community-build/community-projects/stdLib213 @@ -1 +1 @@ -Subproject commit 986dcc160aab85298f6cab0bf8dd0345497cdc01 +Subproject commit 1a2521996badfe4cb3d9b8cdecefacb1251faeb9 diff --git a/community-build/src/scala/dotty/communitybuild/projects.scala b/community-build/src/scala/dotty/communitybuild/projects.scala index 52155189a31f..1349c3adc3b9 100644 --- a/community-build/src/scala/dotty/communitybuild/projects.scala +++ b/community-build/src/scala/dotty/communitybuild/projects.scala @@ -140,7 +140,7 @@ final case class SbtCommunityProject( case Some(ivyHome) => List(s"-Dsbt.ivy.home=$ivyHome") case _ => Nil extraSbtArgs ++ sbtProps ++ List( - "-sbt-version", "1.7.1", + "-sbt-version", "1.8.2", "-Dsbt.supershell=false", s"-Ddotty.communitybuild.dir=$communitybuildDir", s"--addPluginSbtFile=$sbtPluginFilePath" diff --git a/community-build/test/scala/dotty/communitybuild/CommunityBuildTest.scala b/community-build/test/scala/dotty/communitybuild/CommunityBuildTest.scala index 146ad6f4f951..bf6b6d431509 100644 --- 
a/community-build/test/scala/dotty/communitybuild/CommunityBuildTest.scala +++ b/community-build/test/scala/dotty/communitybuild/CommunityBuildTest.scala @@ -94,6 +94,7 @@ class CommunityBuildTestC: @Test def shapeless = projects.shapeless.run() @Test def sourcecode = projects.sourcecode.run() @Test def specs2 = projects.specs2.run() + @Test def stdLib213 = projects.stdLib213.run() @Test def ujson = projects.ujson.run() @Test def upickle = projects.upickle.run() diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala b/compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala index 3e2a8f1b0b60..e7b5a0dad1bf 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala @@ -4,7 +4,7 @@ package jvm import scala.language.unsafeNulls -import scala.annotation.switch +import scala.annotation.{switch, tailrec} import scala.collection.mutable.SortedMap import scala.tools.asm @@ -23,6 +23,7 @@ import dotty.tools.dotc.transform.SymUtils._ import dotty.tools.dotc.util.Spans._ import dotty.tools.dotc.core.Contexts._ import dotty.tools.dotc.core.Phases._ +import dotty.tools.dotc.core.Decorators.em import dotty.tools.dotc.report /* @@ -78,9 +79,14 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { tree match { case Assign(lhs @ DesugaredSelect(qual, _), rhs) => + val savedStackHeight = stackHeight val isStatic = lhs.symbol.isStaticMember - if (!isStatic) { genLoadQualifier(lhs) } + if (!isStatic) { + genLoadQualifier(lhs) + stackHeight += 1 + } genLoad(rhs, symInfoTK(lhs.symbol)) + stackHeight = savedStackHeight lineNumber(tree) // receiverClass is used in the bytecode to access the field. 
using sym.owner may lead to IllegalAccessError val receiverClass = qual.tpe.typeSymbol @@ -144,7 +150,9 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { } genLoad(larg, resKind) + stackHeight += resKind.size genLoad(rarg, if (isShift) INT else resKind) + stackHeight -= resKind.size (code: @switch) match { case ADD => bc add resKind @@ -181,14 +189,19 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { if (isArrayGet(code)) { // load argument on stack assert(args.length == 1, s"Too many arguments for array get operation: $tree"); + stackHeight += 1 genLoad(args.head, INT) + stackHeight -= 1 generatedType = k.asArrayBType.componentType bc.aload(elementType) } else if (isArraySet(code)) { val List(a1, a2) = args + stackHeight += 1 genLoad(a1, INT) + stackHeight += 1 genLoad(a2) + stackHeight -= 2 generatedType = UNIT bc.astore(elementType) } else { @@ -222,7 +235,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { val resKind = if (hasUnitBranch) UNIT else tpeTK(tree) val postIf = new asm.Label - genLoadTo(thenp, resKind, LoadDestination.Jump(postIf)) + genLoadTo(thenp, resKind, LoadDestination.Jump(postIf, stackHeight)) markProgramPoint(failure) genLoadTo(elsep, resKind, LoadDestination.FallThrough) markProgramPoint(postIf) @@ -481,7 +494,17 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { dest match case LoadDestination.FallThrough => () - case LoadDestination.Jump(label) => + case LoadDestination.Jump(label, targetStackHeight) => + if targetStackHeight < stackHeight then + val stackDiff = stackHeight - targetStackHeight + if expectedType == UNIT then + bc dropMany stackDiff + else + val loc = locals.makeTempLocal(expectedType) + bc.store(loc.idx, expectedType) + bc dropMany stackDiff + bc.load(loc.idx, expectedType) + end if bc goTo label case LoadDestination.Return => bc emitRETURN returnType @@ -576,7 +599,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { if dest == LoadDestination.FallThrough then val resKind = tpeTK(tree) val jumpTarget = new 
asm.Label - registerJumpDest(labelSym, resKind, LoadDestination.Jump(jumpTarget)) + registerJumpDest(labelSym, resKind, LoadDestination.Jump(jumpTarget, stackHeight)) genLoad(expr, resKind) markProgramPoint(jumpTarget) resKind @@ -634,7 +657,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { markProgramPoint(loop) if isInfinite then - val dest = LoadDestination.Jump(loop) + val dest = LoadDestination.Jump(loop, stackHeight) genLoadTo(body, UNIT, dest) dest else @@ -649,7 +672,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { val failure = new asm.Label genCond(cond, success, failure, targetIfNoJump = success) markProgramPoint(success) - genLoadTo(body, UNIT, LoadDestination.Jump(loop)) + genLoadTo(body, UNIT, LoadDestination.Jump(loop, stackHeight)) markProgramPoint(failure) end match LoadDestination.FallThrough @@ -700,7 +723,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { var elemKind = arr.elementType val argsSize = args.length if (argsSize > dims) { - report.error(s"too many arguments for array constructor: found ${args.length} but array has only $dims dimension(s)", ctx.source.atSpan(app.span)) + report.error(em"too many arguments for array constructor: found ${args.length} but array has only $dims dimension(s)", ctx.source.atSpan(app.span)) } if (argsSize < dims) { /* In one step: @@ -743,7 +766,9 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { // scala/bug#10290: qual can be `this.$outer()` (not just `this`), so we call genLoad (not just ALOAD_0) genLoad(superQual) + stackHeight += 1 genLoadArguments(args, paramTKs(app)) + stackHeight -= 1 generatedType = genCallMethod(fun.symbol, InvokeStyle.Super, app.span) // 'new' constructor call: Note: since constructors are @@ -765,7 +790,9 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { assert(classBTypeFromSymbol(ctor.owner) == rt, s"Symbol ${ctor.owner.showFullName} is different from $rt") mnode.visitTypeInsn(asm.Opcodes.NEW, rt.internalName) bc dup generatedType + stackHeight += 2 
genLoadArguments(args, paramTKs(app)) + stackHeight -= 2 genCallMethod(ctor, InvokeStyle.Special, app.span) case _ => @@ -798,8 +825,12 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { else if (app.hasAttachment(BCodeHelpers.UseInvokeSpecial)) InvokeStyle.Special else InvokeStyle.Virtual - if (invokeStyle.hasInstance) genLoadQualifier(fun) + val savedStackHeight = stackHeight + if invokeStyle.hasInstance then + genLoadQualifier(fun) + stackHeight += 1 genLoadArguments(args, paramTKs(app)) + stackHeight = savedStackHeight val DesugaredSelect(qual, name) = fun: @unchecked // fun is a Select, also checked in genLoadQualifier val isArrayClone = name == nme.clone_ && qual.tpe.widen.isInstanceOf[JavaArrayType] @@ -857,6 +888,8 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { bc iconst elems.length bc newarray elmKind + stackHeight += 3 // during the genLoad below, there is the result, its dup, and the index + var i = 0 var rest = elems while (!rest.isEmpty) { @@ -868,6 +901,8 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { i = i + 1 } + stackHeight -= 3 + generatedType } @@ -882,7 +917,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { val (generatedType, postMatch, postMatchDest) = if dest == LoadDestination.FallThrough then val postMatch = new asm.Label - (tpeTK(tree), postMatch, LoadDestination.Jump(postMatch)) + (tpeTK(tree), postMatch, LoadDestination.Jump(postMatch, stackHeight)) else (expectedType, null, dest) @@ -1159,14 +1194,21 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { } def genLoadArguments(args: List[Tree], btpes: List[BType]): Unit = - args match - case arg :: args1 => - btpes match - case btpe :: btpes1 => - genLoad(arg, btpe) - genLoadArguments(args1, btpes1) - case _ => - case _ => + @tailrec def loop(args: List[Tree], btpes: List[BType]): Unit = + args match + case arg :: args1 => + btpes match + case btpe :: btpes1 => + genLoad(arg, btpe) + stackHeight += btpe.size + loop(args1, btpes1) + case _ => + case _ => + + val 
savedStackHeight = stackHeight + loop(args, btpes) + stackHeight = savedStackHeight + end genLoadArguments def genLoadModule(tree: Tree): BType = { val module = ( @@ -1255,7 +1297,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { .toList // `StringConcatFactory` only got added in JDK 9, so use `StringBuilder` for lower - if (classfileVersion < asm.Opcodes.V9) { + if (backendUtils.classfileVersion < asm.Opcodes.V9) { // Estimate capacity needed for the string builder val approxBuilderSize = concatArguments.view.map { @@ -1265,11 +1307,14 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { }.sum bc.genNewStringBuilder(approxBuilderSize) + stackHeight += 1 // during the genLoad below, there is a reference to the StringBuilder on the stack for (elem <- concatArguments) { val elemType = tpeTK(elem) genLoad(elem, elemType) bc.genStringBuilderAppend(elemType) } + stackHeight -= 1 + bc.genStringBuilderEnd } else { @@ -1286,12 +1331,15 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { var totalArgSlots = 0 var countConcats = 1 // ie. 
1 + how many times we spilled + val savedStackHeight = stackHeight + for (elem <- concatArguments) { val tpe = tpeTK(elem) val elemSlots = tpe.size // Unlikely spill case if (totalArgSlots + elemSlots >= MaxIndySlots) { + stackHeight = savedStackHeight + countConcats bc.genIndyStringConcat(recipe.toString, argTypes.result(), constVals.result()) countConcats += 1 totalArgSlots = 0 @@ -1316,8 +1364,10 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { val tpe = tpeTK(elem) argTypes += tpe.toASMType genLoad(elem, tpe) + stackHeight += 1 } } + stackHeight = savedStackHeight bc.genIndyStringConcat(recipe.toString, argTypes.result(), constVals.result()) // If we spilled, generate one final concat @@ -1512,7 +1562,9 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { } else { val tk = tpeTK(l).maxType(tpeTK(r)) genLoad(l, tk) + stackHeight += tk.size genLoad(r, tk) + stackHeight -= tk.size genCJUMP(success, failure, op, tk, targetIfNoJump) } } @@ -1627,7 +1679,9 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { } genLoad(l, ObjectRef) + stackHeight += 1 genLoad(r, ObjectRef) + stackHeight -= 1 genCallMethod(equalsMethod, InvokeStyle.Static) genCZJUMP(success, failure, Primitives.NE, BOOL, targetIfNoJump) } @@ -1643,7 +1697,9 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { } else if (isNonNullExpr(l)) { // SI-7852 Avoid null check if L is statically non-null. 
genLoad(l, ObjectRef) + stackHeight += 1 genLoad(r, ObjectRef) + stackHeight -= 1 genCallMethod(defn.Any_equals, InvokeStyle.Virtual) genCZJUMP(success, failure, Primitives.NE, BOOL, targetIfNoJump) } else { @@ -1653,7 +1709,9 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { val lNonNull = new asm.Label genLoad(l, ObjectRef) + stackHeight += 1 genLoad(r, ObjectRef) + stackHeight -= 1 locals.store(eqEqTempLocal) bc dup ObjectRef genCZJUMP(lNull, lNonNull, Primitives.EQ, ObjectRef, targetIfNoJump = lNull) diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala b/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala index b6d898b3b221..c36c8c546635 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala @@ -42,18 +42,19 @@ import dotty.tools.backend.jvm.DottyBackendInterface.symExtensions * @version 1.0 * */ -trait BCodeHelpers extends BCodeIdiomatic with BytecodeWriters { +trait BCodeHelpers extends BCodeIdiomatic { // for some reason singleton types aren't allowed in constructor calls. will need several casts in code to enforce - //import global._ - //import bTypes._ - //import coreBTypes._ import bTypes._ import tpd._ import coreBTypes._ import int.{_, given} import DottyBackendInterface._ + // We need to access GenBCode phase to get access to post-processor components. + // At this point it should always be initialized already. 
+ protected lazy val backendUtils = genBCodePhase.asInstanceOf[GenBCode].postProcessor.backendUtils + def ScalaATTRName: String = "Scala" def ScalaSignatureATTRName: String = "ScalaSig" @@ -61,100 +62,15 @@ trait BCodeHelpers extends BCodeIdiomatic with BytecodeWriters { @threadUnsafe lazy val AnnotationRetentionSourceAttr: TermSymbol = requiredClass("java.lang.annotation.RetentionPolicy").linkedClass.requiredValue("SOURCE") @threadUnsafe lazy val AnnotationRetentionClassAttr: TermSymbol = requiredClass("java.lang.annotation.RetentionPolicy").linkedClass.requiredValue("CLASS") @threadUnsafe lazy val AnnotationRetentionRuntimeAttr: TermSymbol = requiredClass("java.lang.annotation.RetentionPolicy").linkedClass.requiredValue("RUNTIME") - @threadUnsafe lazy val JavaAnnotationClass: ClassSymbol = requiredClass("java.lang.annotation.Annotation") val bCodeAsmCommon: BCodeAsmCommon[int.type] = new BCodeAsmCommon(int) - /* - * must-single-thread - */ - def getFileForClassfile(base: AbstractFile, clsName: String, suffix: String): AbstractFile = { - getFile(base, clsName, suffix) - } - - /* - * must-single-thread - */ - def getOutFolder(csym: Symbol, cName: String): AbstractFile = { - try { - outputDirectory - } catch { - case ex: Throwable => - report.error(s"Couldn't create file for class $cName\n${ex.getMessage}", ctx.source.atSpan(csym.span)) - null - } - } - final def traitSuperAccessorName(sym: Symbol): String = { val nameString = sym.javaSimpleName.toString if (sym.name == nme.TRAIT_CONSTRUCTOR) nameString else nameString + "$" } - // ----------------------------------------------------------------------------------------- - // finding the least upper bound in agreement with the bytecode verifier (given two internal names handed by ASM) - // Background: - // http://gallium.inria.fr/~xleroy/publi/bytecode-verification-JAR.pdf - // http://comments.gmane.org/gmane.comp.java.vm.languages/2293 - // https://issues.scala-lang.org/browse/SI-3872 - // 
----------------------------------------------------------------------------------------- - - /* An `asm.ClassWriter` that uses `jvmWiseLUB()` - * The internal name of the least common ancestor of the types given by inameA and inameB. - * It's what ASM needs to know in order to compute stack map frames, http://asm.ow2.org/doc/developer-guide.html#controlflow - */ - final class CClassWriter(flags: Int) extends asm.ClassWriter(flags) { - - /** - * This method is thread-safe: it depends only on the BTypes component, which does not depend - * on global. TODO @lry move to a different place where no global is in scope, on bTypes. - */ - override def getCommonSuperClass(inameA: String, inameB: String): String = { - val a = classBTypeFromInternalName(inameA) - val b = classBTypeFromInternalName(inameB) - val lub = a.jvmWiseLUB(b) - val lubName = lub.internalName - assert(lubName != "scala/Any") - lubName // ASM caches the answer during the lifetime of a ClassWriter. We outlive that. Not sure whether caching on our side would improve things. - } - } - - /* - * must-single-thread - */ - def initBytecodeWriter(): BytecodeWriter = { - (None: Option[AbstractFile] /*getSingleOutput*/) match { // todo: implement - case Some(f) if f.hasExtension("jar") => - new DirectToJarfileWriter(f.file) - case _ => - factoryNonJarBytecodeWriter() - } - } - - /* - * Populates the InnerClasses JVM attribute with `refedInnerClasses`. See also the doc on inner - * classes in BTypes.scala. - * - * `refedInnerClasses` may contain duplicates, need not contain the enclosing inner classes of - * each inner class it lists (those are looked up and included). - * - * This method serializes in the InnerClasses JVM attribute in an appropriate order, - * not necessarily that given by `refedInnerClasses`. 
- * - * can-multi-thread - */ - final def addInnerClasses(jclass: asm.ClassVisitor, declaredInnerClasses: List[ClassBType], refedInnerClasses: List[ClassBType]): Unit = { - // sorting ensures nested classes are listed after their enclosing class thus satisfying the Eclipse Java compiler - val allNestedClasses = new mutable.TreeSet[ClassBType]()(Ordering.by(_.internalName)) - allNestedClasses ++= declaredInnerClasses - refedInnerClasses.foreach(allNestedClasses ++= _.enclosingNestedClassesChain) - for nestedClass <- allNestedClasses - do { - // Extract the innerClassEntry - we know it exists, enclosingNestedClassesChain only returns nested classes. - val Some(e) = nestedClass.innerClassAttributeEntry: @unchecked - jclass.visitInnerClass(e.name, e.outerName, e.innerName, e.flags) - } - } /* * can-multi-thread @@ -415,7 +331,7 @@ trait BCodeHelpers extends BCodeIdiomatic with BytecodeWriters { arrAnnotV.visitEnd() } // for the lazy val in ScalaSigBytes to be GC'ed, the invoker of emitAnnotations() should hold the ScalaSigBytes in a method-local var that doesn't escape. 
*/ - case t @ Apply(constr, args) if t.tpe.derivesFrom(JavaAnnotationClass) => + case t @ Apply(constr, args) if t.tpe.classSymbol.is(JavaAnnotation) => val typ = t.tpe.classSymbol.denot.info val assocs = assocsFromApply(t) val desc = innerClasesStore.typeDescriptor(typ) // the class descriptor of the nested annotation class @@ -423,7 +339,7 @@ trait BCodeHelpers extends BCodeIdiomatic with BytecodeWriters { emitAssocs(nestedVisitor, assocs, bcodeStore)(innerClasesStore) case t => - report.error(ex"Annotation argument is not a constant", t.sourcePos) + report.error(em"Annotation argument is not a constant", t.sourcePos) } } @@ -681,7 +597,7 @@ trait BCodeHelpers extends BCodeIdiomatic with BytecodeWriters { val mirrorClass = new asm.tree.ClassNode mirrorClass.visit( - classfileVersion, + backendUtils.classfileVersion, bType.info.flags, mirrorName, null /* no java-generic-signature */, @@ -872,10 +788,11 @@ trait BCodeHelpers extends BCodeIdiomatic with BytecodeWriters { try body catch { case ex: Throwable => - report.error(i"""|compiler bug: created invalid generic signature for $sym in ${sym.denot.owner.showFullName} - |signature: $sig - |if this is reproducible, please report bug at https://github.com/lampepfl/dotty/issues - """.trim, sym.sourcePos) + report.error( + em"""|compiler bug: created invalid generic signature for $sym in ${sym.denot.owner.showFullName} + |signature: $sig + |if this is reproducible, please report bug at https://github.com/lampepfl/dotty/issues + """, sym.sourcePos) throw ex } } diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeIdiomatic.scala b/compiler/src/dotty/tools/backend/jvm/BCodeIdiomatic.scala index 02268c2919ba..42f8ef7f4ef6 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeIdiomatic.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeIdiomatic.scala @@ -19,51 +19,13 @@ import dotty.tools.dotc.report */ trait BCodeIdiomatic { val int: DottyBackendInterface - final lazy val bTypes = new 
BTypesFromSymbols[int.type](int) + val bTypes: BTypesFromSymbols[int.type] import int.{_, given} import bTypes._ import coreBTypes._ - - lazy val target = - val releaseValue = Option(ctx.settings.javaOutputVersion.value).filter(_.nonEmpty) - val targetValue = Option(ctx.settings.XuncheckedJavaOutputVersion.value).filter(_.nonEmpty) - val defaultTarget = "8" - (releaseValue, targetValue) match - case (Some(release), None) => release - case (None, Some(target)) => target - case (Some(release), Some(_)) => - report.warning(s"The value of ${ctx.settings.XuncheckedJavaOutputVersion.name} was overridden by ${ctx.settings.javaOutputVersion.name}") - release - case (None, None) => "8" // least supported version by default - - - // Keep synchronized with `minTargetVersion` and `maxTargetVersion` in ScalaSettings - lazy val classfileVersion: Int = target match { - case "8" => asm.Opcodes.V1_8 - case "9" => asm.Opcodes.V9 - case "10" => asm.Opcodes.V10 - case "11" => asm.Opcodes.V11 - case "12" => asm.Opcodes.V12 - case "13" => asm.Opcodes.V13 - case "14" => asm.Opcodes.V14 - case "15" => asm.Opcodes.V15 - case "16" => asm.Opcodes.V16 - case "17" => asm.Opcodes.V17 - case "18" => asm.Opcodes.V18 - case "19" => asm.Opcodes.V19 - } - - lazy val majorVersion: Int = (classfileVersion & 0xFF) - lazy val emitStackMapFrame = (majorVersion >= 50) - - val extraProc: Int = - import GenBCodeOps.addFlagIf - asm.ClassWriter.COMPUTE_MAXS - .addFlagIf(emitStackMapFrame, asm.ClassWriter.COMPUTE_FRAMES) - lazy val JavaStringBuilderClassName = jlStringBuilderRef.internalName val CLASS_CONSTRUCTOR_NAME = "" @@ -619,6 +581,16 @@ trait BCodeIdiomatic { // can-multi-thread final def drop(tk: BType): Unit = { emit(if (tk.isWideType) Opcodes.POP2 else Opcodes.POP) } + // can-multi-thread + final def dropMany(size: Int): Unit = { + var s = size + while s >= 2 do + emit(Opcodes.POP2) + s -= 2 + if s > 0 then + emit(Opcodes.POP) + } + // can-multi-thread final def dup(tk: BType): Unit = { emit(if 
(tk.isWideType) Opcodes.DUP2 else Opcodes.DUP) } diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala b/compiler/src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala index a524d5fb5a8b..0a11fb898b48 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala @@ -45,7 +45,7 @@ trait BCodeSkelBuilder extends BCodeHelpers { /** The value is put on the stack, and control flows through to the next opcode. */ case FallThrough /** The value is put on the stack, and control flow is transferred to the given `label`. */ - case Jump(label: asm.Label) + case Jump(label: asm.Label, targetStackHeight: Int) /** The value is RETURN'ed from the enclosing method. */ case Return /** The value is ATHROW'n. */ @@ -151,7 +151,7 @@ trait BCodeSkelBuilder extends BCodeHelpers { // !!! Part of this logic is duplicated in JSCodeGen.genCompilationUnit claszSymbol.info.decls.foreach { f => - if f.isField && !f.name.is(LazyBitMapName) then + if f.isField && !f.name.is(LazyBitMapName) && !f.name.is(LazyLocalName) then f.setFlag(JavaStatic) } @@ -271,7 +271,7 @@ trait BCodeSkelBuilder extends BCodeHelpers { val flags = javaFlags(claszSymbol) val thisSignature = getGenericSignature(claszSymbol, claszSymbol.owner) - cnode.visit(classfileVersion, flags, + cnode.visit(backendUtils.classfileVersion, flags, thisName, thisSignature, superClass, interfaceNames.toArray) @@ -368,6 +368,8 @@ trait BCodeSkelBuilder extends BCodeHelpers { // used by genLoadTry() and genSynchronized() var earlyReturnVar: Symbol = null var shouldEmitCleanup = false + // stack tracking + var stackHeight = 0 // line numbers var lastEmittedLineNr = -1 @@ -504,6 +506,13 @@ trait BCodeSkelBuilder extends BCodeHelpers { loc } + def makeTempLocal(tk: BType): Local = + assert(nxtIdx != -1, "not a valid start index") + assert(tk.size > 0, "makeLocal called for a symbol whose type is Unit.") + val loc = Local(tk, "temp", nxtIdx, isSynth = 
true) + nxtIdx += tk.size + loc + // not to be confused with `fieldStore` and `fieldLoad` which also take a symbol but a field-symbol. def store(locSym: Symbol): Unit = { val Local(tk, _, idx, _) = slots(locSym) @@ -547,11 +556,17 @@ trait BCodeSkelBuilder extends BCodeHelpers { case _ => false } ) } def lineNumber(tree: Tree): Unit = { + @tailrec + def getNonLabelNode(a: asm.tree.AbstractInsnNode): asm.tree.AbstractInsnNode = a match { + case a: asm.tree.LabelNode => getNonLabelNode(a.getPrevious) + case _ => a + } + if (!emitLines || !tree.span.exists) return; val nr = ctx.source.offsetToLine(tree.span.point) + 1 if (nr != lastEmittedLineNr) { lastEmittedLineNr = nr - lastInsn match { + getNonLabelNode(lastInsn) match { case lnn: asm.tree.LineNumberNode => // overwrite previous landmark as no instructions have been emitted for it lnn.line = nr @@ -574,6 +589,8 @@ trait BCodeSkelBuilder extends BCodeHelpers { earlyReturnVar = null shouldEmitCleanup = false + stackHeight = 0 + lastEmittedLineNr = -1 } @@ -748,7 +765,7 @@ trait BCodeSkelBuilder extends BCodeHelpers { if (params.size > MaximumJvmParameters) { // SI-7324 - report.error(s"Platform restriction: a parameter list's length cannot exceed $MaximumJvmParameters.", ctx.source.atSpan(methSymbol.span)) + report.error(em"Platform restriction: a parameter list's length cannot exceed $MaximumJvmParameters.", ctx.source.atSpan(methSymbol.span)) return } @@ -800,9 +817,10 @@ trait BCodeSkelBuilder extends BCodeHelpers { val veryFirstProgramPoint = currProgramPoint() if trimmedRhs == tpd.EmptyTree then - report.error("Concrete method has no definition: " + dd + ( - if (ctx.settings.Ydebug.value) "(found: " + methSymbol.owner.info.decls.toList.mkString(", ") + ")" - else ""), + report.error( + em"Concrete method has no definition: $dd${ + if (ctx.settings.Ydebug.value) "(found: " + methSymbol.owner.info.decls.toList.mkString(", ") + ")" + else ""}", ctx.source.atSpan(NoSpan) ) else diff --git 
a/compiler/src/dotty/tools/backend/jvm/BTypes.scala b/compiler/src/dotty/tools/backend/jvm/BTypes.scala index 57bd343b6658..5539bf44aa17 100644 --- a/compiler/src/dotty/tools/backend/jvm/BTypes.scala +++ b/compiler/src/dotty/tools/backend/jvm/BTypes.scala @@ -14,7 +14,9 @@ import scala.tools.asm * This representation is immutable and independent of the compiler data structures, hence it can * be queried by concurrent threads. */ -abstract class BTypes { +abstract class BTypes { self => + val frontendAccess: PostProcessorFrontendAccess + import frontendAccess.{frontendSynch} val int: DottyBackendInterface import int.given @@ -37,10 +39,7 @@ abstract class BTypes { */ def classBTypeFromInternalName(internalName: String) = classBTypeFromInternalNameMap(internalName) - // Some core BTypes are required here, in class BType, where no Global instance is available. - // The Global is only available in the subclass BTypesFromSymbols. We cannot depend on the actual - // implementation (CoreBTypesProxy) here because it has members that refer to global.Symbol. - val coreBTypes: CoreBTypesProxyGlobalIndependent[this.type] + val coreBTypes: CoreBTypes { val bTypes: self.type} import coreBTypes._ /** @@ -862,3 +861,12 @@ abstract class BTypes { */ /*final*/ case class MethodNameAndType(name: String, methodType: MethodBType) } + +object BTypes { + /** + * A marker for strings that represent class internal names. + * Ideally the type would be incompatible with String, for example by making it a value class. + * But that would create overhead in a Collection[InternalName]. 
+ */ + type InternalName = String +} diff --git a/compiler/src/dotty/tools/backend/jvm/BTypesFromSymbols.scala b/compiler/src/dotty/tools/backend/jvm/BTypesFromSymbols.scala index 54dafe6f0032..884dd19ee64f 100644 --- a/compiler/src/dotty/tools/backend/jvm/BTypesFromSymbols.scala +++ b/compiler/src/dotty/tools/backend/jvm/BTypesFromSymbols.scala @@ -14,20 +14,14 @@ import dotty.tools.dotc.core.Symbols._ import dotty.tools.dotc.core.Phases.Phase import dotty.tools.dotc.transform.SymUtils._ import dotty.tools.dotc.core.StdNames +import dotty.tools.dotc.core.Phases /** * This class mainly contains the method classBTypeFromSymbol, which extracts the necessary * information from a symbol and its type to create the corresponding ClassBType. It requires * access to the compiler (global parameter). - * - * The mixin CoreBTypes defines core BTypes that are used in the backend. Building these BTypes - * uses classBTypeFromSymbol, hence requires access to the compiler (global). - * - * BTypesFromSymbols extends BTypes because the implementation of BTypes requires access to some - * of the core btypes. They are declared in BTypes as abstract members. Note that BTypes does - * not have access to the compiler instance. */ -class BTypesFromSymbols[I <: DottyBackendInterface](val int: I) extends BTypes { +class BTypesFromSymbols[I <: DottyBackendInterface](val int: I, val frontendAccess: PostProcessorFrontendAccess) extends BTypes { import int.{_, given} import DottyBackendInterface.{symExtensions, _} @@ -37,39 +31,18 @@ class BTypesFromSymbols[I <: DottyBackendInterface](val int: I) extends BTypes { val bCodeAsmCommon: BCodeAsmCommon[int.type ] = new BCodeAsmCommon(int) import bCodeAsmCommon._ - // Why the proxy, see documentation of class [[CoreBTypes]]. 
- val coreBTypes: CoreBTypesProxy[this.type] = new CoreBTypesProxy[this.type](this) - import coreBTypes._ - - final def intializeCoreBTypes(): Unit = { - coreBTypes.setBTypes(new CoreBTypes[this.type](this)) - } - - private[this] val perRunCaches: Caches = new Caches { - def newAnyRefMap[K <: AnyRef, V](): mutable.AnyRefMap[K, V] = new mutable.AnyRefMap[K, V]() - def newWeakMap[K, V](): mutable.WeakHashMap[K, V] = new mutable.WeakHashMap[K, V]() - def recordCache[T <: Clearable](cache: T): T = cache - def newMap[K, V](): mutable.HashMap[K, V] = new mutable.HashMap[K, V]() - def newSet[K](): mutable.Set[K] = new mutable.HashSet[K] - } - - // TODO remove abstraction - private abstract class Caches { - def recordCache[T <: Clearable](cache: T): T - def newWeakMap[K, V](): collection.mutable.WeakHashMap[K, V] - def newMap[K, V](): collection.mutable.HashMap[K, V] - def newSet[K](): collection.mutable.Set[K] - def newAnyRefMap[K <: AnyRef, V](): collection.mutable.AnyRefMap[K, V] + val coreBTypes = new CoreBTypesFromSymbols[I]{ + val bTypes: BTypesFromSymbols.this.type = BTypesFromSymbols.this } + import coreBTypes._ - @threadUnsafe protected lazy val classBTypeFromInternalNameMap = { - perRunCaches.recordCache(collection.concurrent.TrieMap.empty[String, ClassBType]) - } + @threadUnsafe protected lazy val classBTypeFromInternalNameMap = + collection.concurrent.TrieMap.empty[String, ClassBType] /** * Cache for the method classBTypeFromSymbol. */ - @threadUnsafe private lazy val convertedClasses = perRunCaches.newMap[Symbol, ClassBType]() + @threadUnsafe private lazy val convertedClasses = collection.mutable.HashMap.empty[Symbol, ClassBType] /** * The ClassBType for a class symbol `sym`. 
diff --git a/compiler/src/dotty/tools/backend/jvm/BackendUtils.scala b/compiler/src/dotty/tools/backend/jvm/BackendUtils.scala new file mode 100644 index 000000000000..2eaaccdd441d --- /dev/null +++ b/compiler/src/dotty/tools/backend/jvm/BackendUtils.scala @@ -0,0 +1,182 @@ +package dotty.tools.backend.jvm + +import scala.tools.asm +import scala.tools.asm.Handle +import scala.tools.asm.tree.InvokeDynamicInsnNode +import asm.tree.ClassNode +import scala.collection.mutable +import scala.jdk.CollectionConverters._ +import dotty.tools.dotc.report + +import scala.language.unsafeNulls + +/** + * This component hosts tools and utilities used in the backend that require access to a `BTypes` + * instance. + */ +class BackendUtils(val postProcessor: PostProcessor) { + import postProcessor.{bTypes, frontendAccess} + import frontendAccess.{compilerSettings} + import bTypes.* + import coreBTypes.jliLambdaMetaFactoryAltMetafactoryHandle + + // Keep synchronized with `minTargetVersion` and `maxTargetVersion` in ScalaSettings + lazy val classfileVersion: Int = compilerSettings.target match { + case "8" => asm.Opcodes.V1_8 + case "9" => asm.Opcodes.V9 + case "10" => asm.Opcodes.V10 + case "11" => asm.Opcodes.V11 + case "12" => asm.Opcodes.V12 + case "13" => asm.Opcodes.V13 + case "14" => asm.Opcodes.V14 + case "15" => asm.Opcodes.V15 + case "16" => asm.Opcodes.V16 + case "17" => asm.Opcodes.V17 + case "18" => asm.Opcodes.V18 + case "19" => asm.Opcodes.V19 + case "20" => asm.Opcodes.V20 + case "21" => asm.Opcodes.V21 + } + + lazy val extraProc: Int = { + import GenBCodeOps.addFlagIf + val majorVersion: Int = (classfileVersion & 0xFF) + val emitStackMapFrame = (majorVersion >= 50) + asm.ClassWriter.COMPUTE_MAXS + .addFlagIf(emitStackMapFrame, asm.ClassWriter.COMPUTE_FRAMES) + } + + def collectSerializableLambdas(classNode: ClassNode): Array[Handle] = { + val indyLambdaBodyMethods = new mutable.ArrayBuffer[Handle] + for (m <- classNode.methods.asScala) { + val iter = 
m.instructions.iterator + while (iter.hasNext) { + val insn = iter.next() + insn match { + case indy: InvokeDynamicInsnNode + if indy.bsm == jliLambdaMetaFactoryAltMetafactoryHandle => + import java.lang.invoke.LambdaMetafactory.FLAG_SERIALIZABLE + val metafactoryFlags = indy.bsmArgs(3).asInstanceOf[Integer].toInt + val isSerializable = (metafactoryFlags & FLAG_SERIALIZABLE) != 0 + if isSerializable then + val implMethod = indy.bsmArgs(1).asInstanceOf[Handle] + indyLambdaBodyMethods += implMethod + case _ => + } + } + } + indyLambdaBodyMethods.toArray + } + + /* + * Add: + * + * private static Object $deserializeLambda$(SerializedLambda l) { + * try return indy[scala.runtime.LambdaDeserialize.bootstrap, targetMethodGroup$0](l) + * catch { + * case i: IllegalArgumentException => + * try return indy[scala.runtime.LambdaDeserialize.bootstrap, targetMethodGroup$1](l) + * catch { + * case i: IllegalArgumentException => + * ... + * return indy[scala.runtime.LambdaDeserialize.bootstrap, targetMethodGroup${NUM_GROUPS-1}](l) + * } + * + * We use invokedynamic here to enable caching within the deserializer without needing to + * host a static field in the enclosing class. This allows us to add this method to interfaces + * that define lambdas in default methods. + * + * SI-10232 we can't pass arbitrary number of method handles to the final varargs parameter of the bootstrap + * method due to a limitation in the JVM. Instead, we emit a separate invokedynamic bytecode for each group of target + * methods. + */ + def addLambdaDeserialize(classNode: ClassNode, implMethodsArray: Array[Handle]): Unit = { + import asm.Opcodes._ + import bTypes._ + import coreBTypes._ + + val cw = classNode + + // Make sure to reference the ClassBTypes of all types that are used in the code generated + // here (e.g. java/util/Map) are initialized. Initializing a ClassBType adds it to + // `classBTypeFromInternalNameMap`. 
When writing the classfile, the asm ClassWriter computes + // stack map frames and invokes the `getCommonSuperClass` method. This method expects all + // ClassBTypes mentioned in the source code to exist in the map. + + val serlamObjDesc = MethodBType(jliSerializedLambdaRef :: Nil, ObjectRef).descriptor + + val mv = cw.visitMethod(ACC_PRIVATE + ACC_STATIC + ACC_SYNTHETIC, "$deserializeLambda$", serlamObjDesc, null, null) + def emitLambdaDeserializeIndy(targetMethods: Seq[Handle]): Unit = { + mv.visitVarInsn(ALOAD, 0) + mv.visitInvokeDynamicInsn("lambdaDeserialize", serlamObjDesc, jliLambdaDeserializeBootstrapHandle, targetMethods: _*) + } + + val targetMethodGroupLimit = 255 - 1 - 3 // JVM limit. See See MAX_MH_ARITY in CallSite.java + val groups: Array[Array[Handle]] = implMethodsArray.grouped(targetMethodGroupLimit).toArray + val numGroups = groups.length + + import scala.tools.asm.Label + val initialLabels = Array.fill(numGroups - 1)(new Label()) + val terminalLabel = new Label + def nextLabel(i: Int) = if (i == numGroups - 2) terminalLabel else initialLabels(i + 1) + + for ((label, i) <- initialLabels.iterator.zipWithIndex) { + mv.visitTryCatchBlock(label, nextLabel(i), nextLabel(i), jlIllegalArgExceptionRef.internalName) + } + for ((label, i) <- initialLabels.iterator.zipWithIndex) { + mv.visitLabel(label) + emitLambdaDeserializeIndy(groups(i).toIndexedSeq) + mv.visitInsn(ARETURN) + } + mv.visitLabel(terminalLabel) + emitLambdaDeserializeIndy(groups(numGroups - 1).toIndexedSeq) + mv.visitInsn(ARETURN) + } + + /** + * Visit the class node and collect all referenced nested classes. 
+ */ + def collectNestedClasses(classNode: ClassNode): (List[ClassBType], List[ClassBType]) = { + // type InternalName = String + val c = new NestedClassesCollector[ClassBType](nestedOnly = true) { + def declaredNestedClasses(internalName: InternalName): List[ClassBType] = + bTypes.classBTypeFromInternalName(internalName).info.memberClasses + + def getClassIfNested(internalName: InternalName): Option[ClassBType] = { + val c = bTypes.classBTypeFromInternalName(internalName) + Option.when(c.isNestedClass)(c) + } + + def raiseError(msg: String, sig: String, e: Option[Throwable]): Unit = { + // don't crash on invalid generic signatures + } + } + c.visit(classNode) + (c.declaredInnerClasses.toList, c.referredInnerClasses.toList) + } + + /* + * Populates the InnerClasses JVM attribute with `refedInnerClasses`. See also the doc on inner + * classes in BTypes.scala. + * + * `refedInnerClasses` may contain duplicates, need not contain the enclosing inner classes of + * each inner class it lists (those are looked up and included). + * + * This method serializes in the InnerClasses JVM attribute in an appropriate order, + * not necessarily that given by `refedInnerClasses`. + * + * can-multi-thread + */ + final def addInnerClasses(jclass: asm.ClassVisitor, declaredInnerClasses: List[ClassBType], refedInnerClasses: List[ClassBType]): Unit = { + // sorting ensures nested classes are listed after their enclosing class thus satisfying the Eclipse Java compiler + val allNestedClasses = new mutable.TreeSet[ClassBType]()(Ordering.by(_.internalName)) + allNestedClasses ++= declaredInnerClasses + refedInnerClasses.foreach(allNestedClasses ++= _.enclosingNestedClassesChain) + for nestedClass <- allNestedClasses + do { + // Extract the innerClassEntry - we know it exists, enclosingNestedClassesChain only returns nested classes. 
+ val Some(e) = nestedClass.innerClassAttributeEntry: @unchecked + jclass.visitInnerClass(e.name, e.outerName, e.innerName, e.flags) + } + } +} diff --git a/compiler/src/dotty/tools/backend/jvm/ClassfileWriter.scala b/compiler/src/dotty/tools/backend/jvm/ClassfileWriter.scala new file mode 100644 index 000000000000..08e84de92dca --- /dev/null +++ b/compiler/src/dotty/tools/backend/jvm/ClassfileWriter.scala @@ -0,0 +1,142 @@ +package dotty.tools.backend.jvm + +import java.io.{DataOutputStream, IOException, PrintWriter, StringWriter} +import java.nio.file.Files +import java.util.jar.Attributes.Name + +import scala.tools.asm.ClassReader +import scala.tools.asm.tree.ClassNode +import dotty.tools.io.* +import dotty.tools.dotc.core.Decorators.* +import dotty.tools.dotc.util.NoSourcePosition +import java.nio.charset.StandardCharsets +import java.nio.channels.ClosedByInterruptException +import BTypes.InternalName +import scala.language.unsafeNulls + +class ClassfileWriter(frontendAccess: PostProcessorFrontendAccess) { + import frontendAccess.{backendReporting, compilerSettings} + + // if non-null, classfiles are additionally written to this directory + private val dumpOutputDir: AbstractFile = getDirectoryOrNull(compilerSettings.dumpClassesDirectory) + + // if non-null, classfiles are written to a jar instead of the output directory + private val jarWriter: JarWriter | Null = compilerSettings.outputDirectory match { + case jar: JarArchive => + val mainClass = compilerSettings.mainClass.orElse { + // If no main class was specified, see if there's only one + // entry point among the classes going into the jar. 
+ frontendAccess.getEntryPoints match { + case name :: Nil => + backendReporting.log(i"Unique entry point: setting Main-Class to $name") + Some(name) + case names => + if names.isEmpty then backendReporting.warning(em"No Main-Class designated or discovered.") + else backendReporting.warning(em"No Main-Class due to multiple entry points:\n ${names.mkString("\n ")}") + None + } + } + jar.underlyingSource.map{ source => + if jar.isEmpty then + val jarMainAttrs = mainClass.map(Name.MAIN_CLASS -> _).toList + new Jar(source.file).jarWriter(jarMainAttrs: _*) + else + // Writing to non-empty JAR might be an undefined behaviour, e.g. in case if other files where + // created using `AbstractFile.bufferedOutputStream`instead of JarWritter + backendReporting.warning(em"Tried to write to non-empty JAR: $source") + null + }.orNull + + case _ => null + } + + private def getDirectoryOrNull(dir: Option[String]): AbstractFile = + dir.map(d => new PlainDirectory(Directory(d))).orNull + + private def getFile(base: AbstractFile, clsName: String, suffix: String): AbstractFile = { + if (base.file != null) { + fastGetFile(base, clsName, suffix) + } else { + def ensureDirectory(dir: AbstractFile): AbstractFile = + if (dir.isDirectory) dir + else throw new FileConflictException(s"${base.path}/$clsName$suffix: ${dir.path} is not a directory", dir) + var dir = base + val pathParts = clsName.split("[./]").toList + for (part <- pathParts.init) dir = ensureDirectory(dir) subdirectoryNamed part + ensureDirectory(dir) fileNamed pathParts.last + suffix + } + } + + private def fastGetFile(base: AbstractFile, clsName: String, suffix: String) = { + val index = clsName.lastIndexOf('/') + val (packageName, simpleName) = if (index > 0) { + (clsName.substring(0, index), clsName.substring(index + 1)) + } else ("", clsName) + val directory = base.file.toPath.resolve(packageName) + new PlainFile(Path(directory.resolve(simpleName + suffix))) + } + + private def writeBytes(outFile: AbstractFile, bytes: 
Array[Byte]): Unit = { + if (outFile.file != null) { + val outPath = outFile.file.toPath + try Files.write(outPath, bytes) + catch { + case _: java.nio.file.NoSuchFileException => + Files.createDirectories(outPath.getParent) + Files.write(outPath, bytes) + } + } else { + val out = new DataOutputStream(outFile.bufferedOutput) + try out.write(bytes, 0, bytes.length) + finally out.close() + } + } + + def writeClass(className: InternalName, bytes: Array[Byte], sourceFile: AbstractFile): AbstractFile | Null = try { + // val writeStart = Statistics.startTimer(BackendStats.bcodeWriteTimer) + val outFile = writeToJarOrFile(className, bytes, ".class") + // Statistics.stopTimer(BackendStats.bcodeWriteTimer, writeStart) + + if (dumpOutputDir != null) { + val dumpFile = getFile(dumpOutputDir, className, ".class") + writeBytes(dumpFile, bytes) + } + outFile + } catch { + case e: FileConflictException => + backendReporting.error(em"error writing $className: ${e.getMessage}") + null + case e: java.nio.file.FileSystemException => + if compilerSettings.debug then e.printStackTrace() + backendReporting.error(em"error writing $className: ${e.getClass.getName} ${e.getMessage}") + null + } + + def writeTasty(className: InternalName, bytes: Array[Byte]): Unit = + writeToJarOrFile(className, bytes, ".tasty") + + private def writeToJarOrFile(className: InternalName, bytes: Array[Byte], suffix: String): AbstractFile | Null = { + if jarWriter == null then + val outFolder = compilerSettings.outputDirectory + val outFile = getFile(outFolder, className, suffix) + try writeBytes(outFile, bytes) + catch case ex: ClosedByInterruptException => + try outFile.delete() // don't leave an empty or half-written files around after an interrupt + catch case _: Throwable => () + finally throw ex + outFile + else + val path = className + suffix + val out = jarWriter.newOutputStream(path) + try out.write(bytes, 0, bytes.length) + finally out.flush() + null + } + + def close(): Unit = { + if (jarWriter != 
null) jarWriter.close() + } +} + +/** Can't output a file due to the state of the file system. */ +class FileConflictException(msg: String, val file: AbstractFile) extends IOException(msg) diff --git a/compiler/src/dotty/tools/backend/jvm/CodeGen.scala b/compiler/src/dotty/tools/backend/jvm/CodeGen.scala new file mode 100644 index 000000000000..c9f9e4e23d90 --- /dev/null +++ b/compiler/src/dotty/tools/backend/jvm/CodeGen.scala @@ -0,0 +1,181 @@ +package dotty.tools.backend.jvm + +import scala.language.unsafeNulls + +import dotty.tools.dotc.CompilationUnit +import dotty.tools.dotc.ast.Trees.{PackageDef, ValDef} +import dotty.tools.dotc.ast.tpd +import dotty.tools.dotc.core.Phases.Phase + +import scala.collection.mutable +import scala.jdk.CollectionConverters._ +import dotty.tools.dotc.transform.SymUtils._ +import dotty.tools.dotc.interfaces +import dotty.tools.dotc.report + +import java.util.Optional +import dotty.tools.dotc.sbt.ExtractDependencies +import dotty.tools.dotc.core._ +import Contexts._ +import Phases._ +import Symbols._ +import StdNames.nme + +import java.io.DataOutputStream +import java.nio.channels.ClosedByInterruptException + +import dotty.tools.tasty.{ TastyBuffer, TastyHeaderUnpickler } + +import scala.tools.asm +import scala.tools.asm.tree._ +import tpd._ +import dotty.tools.io.AbstractFile +import dotty.tools.dotc.util.NoSourcePosition + + +class CodeGen(val int: DottyBackendInterface, val primitives: DottyPrimitives)( val bTypes: BTypesFromSymbols[int.type]) { self => + import DottyBackendInterface.symExtensions + import bTypes._ + import int.given + + private lazy val mirrorCodeGen = Impl.JMirrorBuilder() + + def genUnit(unit: CompilationUnit): GeneratedDefs = { + val generatedClasses = mutable.ListBuffer.empty[GeneratedClass] + val generatedTasty = mutable.ListBuffer.empty[GeneratedTasty] + + def genClassDef(cd: TypeDef): Unit = + try + val sym = cd.symbol + val sourceFile = unit.source.file + + def registerGeneratedClass(classNode: ClassNode, 
isArtifact: Boolean): Unit = + generatedClasses += GeneratedClass(classNode, sourceFile, isArtifact, onFileCreated(classNode, sym, unit.source)) + + val plainC = genClass(cd, unit) + registerGeneratedClass(plainC, isArtifact = false) + + val attrNode = + if !sym.isTopLevelModuleClass then plainC + else if sym.companionClass == NoSymbol then + val mirrorC = genMirrorClass(sym, unit) + registerGeneratedClass(mirrorC, isArtifact = true) + mirrorC + else + report.log(s"No mirror class for module with linked class: ${sym.fullName}", NoSourcePosition) + plainC + + if sym.isClass then + genTastyAndSetAttributes(sym, attrNode) + catch + case ex: Throwable => + ex.printStackTrace() + report.error(s"Error while emitting ${unit.source}\n${ex.getMessage}", NoSourcePosition) + + + def genTastyAndSetAttributes(claszSymbol: Symbol, store: ClassNode): Unit = + import Impl.createJAttribute + for (binary <- unit.pickled.get(claszSymbol.asClass)) { + generatedTasty += GeneratedTasty(store, binary) + val tasty = + val uuid = new TastyHeaderUnpickler(binary()).readHeader() + val lo = uuid.getMostSignificantBits + val hi = uuid.getLeastSignificantBits + + // TASTY attribute is created but only the UUID bytes are stored in it. + // A TASTY attribute has length 16 if and only if the .tasty file exists. 
+ val buffer = new TastyBuffer(16) + buffer.writeUncompressedLong(lo) + buffer.writeUncompressedLong(hi) + buffer.bytes + + val dataAttr = createJAttribute(nme.TASTYATTR.mangledString, tasty, 0, tasty.length) + store.visitAttribute(dataAttr) + } + + def genClassDefs(tree: Tree): Unit = + tree match { + case EmptyTree => () + case PackageDef(_, stats) => stats foreach genClassDefs + case ValDef(_, _, _) => () // module val not emitted + case td: TypeDef => genClassDef(td) + } + + genClassDefs(unit.tpdTree) + GeneratedDefs(generatedClasses.toList, generatedTasty.toList) + } + + // Creates a callback that will be evaluated in PostProcessor after creating a file + private def onFileCreated(cls: ClassNode, claszSymbol: Symbol, sourceFile: interfaces.SourceFile): AbstractFile => Unit = clsFile => { + val (fullClassName, isLocal) = atPhase(sbtExtractDependenciesPhase) { + (ExtractDependencies.classNameAsString(claszSymbol), claszSymbol.isLocal) + } + + val className = cls.name.replace('/', '.') + if (ctx.compilerCallback != null) + ctx.compilerCallback.onClassGenerated(sourceFile, convertAbstractFile(clsFile), className) + + if (ctx.sbtCallback != null) { + val jSourceFile = sourceFile.jfile.orElse(null) + val cb = ctx.sbtCallback + if (isLocal) cb.generatedLocalClass(jSourceFile, clsFile.file) + else cb.generatedNonLocalClass(jSourceFile, clsFile.file, className, fullClassName) + } + } + + /** Convert a `dotty.tools.io.AbstractFile` into a + * `dotty.tools.dotc.interfaces.AbstractFile`. 
+ */ + private def convertAbstractFile(absfile: dotty.tools.io.AbstractFile): interfaces.AbstractFile = + new interfaces.AbstractFile { + override def name = absfile.name + override def path = absfile.path + override def jfile = Optional.ofNullable(absfile.file) + } + + private def genClass(cd: TypeDef, unit: CompilationUnit): ClassNode = { + val b = new Impl.PlainClassBuilder(unit) + b.genPlainClass(cd) + val cls = b.cnode + checkForCaseConflict(cls.name, cd.symbol) + cls + } + + private def genMirrorClass(classSym: Symbol, unit: CompilationUnit): ClassNode = { + val cls = mirrorCodeGen.genMirrorClass(classSym, unit) + checkForCaseConflict(cls.name, classSym) + cls + } + + private val lowerCaseNames = mutable.HashMap.empty[String, Symbol] + private def checkForCaseConflict(javaClassName: String, classSymbol: Symbol) = { + val lowerCaseName = javaClassName.toLowerCase + lowerCaseNames.get(lowerCaseName) match { + case None => + lowerCaseNames.put(lowerCaseName, classSymbol) + case Some(dupClassSym) => + // Order is not deterministic so we enforce lexicographic order between the duplicates for error-reporting + val (cl1, cl2) = + if (classSymbol.effectiveName.toString < dupClassSym.effectiveName.toString) (classSymbol, dupClassSym) + else (dupClassSym, classSymbol) + val same = classSymbol.effectiveName.toString == dupClassSym.effectiveName.toString + atPhase(typerPhase) { + if same then + // FIXME: This should really be an error, but then FromTasty tests fail + report.warning(s"${cl1.show} and ${cl2.showLocated} produce classes that overwrite one another", cl1.sourcePos) + else + report.warning(s"${cl1.show} differs only in case from ${cl2.showLocated}. 
" + + "Such classes will overwrite one another on case-insensitive filesystems.", cl1.sourcePos) + } + } + } + + sealed transparent trait ImplEarlyInit{ + val int: self.int.type = self.int + val bTypes: self.bTypes.type = self.bTypes + protected val primitives: DottyPrimitives = self.primitives + } + object Impl extends ImplEarlyInit with BCodeSyncAndTry { + class PlainClassBuilder(unit: CompilationUnit) extends SyncAndTryBuilder(unit) + } +} diff --git a/compiler/src/dotty/tools/backend/jvm/CoreBTypes.scala b/compiler/src/dotty/tools/backend/jvm/CoreBTypes.scala index e94bda16fbb8..30ad6b29b9f0 100644 --- a/compiler/src/dotty/tools/backend/jvm/CoreBTypes.scala +++ b/compiler/src/dotty/tools/backend/jvm/CoreBTypes.scala @@ -7,38 +7,58 @@ import dotty.tools.dotc.core.Symbols._ import dotty.tools.dotc.transform.Erasure import scala.tools.asm.{Handle, Opcodes} import dotty.tools.dotc.core.StdNames +import BTypes.InternalName + +abstract class CoreBTypes { + val bTypes: BTypes + import bTypes._ + + def primitiveTypeMap: Map[Symbol, PrimitiveBType] + + def boxedClasses: Set[ClassBType] + + def boxedClassOfPrimitive: Map[PrimitiveBType, ClassBType] + + def boxResultType: Map[Symbol, ClassBType] + + def unboxResultType: Map[Symbol, PrimitiveBType] + + def srNothingRef : ClassBType + def srNullRef : ClassBType + + def ObjectRef : ClassBType + def StringRef : ClassBType + def jlStringBuilderRef : ClassBType + def jlStringBufferRef : ClassBType + def jlCharSequenceRef : ClassBType + def jlClassRef : ClassBType + def jlThrowableRef : ClassBType + def jlCloneableRef : ClassBType + def jiSerializableRef : ClassBType + def jlClassCastExceptionRef : ClassBType + def jlIllegalArgExceptionRef : ClassBType + def jliSerializedLambdaRef : ClassBType + + def srBoxesRuntimeRef: ClassBType + + def jliLambdaMetaFactoryMetafactoryHandle : Handle + def jliLambdaMetaFactoryAltMetafactoryHandle : Handle + def jliLambdaDeserializeBootstrapHandle : Handle + def 
jliStringConcatFactoryMakeConcatWithConstantsHandle: Handle + + def asmBoxTo : Map[BType, MethodNameAndType] + def asmUnboxTo: Map[BType, MethodNameAndType] + + def typeOfArrayOp: Map[Int, BType] +} + +abstract class CoreBTypesFromSymbols[I <: DottyBackendInterface] extends CoreBTypes { + val bTypes: BTypesFromSymbols[I] -/** - * Core BTypes and some other definitions. The initialization of these definitions requies access - * to symbols / types (global). - * - * The symbols used to initialize the ClassBTypes may change from one compiler run to the next. To - * make sure the definitions are consistent with the symbols in the current run, the - * `intializeCoreBTypes` method in BTypesFromSymbols creates a new instance of CoreBTypes in each - * compiler run. - * - * The class BTypesFromSymbols does not directly reference CoreBTypes, but CoreBTypesProxy. The - * reason is that having a `var bTypes: CoreBTypes` would not allow `import bTypes._`. Instead, the - * proxy class holds a `CoreBTypes` in a variable field and forwards to this instance. - * - * The definitions in `CoreBTypes` need to be lazy vals to break an initialization cycle. When - * creating a new instance to assign to the proxy, the `classBTypeFromSymbol` invoked in the - * constructor will actucally go through the proxy. The lazy vals make sure the instance is assigned - * in the proxy before the fields are initialized. - * - * Note: if we did not re-create the core BTypes on each compiler run, BType.classBTypeFromInternalNameMap - * could not be a perRunCache anymore: the classes defeined here need to be in that map, they are - * added when the ClassBTypes are created. The per run cache removes them, so they would be missing - * in the second run. 
- */ -class CoreBTypes[BTFS <: BTypesFromSymbols[_ <: DottyBackendInterface]](val bTypes: BTFS) { import bTypes._ import int.given import DottyBackendInterface._ - - //import global._ - //import rootMirror.{requiredClass, getClassIfDefined} - //import definitions._ + import dotty.tools.dotc.core.Contexts.Context /** * Maps primitive types to their corresponding PrimitiveBType. The map is defined lexically above @@ -56,31 +76,21 @@ class CoreBTypes[BTFS <: BTypesFromSymbols[_ <: DottyBackendInterface]](val bTyp defn.DoubleClass -> DOUBLE ) - private lazy val BOXED_UNIT : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Void]) - private lazy val BOXED_BOOLEAN : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Boolean]) - private lazy val BOXED_BYTE : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Byte]) - private lazy val BOXED_SHORT : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Short]) - private lazy val BOXED_CHAR : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Character]) - private lazy val BOXED_INT : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Integer]) - private lazy val BOXED_LONG : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Long]) - private lazy val BOXED_FLOAT : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Float]) - private lazy val BOXED_DOUBLE : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Double]) - /** * Map from primitive types to their boxed class type. Useful when pushing class literals onto the * operand stack (ldc instruction taking a class literal), see genConstant. 
*/ lazy val boxedClassOfPrimitive: Map[PrimitiveBType, ClassBType] = Map( - UNIT -> BOXED_UNIT, - BOOL -> BOXED_BOOLEAN, - BYTE -> BOXED_BYTE, - SHORT -> BOXED_SHORT, - CHAR -> BOXED_CHAR, - INT -> BOXED_INT, - LONG -> BOXED_LONG, - FLOAT -> BOXED_FLOAT, - DOUBLE -> BOXED_DOUBLE - ) + UNIT -> classBTypeFromSymbol(requiredClass[java.lang.Void]), + BOOL -> classBTypeFromSymbol(requiredClass[java.lang.Boolean]), + BYTE -> classBTypeFromSymbol(requiredClass[java.lang.Byte]), + SHORT -> classBTypeFromSymbol(requiredClass[java.lang.Short]), + CHAR -> classBTypeFromSymbol(requiredClass[java.lang.Character]), + INT -> classBTypeFromSymbol(requiredClass[java.lang.Integer]), + LONG -> classBTypeFromSymbol(requiredClass[java.lang.Long]), + FLOAT -> classBTypeFromSymbol(requiredClass[java.lang.Float]), + DOUBLE -> classBTypeFromSymbol(requiredClass[java.lang.Double]) + ) lazy val boxedClasses: Set[ClassBType] = boxedClassOfPrimitive.values.toSet @@ -114,33 +124,35 @@ class CoreBTypes[BTFS <: BTypesFromSymbols[_ <: DottyBackendInterface]](val bTyp * names of NothingClass and NullClass can't be emitted as-is. * TODO @lry Once there's a 2.11.3 starr, use the commented argument list. 
The current starr crashes on the type literal `scala.runtime.Nothing$` */ - lazy val srNothingRef : ClassBType = classBTypeFromSymbol(requiredClass("scala.runtime.Nothing$")) // (requiredClass[scala.runtime.Nothing$]) - lazy val srNullRef : ClassBType = classBTypeFromSymbol(requiredClass("scala.runtime.Null$")) // (requiredClass[scala.runtime.Null$]) - - lazy val ObjectRef : ClassBType = classBTypeFromSymbol(defn.ObjectClass) - lazy val StringRef : ClassBType = classBTypeFromSymbol(defn.StringClass) - lazy val jlStringBuilderRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.StringBuilder]) - lazy val jlStringBufferRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.StringBuffer]) - lazy val jlCharSequenceRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.CharSequence]) - lazy val jlClassRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Class[_]]) - lazy val jlThrowableRef : ClassBType = classBTypeFromSymbol(defn.ThrowableClass) - lazy val jlCloneableRef : ClassBType = classBTypeFromSymbol(defn.JavaCloneableClass) // java/lang/Cloneable - lazy val jioSerializableRef : ClassBType = classBTypeFromSymbol(requiredClass[java.io.Serializable]) // java/io/Serializable - lazy val jlClassCastExceptionRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.ClassCastException]) // java/lang/ClassCastException - lazy val jlIllegalArgExceptionRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.IllegalArgumentException]) - lazy val jliSerializedLambdaRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.SerializedLambda]) - - lazy val srBoxesRunTimeRef: ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.BoxesRunTime]) - - private lazy val jliCallSiteRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.CallSite]) - private lazy val jliLambdaMetafactoryRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.LambdaMetafactory]) - private lazy val 
jliMethodHandleRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.MethodHandle]) - private lazy val jliMethodHandlesLookupRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.MethodHandles.Lookup]) - private lazy val jliMethodTypeRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.MethodType]) - private lazy val jliStringConcatFactoryRef : ClassBType = classBTypeFromSymbol(requiredClass("java.lang.invoke.StringConcatFactory")) // since JDK 9 - private lazy val srLambdaDeserialize : ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.LambdaDeserialize]) - - lazy val jliLambdaMetaFactoryMetafactoryHandle: Handle = new Handle( + lazy val srNothingRef : ClassBType = classBTypeFromSymbol(requiredClass("scala.runtime.Nothing$")) + lazy val srNullRef : ClassBType = classBTypeFromSymbol(requiredClass("scala.runtime.Null$")) + + lazy val ObjectRef : ClassBType = classBTypeFromSymbol(defn.ObjectClass) + lazy val StringRef : ClassBType = classBTypeFromSymbol(defn.StringClass) + + lazy val jlStringBuilderRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.StringBuilder]) + lazy val jlStringBufferRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.StringBuffer]) + lazy val jlCharSequenceRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.CharSequence]) + lazy val jlClassRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Class[_]]) + lazy val jlThrowableRef : ClassBType = classBTypeFromSymbol(defn.ThrowableClass) + lazy val jlCloneableRef : ClassBType = classBTypeFromSymbol(defn.JavaCloneableClass) + lazy val jiSerializableRef : ClassBType = classBTypeFromSymbol(requiredClass[java.io.Serializable]) + lazy val jlClassCastExceptionRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.ClassCastException]) + lazy val jlIllegalArgExceptionRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.IllegalArgumentException]) + lazy val 
jliSerializedLambdaRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.SerializedLambda]) + + lazy val srBoxesRuntimeRef: ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.BoxesRunTime]) + + private lazy val jliCallSiteRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.CallSite]) + private lazy val jliLambdaMetafactoryRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.LambdaMetafactory]) + private lazy val jliMethodHandleRef : ClassBType = classBTypeFromSymbol(defn.MethodHandleClass) + private lazy val jliMethodHandlesLookupRef : ClassBType = classBTypeFromSymbol(defn.MethodHandlesLookupClass) + private lazy val jliMethodTypeRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.MethodType]) + private lazy val jliStringConcatFactoryRef : ClassBType = classBTypeFromSymbol(requiredClass("java.lang.invoke.StringConcatFactory")) // since JDK 9 + + lazy val srLambdaDeserialize : ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.LambdaDeserialize]) + + lazy val jliLambdaMetaFactoryMetafactoryHandle = new Handle( Opcodes.H_INVOKESTATIC, jliLambdaMetafactoryRef.internalName, "metafactory", @@ -150,7 +162,7 @@ class CoreBTypes[BTFS <: BTypesFromSymbols[_ <: DottyBackendInterface]](val bTyp ).descriptor, /* itf = */ false) - lazy val jliLambdaMetaFactoryAltMetafactoryHandle: Handle = new Handle( + lazy val jliLambdaMetaFactoryAltMetafactoryHandle = new Handle( Opcodes.H_INVOKESTATIC, jliLambdaMetafactoryRef.internalName, "altMetafactory", @@ -159,7 +171,7 @@ class CoreBTypes[BTFS <: BTypesFromSymbols[_ <: DottyBackendInterface]](val bTyp jliCallSiteRef ).descriptor, /* itf = */ false) - + lazy val jliLambdaDeserializeBootstrapHandle: Handle = new Handle( Opcodes.H_INVOKESTATIC, srLambdaDeserialize.internalName, @@ -179,19 +191,19 @@ class CoreBTypes[BTFS <: BTypesFromSymbols[_ <: DottyBackendInterface]](val bTyp jliCallSiteRef ).descriptor, /* itf = */ false) - + /** * Methods 
in scala.runtime.BoxesRuntime */ lazy val asmBoxTo : Map[BType, MethodNameAndType] = Map( - BOOL -> MethodNameAndType("boxToBoolean", MethodBType(List(BOOL), BOXED_BOOLEAN)), - BYTE -> MethodNameAndType("boxToByte", MethodBType(List(BYTE), BOXED_BYTE)), - CHAR -> MethodNameAndType("boxToCharacter", MethodBType(List(CHAR), BOXED_CHAR)), - SHORT -> MethodNameAndType("boxToShort", MethodBType(List(SHORT), BOXED_SHORT)), - INT -> MethodNameAndType("boxToInteger", MethodBType(List(INT), BOXED_INT)), - LONG -> MethodNameAndType("boxToLong", MethodBType(List(LONG), BOXED_LONG)), - FLOAT -> MethodNameAndType("boxToFloat", MethodBType(List(FLOAT), BOXED_FLOAT)), - DOUBLE -> MethodNameAndType("boxToDouble", MethodBType(List(DOUBLE), BOXED_DOUBLE)) + BOOL -> MethodNameAndType("boxToBoolean", MethodBType(List(BOOL), boxedClassOfPrimitive(BOOL))), + BYTE -> MethodNameAndType("boxToByte", MethodBType(List(BYTE), boxedClassOfPrimitive(BYTE))), + CHAR -> MethodNameAndType("boxToCharacter", MethodBType(List(CHAR), boxedClassOfPrimitive(CHAR))), + SHORT -> MethodNameAndType("boxToShort", MethodBType(List(SHORT), boxedClassOfPrimitive(SHORT))), + INT -> MethodNameAndType("boxToInteger", MethodBType(List(INT), boxedClassOfPrimitive(INT))), + LONG -> MethodNameAndType("boxToLong", MethodBType(List(LONG), boxedClassOfPrimitive(LONG))), + FLOAT -> MethodNameAndType("boxToFloat", MethodBType(List(FLOAT), boxedClassOfPrimitive(FLOAT))), + DOUBLE -> MethodNameAndType("boxToDouble", MethodBType(List(DOUBLE), boxedClassOfPrimitive(DOUBLE))) ) lazy val asmUnboxTo: Map[BType, MethodNameAndType] = Map( @@ -220,75 +232,3 @@ class CoreBTypes[BTFS <: BTypesFromSymbols[_ <: DottyBackendInterface]](val bTyp ) } } - -/** - * This trait make some core BTypes availalbe that don't depend on a Global instance. Some core - * BTypes are required to be accessible in the BTypes trait, which does not have access to Global. 
- * - * BTypes cannot refer to CoreBTypesProxy because some of its members depend on global, for example - * the type Symbol in - * def primitiveTypeMap: Map[Symbol, PrimitiveBType] - */ -trait CoreBTypesProxyGlobalIndependent[BTS <: BTypes] { - val bTypes: BTS - import bTypes._ - - def boxedClasses: Set[ClassBType] - - def srNothingRef : ClassBType - def srNullRef : ClassBType - - def ObjectRef : ClassBType - def jlCloneableRef : ClassBType - def jiSerializableRef : ClassBType -} - -/** - * See comment in class [[CoreBTypes]]. - */ -final class CoreBTypesProxy[BTFS <: BTypesFromSymbols[_ <: DottyBackendInterface]](val bTypes: BTFS) extends CoreBTypesProxyGlobalIndependent[BTFS] { - import bTypes._ - - private var _coreBTypes: CoreBTypes[bTypes.type] = _ - def setBTypes(coreBTypes: CoreBTypes[BTFS]): Unit = { - _coreBTypes = coreBTypes.asInstanceOf[CoreBTypes[bTypes.type]] - } - - def primitiveTypeMap: Map[Symbol, PrimitiveBType] = _coreBTypes.primitiveTypeMap - - def boxedClasses: Set[ClassBType] = _coreBTypes.boxedClasses - - def boxedClassOfPrimitive: Map[PrimitiveBType, ClassBType] = _coreBTypes.boxedClassOfPrimitive - - def boxResultType: Map[Symbol, ClassBType] = _coreBTypes.boxResultType - - def unboxResultType: Map[Symbol, PrimitiveBType] = _coreBTypes.unboxResultType - - def srNothingRef : ClassBType = _coreBTypes.srNothingRef - def srNullRef : ClassBType = _coreBTypes.srNullRef - - def ObjectRef : ClassBType = _coreBTypes.ObjectRef - def StringRef : ClassBType = _coreBTypes.StringRef - def jlStringBuilderRef : ClassBType = _coreBTypes.jlStringBuilderRef - def jlStringBufferRef : ClassBType = _coreBTypes.jlStringBufferRef - def jlCharSequenceRef : ClassBType = _coreBTypes.jlCharSequenceRef - def jlClassRef : ClassBType = _coreBTypes.jlClassRef - def jlThrowableRef : ClassBType = _coreBTypes.jlThrowableRef - def jlCloneableRef : ClassBType = _coreBTypes.jlCloneableRef - def jiSerializableRef : ClassBType = _coreBTypes.jioSerializableRef - def 
jlClassCastExceptionRef : ClassBType = _coreBTypes.jlClassCastExceptionRef - def jlIllegalArgExceptionRef : ClassBType = _coreBTypes.jlIllegalArgExceptionRef - def jliSerializedLambdaRef : ClassBType = _coreBTypes.jliSerializedLambdaRef - - def srBoxesRuntimeRef: ClassBType = _coreBTypes.srBoxesRunTimeRef - - def jliLambdaMetaFactoryMetafactoryHandle : Handle = _coreBTypes.jliLambdaMetaFactoryMetafactoryHandle - def jliLambdaMetaFactoryAltMetafactoryHandle : Handle = _coreBTypes.jliLambdaMetaFactoryAltMetafactoryHandle - def jliLambdaDeserializeBootstrapHandle : Handle = _coreBTypes.jliLambdaDeserializeBootstrapHandle - def jliStringConcatFactoryMakeConcatWithConstantsHandle: Handle = _coreBTypes.jliStringConcatFactoryMakeConcatWithConstantsHandle - - def asmBoxTo : Map[BType, MethodNameAndType] = _coreBTypes.asmBoxTo - def asmUnboxTo: Map[BType, MethodNameAndType] = _coreBTypes.asmUnboxTo - - def typeOfArrayOp: Map[Int, BType] = _coreBTypes.typeOfArrayOp -} diff --git a/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala b/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala index 5461ff81341c..b2278c3f0ce8 100644 --- a/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala +++ b/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala @@ -14,6 +14,7 @@ import Contexts._ import Types._ import Symbols._ import Phases._ +import Decorators.em import dotty.tools.dotc.util.ReadOnlyMap import dotty.tools.dotc.report @@ -21,10 +22,10 @@ import dotty.tools.dotc.report import tpd._ import StdNames.nme -import NameKinds.LazyBitMapName +import NameKinds.{LazyBitMapName, LazyLocalName} import Names.Name -class DottyBackendInterface(val outputDirectory: AbstractFile, val superCallsMap: ReadOnlyMap[Symbol, Set[ClassSymbol]])(using val ctx: Context) { +class DottyBackendInterface(val superCallsMap: ReadOnlyMap[Symbol, Set[ClassSymbol]])(using val ctx: Context) { private val desugared = new java.util.IdentityHashMap[Type, tpd.Select] @@ 
-71,7 +72,7 @@ class DottyBackendInterface(val outputDirectory: AbstractFile, val superCallsMap def _1: Type = field.tpe match { case JavaArrayType(elem) => elem case _ => - report.error(s"JavaSeqArray with type ${field.tpe} reached backend: $field", ctx.source.atSpan(field.span)) + report.error(em"JavaSeqArray with type ${field.tpe} reached backend: $field", ctx.source.atSpan(field.span)) UnspecifiedErrorType } def _2: List[Tree] = field.elems @@ -128,10 +129,11 @@ object DottyBackendInterface { * the new lazy val encoding: https://github.com/lampepfl/dotty/issues/7140 */ def isStaticModuleField(using Context): Boolean = - sym.owner.isStaticModuleClass && sym.isField && !sym.name.is(LazyBitMapName) + sym.owner.isStaticModuleClass && sym.isField && !sym.name.is(LazyBitMapName) && !sym.name.is(LazyLocalName) def isStaticMember(using Context): Boolean = (sym ne NoSymbol) && - (sym.is(JavaStatic) || sym.isScalaStatic || sym.isStaticModuleField) + (sym.is(JavaStatic) || sym.isScalaStatic || sym.isStaticModuleField) + // guard against no sumbol cause this code is executed to select which call type(static\dynamic) to use to call array.clone /** diff --git a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala index 73e8fd9edb3b..469a6ea57679 100644 --- a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala +++ b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala @@ -1,42 +1,16 @@ package dotty.tools.backend.jvm -import scala.language.unsafeNulls - import dotty.tools.dotc.CompilationUnit -import dotty.tools.dotc.ast.Trees.{PackageDef, ValDef} -import dotty.tools.dotc.ast.tpd import dotty.tools.dotc.core.Phases.Phase - -import scala.collection.mutable -import scala.jdk.CollectionConverters._ -import dotty.tools.dotc.transform.SymUtils._ -import dotty.tools.dotc.interfaces import dotty.tools.dotc.report - -import dotty.tools.dotc.util.SourceFile -import java.util.Optional - import dotty.tools.dotc.core._ -import 
dotty.tools.dotc.sbt.ExtractDependencies +import dotty.tools.dotc.interfaces.CompilerCallback import Contexts._ -import Phases._ import Symbols._ - -import java.io.DataOutputStream -import java.nio.channels.ClosedByInterruptException - -import dotty.tools.tasty.{ TastyBuffer, TastyHeaderUnpickler } - -import scala.tools.asm -import scala.tools.asm.Handle -import scala.tools.asm.tree._ -import tpd._ -import StdNames._ import dotty.tools.io._ -import scala.tools.asm.MethodTooLargeException -import scala.tools.asm.ClassTooLargeException +import scala.collection.mutable -class GenBCode extends Phase { +class GenBCode extends Phase { self => override def phaseName: String = GenBCode.name @@ -51,618 +25,85 @@ class GenBCode extends Phase { private val entryPoints = new mutable.HashSet[String]() def registerEntryPoint(s: String): Unit = entryPoints += s - private var myOutput: AbstractFile = _ - - private def outputDir(using Context): AbstractFile = { - if (myOutput eq null) - myOutput = ctx.settings.outputDir.value - myOutput + private var _backendInterface: DottyBackendInterface = _ + def backendInterface(using ctx: Context): DottyBackendInterface = { + if _backendInterface eq null then + // Enforce usage of FreshContext so we would be able to modify compilation unit between runs + val backendCtx = ctx match + case fc: FreshContext => fc + case ctx => ctx.fresh + _backendInterface = DottyBackendInterface(superCallsMap)(using backendCtx) + _backendInterface } - private var myPrimitives: DottyPrimitives = null - - override def run(using Context): Unit = - if myPrimitives == null then myPrimitives = new DottyPrimitives(ctx) - new GenBCodePipeline( - DottyBackendInterface(outputDir, superCallsMap), - myPrimitives - ).run(ctx.compilationUnit.tpdTree) - - - override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] = { - outputDir match - case jar: JarArchive => - updateJarManifestWithMainClass(jar, entryPoints.toList) - case _ => - try 
super.runOn(units) - finally outputDir match { - case jar: JarArchive => - if (ctx.run.nn.suspendedUnits.nonEmpty) - // If we close the jar the next run will not be able to write on the jar. - // But if we do not close it we cannot use it as part of the macro classpath of the suspended files. - report.error("Can not suspend and output to a jar at the same time. See suspension with -Xprint-suspension.") - - jar.close() - case _ => - } + private var _codeGen: CodeGen = _ + def codeGen(using Context): CodeGen = { + if _codeGen eq null then + val int = backendInterface + val dottyPrimitives = new DottyPrimitives(ctx) + _codeGen = new CodeGen(int, dottyPrimitives)(bTypes.asInstanceOf[BTypesFromSymbols[int.type]]) + _codeGen } - private def updateJarManifestWithMainClass(jarArchive: JarArchive, entryPoints: List[String])(using Context): Unit = - val mainClass = Option.when(!ctx.settings.XmainClass.isDefault)(ctx.settings.XmainClass.value).orElse { - entryPoints match - case List(mainClass) => - Some(mainClass) - case Nil => - report.warning("No Main-Class designated or discovered.") - None - case mcs => - report.warning(s"No Main-Class due to multiple entry points:\n ${mcs.mkString("\n ")}") - None - } - mainClass.map { mc => - val manifest = Jar.WManifest() - manifest.mainClass = mc - val file = jarArchive.subdirectoryNamed("META-INF").fileNamed("MANIFEST.MF") - val os = file.output - manifest.underlying.write(os) - os.close() - } - end updateJarManifestWithMainClass -} - -object GenBCode { - val name: String = "genBCode" - val description: String = "generate JVM bytecode" -} - -class GenBCodePipeline(val int: DottyBackendInterface, val primitives: DottyPrimitives)(using Context) extends BCodeSyncAndTry { - import DottyBackendInterface.symExtensions - - private var tree: Tree = _ - - private val sourceFile: SourceFile = ctx.compilationUnit.source - - /** Convert a `dotty.tools.io.AbstractFile` into a - * `dotty.tools.dotc.interfaces.AbstractFile`. 
- */ - private def convertAbstractFile(absfile: dotty.tools.io.AbstractFile): interfaces.AbstractFile = - new interfaces.AbstractFile { - override def name = absfile.name - override def path = absfile.path - override def jfile = Optional.ofNullable(absfile.file) - } - - final class PlainClassBuilder(cunit: CompilationUnit) extends SyncAndTryBuilder(cunit) - -// class BCodePhase() { - - private var bytecodeWriter : BytecodeWriter = null - private var mirrorCodeGen : JMirrorBuilder = null - - /* ---------------- q1 ---------------- */ - - case class Item1(arrivalPos: Int, cd: TypeDef, cunit: CompilationUnit) { - def isPoison: Boolean = { arrivalPos == Int.MaxValue } + private var _bTypes: BTypesFromSymbols[DottyBackendInterface] = _ + def bTypes(using Context): BTypesFromSymbols[DottyBackendInterface] = { + if _bTypes eq null then + _bTypes = BTypesFromSymbols(backendInterface, frontendAccess) + _bTypes } - private val poison1 = Item1(Int.MaxValue, null, ctx.compilationUnit) - private val q1 = new java.util.LinkedList[Item1] - /* ---------------- q2 ---------------- */ - - case class SubItem2(classNode: asm.tree.ClassNode, - file: dotty.tools.io.AbstractFile) - - case class Item2(arrivalPos: Int, - mirror: SubItem2, - plain: SubItem2) { - def isPoison: Boolean = { arrivalPos == Int.MaxValue } + private var _frontendAccess: PostProcessorFrontendAccess | Null = _ + def frontendAccess(using Context): PostProcessorFrontendAccess = { + if _frontendAccess eq null then + _frontendAccess = PostProcessorFrontendAccess.Impl(backendInterface, entryPoints) + _frontendAccess.nn } - private val poison2 = Item2(Int.MaxValue, null, null) - private val q2 = new _root_.java.util.LinkedList[Item2] - - /* ---------------- q3 ---------------- */ - - /* - * An item of queue-3 (the last queue before serializing to disk) contains three of these - * (one for each of mirror and plain classes). 
- * - * @param jclassName internal name of the class - * @param jclassBytes bytecode emitted for the class SubItem3 represents - */ - case class SubItem3( - jclassName: String, - jclassBytes: Array[Byte], - jclassFile: dotty.tools.io.AbstractFile - ) - - case class Item3(arrivalPos: Int, - mirror: SubItem3, - plain: SubItem3) { - - def isPoison: Boolean = { arrivalPos == Int.MaxValue } - } - private val i3comparator = new java.util.Comparator[Item3] { - override def compare(a: Item3, b: Item3) = { - if (a.arrivalPos < b.arrivalPos) -1 - else if (a.arrivalPos == b.arrivalPos) 0 - else 1 - } + private var _postProcessor: PostProcessor | Null = _ + def postProcessor(using Context): PostProcessor = { + if _postProcessor eq null then + _postProcessor = new PostProcessor(frontendAccess, bTypes) + _postProcessor.nn } - private val poison3 = Item3(Int.MaxValue, null, null) - private val q3 = new java.util.PriorityQueue[Item3](1000, i3comparator) - - /* - * Pipeline that takes ClassDefs from queue-1, lowers them into an intermediate form, placing them on queue-2 - */ - class Worker1(needsOutFolder: Boolean) { - - private val lowerCaseNames = mutable.HashMap.empty[String, Symbol] - private def checkForCaseConflict(javaClassName: String, classSymbol: Symbol) = { - val lowerCaseName = javaClassName.toLowerCase - lowerCaseNames.get(lowerCaseName) match { - case None => - lowerCaseNames.put(lowerCaseName, classSymbol) - case Some(dupClassSym) => - // Order is not deterministic so we enforce lexicographic order between the duplicates for error-reporting - val (cl1, cl2) = - if (classSymbol.effectiveName.toString < dupClassSym.effectiveName.toString) (classSymbol, dupClassSym) - else (dupClassSym, classSymbol) - val same = classSymbol.effectiveName.toString == dupClassSym.effectiveName.toString - atPhase(typerPhase) { - if (same) - report.warning( // FIXME: This should really be an error, but then FromTasty tests fail - s"${cl1.show} and ${cl2.showLocated} produce classes that 
overwrite one another", cl1.sourcePos) - else - report.warning(s"${cl1.show} differs only in case from ${cl2.showLocated}. " + - "Such classes will overwrite one another on case-insensitive filesystems.", cl1.sourcePos) - } - } - } - - def run(): Unit = { - while (true) { - val item = q1.poll - if (item.isPoison) { - q2 add poison2 - return - } - else { - try { /*withCurrentUnit(item.cunit)*/(visit(item)) } - catch { - case ex: InterruptedException => - throw ex - case ex: Throwable => - println(s"Error while emitting ${item.cunit.source.file.name}") - throw ex - } - } - } - } - - /* - * Checks for duplicate internal names case-insensitively, - * builds ASM ClassNodes for mirror and plain classes; - * enqueues them in queue-2. - * - */ - def visit(item: Item1): Boolean = { - val Item1(arrivalPos, cd, cunit) = item - val claszSymbol = cd.symbol - - // -------------- mirror class, if needed -------------- - val mirrorC = - if (claszSymbol.isTopLevelModuleClass) { - if (claszSymbol.companionClass == NoSymbol) { - mirrorCodeGen.genMirrorClass(claszSymbol, cunit) - } else { - report.log(s"No mirror class for module with linked class: ${claszSymbol.showFullName}") - null - } - } else null - - // -------------- "plain" class -------------- - val pcb = new PlainClassBuilder(cunit) - pcb.genPlainClass(cd) - val outF = if (needsOutFolder) getOutFolder(claszSymbol, pcb.thisName) else null; - val plainC = pcb.cnode - - if (claszSymbol.isClass) // @DarkDimius is this test needed here? 
- for (binary <- ctx.compilationUnit.pickled.get(claszSymbol.asClass)) { - val store = if (mirrorC ne null) mirrorC else plainC - val tasty = - val outTastyFile = getFileForClassfile(outF, store.name, ".tasty") - val outstream = new DataOutputStream(outTastyFile.bufferedOutput) - try outstream.write(binary()) - catch case ex: ClosedByInterruptException => - try - outTastyFile.delete() // don't leave an empty or half-written tastyfile around after an interrupt - catch - case _: Throwable => - throw ex - finally outstream.close() - - val uuid = new TastyHeaderUnpickler(binary()).readHeader() - val lo = uuid.getMostSignificantBits - val hi = uuid.getLeastSignificantBits - - // TASTY attribute is created but only the UUID bytes are stored in it. - // A TASTY attribute has length 16 if and only if the .tasty file exists. - val buffer = new TastyBuffer(16) - buffer.writeUncompressedLong(lo) - buffer.writeUncompressedLong(hi) - buffer.bytes - - val dataAttr = createJAttribute(nme.TASTYATTR.mangledString, tasty, 0, tasty.length) - store.visitAttribute(dataAttr) - } - - - // ----------- create files - - val classNodes = List(mirrorC, plainC) - val classFiles = classNodes.map(cls => - if (outF != null && cls != null) { - try { - checkForCaseConflict(cls.name, claszSymbol) - getFileForClassfile(outF, cls.name, ".class") - } catch { - case e: FileConflictException => - report.error(s"error writing ${cls.name}: ${e.getMessage}") - null - } - } else null - ) - - // ----------- compiler and sbt's callbacks - - val (fullClassName, isLocal) = atPhase(sbtExtractDependenciesPhase) { - (ExtractDependencies.classNameAsString(claszSymbol), claszSymbol.isLocal) - } - - for ((cls, clsFile) <- classNodes.zip(classFiles)) { - if (cls != null) { - val className = cls.name.replace('/', '.') - if (ctx.compilerCallback != null) - ctx.compilerCallback.onClassGenerated(sourceFile, convertAbstractFile(clsFile), className) - if (ctx.sbtCallback != null) { - if (isLocal) - 
ctx.sbtCallback.generatedLocalClass(sourceFile.jfile.orElse(null), clsFile.file) - else { - ctx.sbtCallback.generatedNonLocalClass(sourceFile.jfile.orElse(null), clsFile.file, - className, fullClassName) - } - } - } - } - - // ----------- hand over to pipeline-2 - - val item2 = - Item2(arrivalPos, - SubItem2(mirrorC, classFiles(0)), - SubItem2(plainC, classFiles(1))) - - q2 add item2 // at the very end of this method so that no Worker2 thread starts mutating before we're done. - } // end of method visit(Item1) - - } // end of class BCodePhase.Worker1 - - /* - * Pipeline that takes ClassNodes from queue-2. The unit of work depends on the optimization level: - * - * (a) no optimization involves: - * - converting the plain ClassNode to byte array and placing it on queue-3 - */ - class Worker2 { - import bTypes.ClassBType - import bTypes.coreBTypes.jliLambdaMetaFactoryAltMetafactoryHandle - // lazy val localOpt = new LocalOpt(new Settings()) - - private def localOptimizations(classNode: ClassNode): Unit = { - // BackendStats.timed(BackendStats.methodOptTimer)(localOpt.methodOptimizations(classNode)) - } - - - /* Return an array of all serializable lambdas in this class */ - private def collectSerializableLambdas(classNode: ClassNode): Array[Handle] = { - val indyLambdaBodyMethods = new mutable.ArrayBuffer[Handle] - for (m <- classNode.methods.asScala) { - val iter = m.instructions.iterator - while (iter.hasNext) { - val insn = iter.next() - insn match { - case indy: InvokeDynamicInsnNode - if indy.bsm == jliLambdaMetaFactoryAltMetafactoryHandle => - import java.lang.invoke.LambdaMetafactory.FLAG_SERIALIZABLE - val metafactoryFlags = indy.bsmArgs(3).asInstanceOf[Integer].toInt - val isSerializable = (metafactoryFlags & FLAG_SERIALIZABLE) != 0 - if isSerializable then - val implMethod = indy.bsmArgs(1).asInstanceOf[Handle] - indyLambdaBodyMethods += implMethod - case _ => - } - } - } - indyLambdaBodyMethods.toArray - } - - /* - * Add: - * - * private static Object 
$deserializeLambda$(SerializedLambda l) { - * try return indy[scala.runtime.LambdaDeserialize.bootstrap, targetMethodGroup$0](l) - * catch { - * case i: IllegalArgumentException => - * try return indy[scala.runtime.LambdaDeserialize.bootstrap, targetMethodGroup$1](l) - * catch { - * case i: IllegalArgumentException => - * ... - * return indy[scala.runtime.LambdaDeserialize.bootstrap, targetMethodGroup${NUM_GROUPS-1}](l) - * } - * - * We use invokedynamic here to enable caching within the deserializer without needing to - * host a static field in the enclosing class. This allows us to add this method to interfaces - * that define lambdas in default methods. - * - * SI-10232 we can't pass arbitrary number of method handles to the final varargs parameter of the bootstrap - * method due to a limitation in the JVM. Instead, we emit a separate invokedynamic bytecode for each group of target - * methods. - */ - private def addLambdaDeserialize(classNode: ClassNode, implMethodsArray: Array[Handle]): Unit = { - import asm.Opcodes._ - import bTypes._ - import coreBTypes._ - - val cw = classNode - - // Make sure to reference the ClassBTypes of all types that are used in the code generated - // here (e.g. java/util/Map) are initialized. Initializing a ClassBType adds it to - // `classBTypeFromInternalNameMap`. When writing the classfile, the asm ClassWriter computes - // stack map frames and invokes the `getCommonSuperClass` method. This method expects all - // ClassBTypes mentioned in the source code to exist in the map. 
- - val serlamObjDesc = MethodBType(jliSerializedLambdaRef :: Nil, ObjectRef).descriptor - - val mv = cw.visitMethod(ACC_PRIVATE + ACC_STATIC + ACC_SYNTHETIC, "$deserializeLambda$", serlamObjDesc, null, null) - def emitLambdaDeserializeIndy(targetMethods: Seq[Handle]): Unit = { - mv.visitVarInsn(ALOAD, 0) - mv.visitInvokeDynamicInsn("lambdaDeserialize", serlamObjDesc, jliLambdaDeserializeBootstrapHandle, targetMethods: _*) - } - - val targetMethodGroupLimit = 255 - 1 - 3 // JVM limit. See See MAX_MH_ARITY in CallSite.java - val groups: Array[Array[Handle]] = implMethodsArray.grouped(targetMethodGroupLimit).toArray - val numGroups = groups.length - - import scala.tools.asm.Label - val initialLabels = Array.fill(numGroups - 1)(new Label()) - val terminalLabel = new Label - def nextLabel(i: Int) = if (i == numGroups - 2) terminalLabel else initialLabels(i + 1) - - for ((label, i) <- initialLabels.iterator.zipWithIndex) { - mv.visitTryCatchBlock(label, nextLabel(i), nextLabel(i), jlIllegalArgExceptionRef.internalName) - } - for ((label, i) <- initialLabels.iterator.zipWithIndex) { - mv.visitLabel(label) - emitLambdaDeserializeIndy(groups(i).toIndexedSeq) - mv.visitInsn(ARETURN) - } - mv.visitLabel(terminalLabel) - emitLambdaDeserializeIndy(groups(numGroups - 1).toIndexedSeq) - mv.visitInsn(ARETURN) - } - - private def setInnerClasses(classNode: ClassNode): Unit = if (classNode != null) { - classNode.innerClasses.clear() - val (declared, referred) = collectNestedClasses(classNode) - addInnerClasses(classNode, declared, referred) - } - - /** - * Visit the class node and collect all referenced nested classes. 
- */ - private def collectNestedClasses(classNode: ClassNode): (List[ClassBType], List[ClassBType]) = { - // type InternalName = String - val c = new NestedClassesCollector[ClassBType](nestedOnly = true) { - def declaredNestedClasses(internalName: InternalName): List[ClassBType] = - bTypes.classBTypeFromInternalName(internalName).info.memberClasses - - def getClassIfNested(internalName: InternalName): Option[ClassBType] = { - val c = bTypes.classBTypeFromInternalName(internalName) - Option.when(c.isNestedClass)(c) - } - - def raiseError(msg: String, sig: String, e: Option[Throwable]): Unit = { - // don't crash on invalid generic signatures - } - } - c.visit(classNode) - (c.declaredInnerClasses.toList, c.referredInnerClasses.toList) - } - - def run(): Unit = { - while (true) { - val item = q2.poll - if (item.isPoison) { - q3 add poison3 - return - } - else { - try { - val plainNode = item.plain.classNode - localOptimizations(plainNode) - val serializableLambdas = collectSerializableLambdas(plainNode) - if (serializableLambdas.nonEmpty) - addLambdaDeserialize(plainNode, serializableLambdas) - setInnerClasses(plainNode) - setInnerClasses(item.mirror.classNode) - addToQ3(item) - } catch { - case ex: InterruptedException => - throw ex - case ex: Throwable => - println(s"Error while emitting ${item.plain.classNode.name}") - throw ex - } + override def run(using ctx: Context): Unit = + // CompilationUnit is the only component that will differ between each run invocation + // We need to update it to have correct source positions. + // FreshContext is always enforced when creating backend interface + backendInterface.ctx + .asInstanceOf[FreshContext] + .setCompilationUnit(ctx.compilationUnit) + val generated = codeGen.genUnit(ctx.compilationUnit) + // In Scala 2, the backend might use global optimizations which might delay post-processing to build the call graph. + // In Scala 3, we don't perform backend optimizations and always perform post-processing immediately. 
+ // https://github.com/scala/scala/pull/6057 + postProcessor.postProcessAndSendToDisk(generated) + (ctx.compilerCallback: CompilerCallback | Null) match { + case cb: CompilerCallback => cb.onSourceCompiled(ctx.source) + case null => () + } + + override def runOn(units: List[CompilationUnit])(using ctx:Context): List[CompilationUnit] = { + try super.runOn(units) + finally + // frontendAccess and postProcessor are created lazilly, clean them up only if they were initialized + if _frontendAccess ne null then + frontendAccess.compilerSettings.outputDirectory match { + case jar: JarArchive => + if (ctx.run.nn.suspendedUnits.nonEmpty) + // If we close the jar the next run will not be able to write on the jar. + // But if we do not close it we cannot use it as part of the macro classpath of the suspended files. + report.error("Can not suspend and output to a jar at the same time. See suspension with -Xprint-suspension.") + + jar.close() + case _ => () } - } - } - - private def addToQ3(item: Item2) = { - - def getByteArray(cn: asm.tree.ClassNode): Array[Byte] = { - val cw = new CClassWriter(extraProc) - cn.accept(cw) - cw.toByteArray - } - - val Item2(arrivalPos, SubItem2(mirror, mirrorFile), SubItem2(plain, plainFile)) = item - - val mirrorC = if (mirror == null) null else SubItem3(mirror.name, getByteArray(mirror), mirrorFile) - val plainC = SubItem3(plain.name, getByteArray(plain), plainFile) - - if (AsmUtils.traceSerializedClassEnabled && plain.name.contains(AsmUtils.traceSerializedClassPattern)) { - if (mirrorC != null) AsmUtils.traceClass(mirrorC.jclassBytes) - AsmUtils.traceClass(plainC.jclassBytes) - } - - q3 add Item3(arrivalPos, mirrorC, plainC) - } - - } // end of class BCodePhase.Worker2 - - var arrivalPos: Int = 0 - - /* - * A run of the BCodePhase phase comprises: - * - * (a) set-up steps (most notably supporting maps in `BCodeTypes`, - * but also "the" writer where class files in byte-array form go) - * - * (b) building of ASM ClassNodes, their optimization 
and serialization. - * - * (c) tear down (closing the classfile-writer and clearing maps) - * - */ - def run(t: Tree)(using Context): Unit = { - this.tree = t - - // val bcodeStart = Statistics.startTimer(BackendStats.bcodeTimer) - - // val initStart = Statistics.startTimer(BackendStats.bcodeInitTimer) - arrivalPos = 0 // just in case - // scalaPrimitives.init() - bTypes.intializeCoreBTypes() - // Statistics.stopTimer(BackendStats.bcodeInitTimer, initStart) - - // initBytecodeWriter invokes fullName, thus we have to run it before the typer-dependent thread is activated. - bytecodeWriter = initBytecodeWriter() - mirrorCodeGen = new JMirrorBuilder - - val needsOutfileForSymbol = bytecodeWriter.isInstanceOf[ClassBytecodeWriter] - buildAndSendToDisk(needsOutfileForSymbol) - - // closing output files. - bytecodeWriter.close() - // Statistics.stopTimer(BackendStats.bcodeTimer, bcodeStart) - - if (ctx.compilerCallback != null) - ctx.compilerCallback.onSourceCompiled(sourceFile) - - /* TODO Bytecode can be verified (now that all classfiles have been written to disk) - * - * (1) asm.util.CheckAdapter.verify() - * public static void verify(ClassReader cr, ClassLoader loader, boolean dump, PrintWriter pw) - * passing a custom ClassLoader to verify inter-dependent classes. - * Alternatively, - * - an offline-bytecode verifier could be used (e.g. Maxine brings one as separate tool). - * - -Xverify:all - * - * (2) if requested, check-java-signatures, over and beyond the syntactic checks in `getGenericSignature()` - * - */ + if _postProcessor ne null then + postProcessor.classfileWriter.close() } +} - /* - * Sequentially: - * (a) place all ClassDefs in queue-1 - * (b) dequeue one at a time from queue-1, convert it to ASM ClassNode, place in queue-2 - * (c) dequeue one at a time from queue-2, convert it to byte-array, place in queue-3 - * (d) serialize to disk by draining queue-3. 
- */ - private def buildAndSendToDisk(needsOutFolder: Boolean)(using Context) = { - try - feedPipeline1() - // val genStart = Statistics.startTimer(BackendStats.bcodeGenStat) - (new Worker1(needsOutFolder)).run() - // Statistics.stopTimer(BackendStats.bcodeGenStat, genStart) - - (new Worker2).run() - - // val writeStart = Statistics.startTimer(BackendStats.bcodeWriteTimer) - drainQ3() - // Statistics.stopTimer(BackendStats.bcodeWriteTimer, writeStart) - catch - case e: MethodTooLargeException => - val method = - s"${e.getClassName.replaceAll("/", ".")}.${e.getMethodName}" - val msg = - s"Generated bytecode for method '$method' is too large. Size: ${e.getCodeSize} bytes. Limit is 64KB" - report.error(msg) - case e: ClassTooLargeException => - val msg = - s"Class '${e.getClassName.replaceAll("/", ".")}' is too large. Constant pool size: ${e.getConstantPoolCount}. Limit is 64K entries" - report.error(msg) - - } - - /* Feed pipeline-1: place all ClassDefs on q1, recording their arrival position. */ - private def feedPipeline1() = { - def gen(tree: Tree): Unit = { - tree match { - case EmptyTree => () - case PackageDef(_, stats) => stats foreach gen - case ValDef(name, tpt, rhs) => () // module val not emitted - case cd: TypeDef => - q1 add Item1(arrivalPos, cd, int.ctx.compilationUnit) - arrivalPos += 1 - } - } - gen(tree) - q1 add poison1 - } - - /* Pipeline that writes classfile representations to disk. 
*/ - private def drainQ3() = { - - def sendToDisk(cfr: SubItem3): Unit = { - if (cfr != null){ - val SubItem3(jclassName, jclassBytes, jclassFile) = cfr - bytecodeWriter.writeClass(jclassName, jclassName, jclassBytes, jclassFile) - } - } - - var moreComing = true - // `expected` denotes the arrivalPos whose Item3 should be serialized next - var expected = 0 - - while (moreComing) { - val incoming = q3.poll - moreComing = !incoming.isPoison - if (moreComing) { - val item = incoming - sendToDisk(item.mirror) - sendToDisk(item.plain) - expected += 1 - } - } - - // we're done - assert(q1.isEmpty, s"Some ClassDefs remained in the first queue: $q1") - assert(q2.isEmpty, s"Some classfiles remained in the second queue: $q2") - assert(q3.isEmpty, s"Some classfiles weren't written to disk: $q3") - - } - //} // end of class BCodePhase +object GenBCode { + val name: String = "genBCode" + val description: String = "generate JVM bytecode" } diff --git a/compiler/src/dotty/tools/backend/jvm/GenericSignatureVisitor.scala b/compiler/src/dotty/tools/backend/jvm/GenericSignatureVisitor.scala index e9e532933290..c16bc70fc3b0 100644 --- a/compiler/src/dotty/tools/backend/jvm/GenericSignatureVisitor.scala +++ b/compiler/src/dotty/tools/backend/jvm/GenericSignatureVisitor.scala @@ -185,13 +185,13 @@ abstract class GenericSignatureVisitor(nestedOnly: Boolean) { } // Backported from scala/scala, commit sha: 724be0e9425b9ad07c244d25efdad695d75abbcf -// https://github.com/scala/scala/blob/724be0e9425b9ad07c244d25efdad695d75abbcf/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala#L790 +// https://github.com/scala/scala/blob/724be0e9425b9ad07c244d25efdad695d75abbcf/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala#L790 abstract class NestedClassesCollector[T](nestedOnly: Boolean) extends GenericSignatureVisitor(nestedOnly) { - type InternalName = String + type InternalName = String def declaredNestedClasses(internalName: InternalName): List[T] def 
getClassIfNested(internalName: InternalName): Option[T] - + val declaredInnerClasses = mutable.Set.empty[T] val referredInnerClasses = mutable.Set.empty[T] diff --git a/compiler/src/dotty/tools/backend/jvm/PostProcessor.scala b/compiler/src/dotty/tools/backend/jvm/PostProcessor.scala new file mode 100644 index 000000000000..606b5645aa24 --- /dev/null +++ b/compiler/src/dotty/tools/backend/jvm/PostProcessor.scala @@ -0,0 +1,117 @@ +package dotty.tools.backend.jvm + +import scala.collection.mutable.ListBuffer +import dotty.tools.dotc.util.{SourcePosition, NoSourcePosition} +import dotty.tools.io.AbstractFile +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Decorators.em +import scala.tools.asm.ClassWriter +import scala.tools.asm.tree.ClassNode + +/** + * Implements late stages of the backend that don't depend on a Global instance, i.e., + * optimizations, post-processing and classfile serialization and writing. + */ +class PostProcessor(val frontendAccess: PostProcessorFrontendAccess, val bTypes: BTypes) { + self => + import bTypes.* + import frontendAccess.{backendReporting, compilerSettings} + import int.given + + val backendUtils = new BackendUtils(this) + val classfileWriter = ClassfileWriter(frontendAccess) + + def postProcessAndSendToDisk(generatedDefs: GeneratedDefs): Unit = { + val GeneratedDefs(classes, tasty) = generatedDefs + for (GeneratedClass(classNode, sourceFile, isArtifact, onFileCreated) <- classes) { + val bytes = + try + if !isArtifact then setSerializableLambdas(classNode) + setInnerClasses(classNode) + serializeClass(classNode) + catch + case e: java.lang.RuntimeException if e.getMessage != null && e.getMessage.nn.contains("too large!") => + backendReporting.error(em"Could not write class ${classNode.name} because it exceeds JVM code size limits. 
${e.getMessage}") + null + case ex: Throwable => + ex.printStackTrace() + backendReporting.error(em"Error while emitting ${classNode.name}\n${ex.getMessage}") + null + + if (bytes != null) { + if (AsmUtils.traceSerializedClassEnabled && classNode.name.nn.contains(AsmUtils.traceSerializedClassPattern)) + AsmUtils.traceClass(bytes) + + val clsFile = classfileWriter.writeClass(classNode.name.nn, bytes, sourceFile) + if clsFile != null then onFileCreated(clsFile) + } + } + + for (GeneratedTasty(classNode, binaryGen) <- tasty){ + classfileWriter.writeTasty(classNode.name.nn, binaryGen()) + } + } + + private def setSerializableLambdas(classNode: ClassNode): Unit = { + import backendUtils.{collectSerializableLambdas, addLambdaDeserialize} + val serializableLambdas = collectSerializableLambdas(classNode) + if serializableLambdas.nonEmpty then + addLambdaDeserialize(classNode, serializableLambdas) + } + + private def setInnerClasses(classNode: ClassNode): Unit = { + import backendUtils.{collectNestedClasses, addInnerClasses} + classNode.innerClasses.nn.clear() + val (declared, referred) = collectNestedClasses(classNode) + addInnerClasses(classNode, declared, referred) + } + + def serializeClass(classNode: ClassNode): Array[Byte] = { + val cw = new ClassWriterWithBTypeLub(backendUtils.extraProc) + classNode.accept(cw) + cw.toByteArray.nn + } + + // ----------------------------------------------------------------------------------------- + // finding the least upper bound in agreement with the bytecode verifier (given two internal names handed by ASM) + // Background: + // http://gallium.inria.fr/~xleroy/publi/bytecode-verification-JAR.pdf + // http://comments.gmane.org/gmane.comp.java.vm.languages/2293 + // https://github.com/scala/bug/issues/3872 + // ----------------------------------------------------------------------------------------- + + /* An `asm.ClassWriter` that uses `jvmWiseLUB()` + * The internal name of the least common ancestor of the types given by inameA and 
inameB. + * It's what ASM needs to know in order to compute stack map frames, http://asm.ow2.org/doc/developer-guide.html#controlflow + */ + final class ClassWriterWithBTypeLub(flags: Int) extends ClassWriter(flags) { + + /** + * This method is used by asm when computing stack map frames. It is thread-safe: it depends + * only on the BTypes component, which does not depend on global. + * TODO @lry move to a different place where no global is in scope, on bTypes. + */ + override def getCommonSuperClass(inameA: String, inameB: String): String = { + // All types that appear in a class node need to have their ClassBType cached, see [[cachedClassBType]]. + val a = classBTypeFromInternalName(inameA) + val b = classBTypeFromInternalName(inameB) + val lub = a.jvmWiseLUB(b) + val lubName = lub.internalName + assert(lubName != "scala/Any") + lubName // ASM caches the answer during the lifetime of a ClassWriter. We outlive that. Not sure whether caching on our side would improve things. + } + } +} + +/** + * The result of code generation. [[isArtifact]] is `true` for mirror. 
+ */ +case class GeneratedClass(classNode: ClassNode, sourceFile: AbstractFile, isArtifact: Boolean, onFileCreated: AbstractFile => Unit) +case class GeneratedTasty(classNode: ClassNode, tastyGen: () => Array[Byte]) +case class GeneratedDefs(classes: List[GeneratedClass], tasty: List[GeneratedTasty]) + +// Temporary class, will be refactored in a future commit +trait ClassWriterForPostProcessor { + type InternalName = String + def write(bytes: Array[Byte], className: InternalName, sourceFile: AbstractFile): Unit +} diff --git a/compiler/src/dotty/tools/backend/jvm/PostProcessorFrontendAccess.scala b/compiler/src/dotty/tools/backend/jvm/PostProcessorFrontendAccess.scala new file mode 100644 index 000000000000..80ee68bc94c3 --- /dev/null +++ b/compiler/src/dotty/tools/backend/jvm/PostProcessorFrontendAccess.scala @@ -0,0 +1,79 @@ +package dotty.tools.backend.jvm + +import scala.collection.mutable.{Clearable, HashSet} +import dotty.tools.dotc.util.* +import dotty.tools.dotc.reporting.Message +import dotty.tools.io.AbstractFile +import java.util.{Collection => JCollection, Map => JMap} +import dotty.tools.dotc.core.Contexts.Context +import dotty.tools.dotc.report +import dotty.tools.dotc.core.Phases + +/** + * Functionality needed in the post-processor whose implementation depends on the compiler + * frontend. All methods are synchronized. 
+ */ +sealed abstract class PostProcessorFrontendAccess { + import PostProcessorFrontendAccess._ + + def compilerSettings: CompilerSettings + def backendReporting: BackendReporting + def getEntryPoints: List[String] + + private val frontendLock: AnyRef = new Object() + inline final def frontendSynch[T](inline x: => T): T = frontendLock.synchronized(x) +} + +object PostProcessorFrontendAccess { + sealed trait CompilerSettings { + def debug: Boolean + def target: String // javaOutputVersion + + def dumpClassesDirectory: Option[String] + def outputDirectory: AbstractFile + + def mainClass: Option[String] + } + + sealed trait BackendReporting { + def error(message: Context ?=> Message): Unit + def warning(message: Context ?=> Message): Unit + def log(message: Context ?=> String): Unit + } + + class Impl[I <: DottyBackendInterface](val int: I, entryPoints: HashSet[String]) extends PostProcessorFrontendAccess { + import int.given + lazy val compilerSettings: CompilerSettings = buildCompilerSettings() + + private def buildCompilerSettings(): CompilerSettings = new CompilerSettings { + extension [T](s: dotty.tools.dotc.config.Settings.Setting[T]) + def valueSetByUser: Option[T] = + Option(s.value).filter(_ != s.default) + def s = ctx.settings + + lazy val target = + val releaseValue = Option(s.javaOutputVersion.value).filter(_.nonEmpty) + val targetValue = Option(s.XuncheckedJavaOutputVersion.value).filter(_.nonEmpty) + (releaseValue, targetValue) match + case (Some(release), None) => release + case (None, Some(target)) => target + case (Some(release), Some(_)) => + report.warning(s"The value of ${s.XuncheckedJavaOutputVersion.name} was overridden by ${ctx.settings.javaOutputVersion.name}") + release + case (None, None) => "8" // least supported version by default + + lazy val debug: Boolean = ctx.debug + lazy val dumpClassesDirectory: Option[String] = s.Ydumpclasses.valueSetByUser + lazy val outputDirectory: AbstractFile = s.outputDir.value + lazy val mainClass: 
Option[String] = s.XmainClass.valueSetByUser + } + + object backendReporting extends BackendReporting { + def error(message: Context ?=> Message): Unit = frontendSynch(report.error(message)) + def warning(message: Context ?=> Message): Unit = frontendSynch(report.warning(message)) + def log(message: Context ?=> String): Unit = frontendSynch(report.log(message)) + } + + def getEntryPoints: List[String] = frontendSynch(entryPoints.toList) + } +} \ No newline at end of file diff --git a/compiler/src/dotty/tools/backend/jvm/scalaPrimitives.scala b/compiler/src/dotty/tools/backend/jvm/scalaPrimitives.scala index 2d4c3ce5c9c4..bc453aec17af 100644 --- a/compiler/src/dotty/tools/backend/jvm/scalaPrimitives.scala +++ b/compiler/src/dotty/tools/backend/jvm/scalaPrimitives.scala @@ -8,6 +8,7 @@ import Contexts._ import Names.TermName, StdNames._ import Types.{JavaArrayType, UnspecifiedErrorType, Type} import Symbols.{Symbol, NoSymbol} +import Decorators.em import dotc.report import dotc.util.ReadOnlyMap @@ -66,7 +67,7 @@ class DottyPrimitives(ictx: Context) { case defn.ArrayOf(el) => el case JavaArrayType(el) => el case _ => - report.error(s"expected Array $tpe") + report.error(em"expected Array $tpe") UnspecifiedErrorType } @@ -133,7 +134,7 @@ class DottyPrimitives(ictx: Context) { def addPrimitives(cls: Symbol, method: TermName, code: Int)(using Context): Unit = { val alts = cls.info.member(method).alternatives.map(_.symbol) if (alts.isEmpty) - report.error(s"Unknown primitive method $cls.$method") + report.error(em"Unknown primitive method $cls.$method") else alts foreach (s => addPrimitive(s, s.info.paramInfoss match { diff --git a/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala b/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala index 6714f664620b..eee791852fde 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala @@ -125,7 +125,14 @@ class JSCodeGen()(using genCtx: Context) { /** Implicitly 
materializes the current local name generator. */ implicit def implicitLocalNames: LocalNameGenerator = localNames.get - private def currentClassType = encodeClassType(currentClassSym) + def currentThisType: jstpe.Type = { + encodeClassType(currentClassSym) match { + case tpe @ jstpe.ClassType(cls) => + jstpe.BoxedClassToPrimType.getOrElse(cls, tpe) + case tpe => + tpe + } + } /** Returns a new fresh local identifier. */ private def freshLocalIdent()(implicit pos: Position): js.LocalIdent = @@ -1023,7 +1030,7 @@ class JSCodeGen()(using genCtx: Context) { // Constructor of a non-native JS class ------------------------------------ def genJSClassCapturesAndConstructor(constructorTrees: List[DefDef])( - implicit pos: SourcePosition): (List[js.ParamDef], js.JSMethodDef) = { + implicit pos: SourcePosition): (List[js.ParamDef], js.JSConstructorDef) = { /* We need to merge all Scala constructors into a single one because the * IR, like JavaScript, only allows a single one. * @@ -1095,20 +1102,21 @@ class JSCodeGen()(using genCtx: Context) { (exports.result(), jsClassCaptures.result()) } + // The name 'constructor' is used for error reporting here val (formalArgs, restParam, overloadDispatchBody) = jsExportsGen.genOverloadDispatch(JSName.Literal("constructor"), exports, jstpe.IntType) val overloadVar = js.VarDef(freshLocalIdent("overload"), NoOriginalName, jstpe.IntType, mutable = false, overloadDispatchBody) - val ctorStats = genJSClassCtorStats(overloadVar.ref, ctorTree) - - val constructorBody = js.Block( - paramVarDefs ::: List(overloadVar, ctorStats, js.Undefined())) + val constructorBody = wrapJSCtorBody( + paramVarDefs :+ overloadVar, + genJSClassCtorBody(overloadVar.ref, ctorTree), + js.Undefined() :: Nil + ) - val constructorDef = js.JSMethodDef( - js.MemberFlags.empty, - js.StringLiteral("constructor"), + val constructorDef = js.JSConstructorDef( + js.MemberFlags.empty.withNamespace(js.MemberNamespace.Constructor), formalArgs, restParam, 
constructorBody)(OptimizerHints.empty, None) (jsClassCaptures, constructorDef) @@ -1150,7 +1158,8 @@ class JSCodeGen()(using genCtx: Context) { assert(jsSuperCall.isDefined, s"Did not find Super call in primary JS construtor at ${dd.sourcePos}") - new PrimaryJSCtor(sym, genParamsAndInfo(sym, dd.paramss), jsSuperCall.get :: jsStats.result()) + new PrimaryJSCtor(sym, genParamsAndInfo(sym, dd.paramss), + js.JSConstructorBody(Nil, jsSuperCall.get, jsStats.result())(dd.span)) } private def genSecondaryJSClassCtor(dd: DefDef): SplitSecondaryJSCtor = { @@ -1251,9 +1260,9 @@ class JSCodeGen()(using genCtx: Context) { (jsExport, jsClassCaptures) } - /** generates a sequence of JS constructor statements based on a constructor tree. */ - private def genJSClassCtorStats(overloadVar: js.VarRef, - ctorTree: ConstructorTree[PrimaryJSCtor])(implicit pos: Position): js.Tree = { + /** Generates a JS constructor body based on a constructor tree. */ + private def genJSClassCtorBody(overloadVar: js.VarRef, + ctorTree: ConstructorTree[PrimaryJSCtor])(implicit pos: Position): js.JSConstructorBody = { /* generates a statement that conditionally executes body iff the chosen * overload is any of the descendants of `tree` (including itself). 
@@ -1348,13 +1357,19 @@ class JSCodeGen()(using genCtx: Context) { val primaryCtor = ctorTree.ctor val secondaryCtorTrees = ctorTree.subCtors - js.Block( - secondaryCtorTrees.map(preStats(_, primaryCtor.paramsAndInfo)) ++ - primaryCtor.body ++ + wrapJSCtorBody( + secondaryCtorTrees.map(preStats(_, primaryCtor.paramsAndInfo)), + primaryCtor.body, secondaryCtorTrees.map(postStats(_)) ) } + private def wrapJSCtorBody(before: List[js.Tree], body: js.JSConstructorBody, + after: List[js.Tree]): js.JSConstructorBody = { + js.JSConstructorBody(before ::: body.beforeSuper, body.superCall, + body.afterSuper ::: after)(body.pos) + } + private sealed trait JSCtor { val sym: Symbol val paramsAndInfo: List[(Symbol, JSParamInfo)] @@ -1362,7 +1377,7 @@ class JSCodeGen()(using genCtx: Context) { private class PrimaryJSCtor(val sym: Symbol, val paramsAndInfo: List[(Symbol, JSParamInfo)], - val body: List[js.Tree]) extends JSCtor + val body: js.JSConstructorBody) extends JSCtor private class SplitSecondaryJSCtor(val sym: Symbol, val paramsAndInfo: List[(Symbol, JSParamInfo)], @@ -1945,9 +1960,9 @@ class JSCodeGen()(using genCtx: Context) { }*/ thisLocalVarIdent.fold[js.Tree] { - js.This()(currentClassType) + js.This()(currentThisType) } { thisLocalIdent => - js.VarRef(thisLocalIdent)(currentClassType) + js.VarRef(thisLocalIdent)(currentThisType) } } @@ -2014,9 +2029,7 @@ class JSCodeGen()(using genCtx: Context) { val (exceptValDef, exceptVar) = if (mightCatchJavaScriptException) { val valDef = js.VarDef(freshLocalIdent("e"), NoOriginalName, - encodeClassType(defn.ThrowableClass), mutable = false, { - genModuleApplyMethod(jsdefn.Runtime_wrapJavaScriptException, origExceptVar :: Nil) - }) + encodeClassType(defn.ThrowableClass), mutable = false, js.WrapAsThrowable(origExceptVar)) (valDef, valDef.ref) } else { (js.Skip(), origExceptVar) @@ -2307,7 +2320,7 @@ class JSCodeGen()(using genCtx: Context) { val privateFieldDefs = mutable.ListBuffer.empty[js.FieldDef] val classDefMembers = 
mutable.ListBuffer.empty[js.MemberDef] val instanceMembers = mutable.ListBuffer.empty[js.MemberDef] - var constructor: Option[js.JSMethodDef] = None + var constructor: Option[js.JSConstructorDef] = None originalClassDef.memberDefs.foreach { case fdef: js.FieldDef => @@ -2321,17 +2334,13 @@ class JSCodeGen()(using genCtx: Context) { "Non-static, unexported method in non-native JS class") classDefMembers += mdef - case mdef: js.JSMethodDef => - mdef.name match { - case js.StringLiteral("constructor") => - assert(!mdef.flags.namespace.isStatic, "Exported static method") - assert(constructor.isEmpty, "two ctors in class") - constructor = Some(mdef) + case cdef: js.JSConstructorDef => + assert(constructor.isEmpty, "two ctors in class") + constructor = Some(cdef) - case _ => - assert(!mdef.flags.namespace.isStatic, "Exported static method") - instanceMembers += mdef - } + case mdef: js.JSMethodDef => + assert(!mdef.flags.namespace.isStatic, "Exported static method") + instanceMembers += mdef case property: js.JSPropertyDef => instanceMembers += property @@ -2361,7 +2370,7 @@ class JSCodeGen()(using genCtx: Context) { val jsClassCaptures = originalClassDef.jsClassCaptures.getOrElse { throw new AssertionError(s"no class captures for anonymous JS class at $pos") } - val js.JSMethodDef(_, _, ctorParams, ctorRestParam, ctorBody) = constructor.getOrElse { + val js.JSConstructorDef(_, ctorParams, ctorRestParam, ctorBody) = constructor.getOrElse { throw new AssertionError("No ctor found") } assert(ctorParams.isEmpty && ctorRestParam.isEmpty, @@ -2396,6 +2405,9 @@ class JSCodeGen()(using genCtx: Context) { case mdef: js.MethodDef => throw new AssertionError("unexpected MethodDef") + case cdef: js.JSConstructorDef => + throw new AssertionError("unexpected JSConstructorDef") + case mdef: js.JSMethodDef => implicit val pos = mdef.pos val impl = memberLambda(mdef.args, mdef.restParam, mdef.body) @@ -2468,36 +2480,43 @@ class JSCodeGen()(using genCtx: Context) { } // Transform the 
constructor body. - val inlinedCtorStats = new ir.Transformers.Transformer { - override def transform(tree: js.Tree, isStat: Boolean): js.Tree = tree match { - // The super constructor call. Transform this into a simple new call. - case js.JSSuperConstructorCall(args) => - implicit val pos = tree.pos - - val newTree = { - val ident = originalClassDef.superClass.getOrElse(throw new FatalError("No superclass")) - if (args.isEmpty && ident.name == JSObjectClassName) - js.JSObjectConstr(Nil) - else - js.JSNew(jsSuperClassRef, args) - } + val inlinedCtorStats: List[js.Tree] = { + val beforeSuper = ctorBody.beforeSuper - js.Block( - js.VarDef(selfName, thisOriginalName, jstpe.AnyType, mutable = false, newTree) :: - memberDefinitions) - - case js.This() => - selfRef(tree.pos) + val superCall = { + implicit val pos = ctorBody.superCall.pos + val js.JSSuperConstructorCall(args) = ctorBody.superCall - // Don't traverse closure boundaries - case closure: js.Closure => - val newCaptureValues = closure.captureValues.map(transformExpr) - closure.copy(captureValues = newCaptureValues)(closure.pos) + val newTree = { + val ident = originalClassDef.superClass.getOrElse(throw new FatalError("No superclass")) + if (args.isEmpty && ident.name == JSObjectClassName) + js.JSObjectConstr(Nil) + else + js.JSNew(jsSuperClassRef, args) + } - case tree => - super.transform(tree, isStat) + val selfVarDef = js.VarDef(selfName, thisOriginalName, jstpe.AnyType, mutable = false, newTree) + selfVarDef :: memberDefinitions } - }.transform(ctorBody, isStat = true) + + // After the super call, substitute `selfRef` for `This()` + val afterSuper = new ir.Transformers.Transformer { + override def transform(tree: js.Tree, isStat: Boolean): js.Tree = tree match { + case js.This() => + selfRef(tree.pos) + + // Don't traverse closure boundaries + case closure: js.Closure => + val newCaptureValues = closure.captureValues.map(transformExpr) + closure.copy(captureValues = newCaptureValues)(closure.pos) + + case 
tree => + super.transform(tree, isStat) + } + }.transformStats(ctorBody.afterSuper) + + beforeSuper ::: superCall ::: afterSuper + } val closure = js.Closure(arrow = true, jsClassCaptures, Nil, None, js.Block(inlinedCtorStats, selfRef), jsSuperClassValue :: args) @@ -2926,7 +2945,7 @@ class JSCodeGen()(using genCtx: Context) { case defn.ArrayOf(el) => el case JavaArrayType(el) => el case tpe => - val msg = ex"expected Array $tpe" + val msg = em"expected Array $tpe" report.error(msg) ErrorType(msg) } @@ -2989,14 +3008,12 @@ class JSCodeGen()(using genCtx: Context) { implicit val pos: SourcePosition = tree.sourcePos val exception = args.head val genException = genExpr(exception) - js.Throw { - if (exception.tpe.typeSymbol.derivesFrom(jsdefn.JavaScriptExceptionClass)) { - genModuleApplyMethod( - jsdefn.Runtime_unwrapJavaScriptException, - List(genException)) - } else { - genException - } + genException match { + case js.New(cls, _, _) if cls != JavaScriptExceptionClassName => + // Common case where ex is neither null nor a js.JavaScriptException + js.Throw(genException) + case _ => + js.Throw(js.UnwrapFromThrowable(genException)) } } @@ -3515,13 +3532,16 @@ class JSCodeGen()(using genCtx: Context) { val closure = js.Closure(arrow = true, formalCaptures, formalParams, restParam, genBody, actualCaptures) if (!funInterfaceSym.exists || defn.isFunctionClass(funInterfaceSym)) { - assert(!funInterfaceSym.exists || defn.isFunctionClass(funInterfaceSym), - s"Invalid functional interface $funInterfaceSym reached the back-end") val formalCount = formalParams.size val cls = ClassName("scala.scalajs.runtime.AnonFunction" + formalCount) val ctorName = MethodName.constructor( jstpe.ClassRef(ClassName("scala.scalajs.js.Function" + formalCount)) :: Nil) js.New(cls, js.MethodIdent(ctorName), List(closure)) + } else if (funInterfaceSym.name == tpnme.FunctionXXL && funInterfaceSym.owner == defn.ScalaRuntimePackageClass) { + val cls = ClassName("scala.scalajs.runtime.AnonFunctionXXL") + 
val ctorName = MethodName.constructor( + jstpe.ClassRef(ClassName("scala.scalajs.js.Function1")) :: Nil) + js.New(cls, js.MethodIdent(ctorName), List(closure)) } else { assert(funInterfaceSym.isJSType, s"Invalid functional interface $funInterfaceSym reached the back-end") @@ -3652,7 +3672,7 @@ class JSCodeGen()(using genCtx: Context) { } else if (sym.isJSType) { if (sym.is(Trait)) { report.error( - s"isInstanceOf[${sym.fullName}] not supported because it is a JS trait", + em"isInstanceOf[${sym.fullName}] not supported because it is a JS trait", pos) js.BooleanLiteral(true) } else { @@ -3982,6 +4002,53 @@ class JSCodeGen()(using genCtx: Context) { js.JSFunctionApply(fVarDef.ref, List(keyVarRef)) })) + case JS_THROW => + // js.special.throw(arg) + js.Throw(genArgs1) + + case JS_TRY_CATCH => + /* js.special.tryCatch(arg1, arg2) + * + * We must generate: + * + * val body = arg1 + * val handler = arg2 + * try { + * body() + * } catch (e) { + * handler(e) + * } + * + * with temporary vals, because `arg2` must be evaluated before + * `body` executes. Moreover, exceptions thrown while evaluating + * the function values `arg1` and `arg2` must not be caught. 
+ */ + val (arg1, arg2) = genArgs2 + val bodyVarDef = js.VarDef(freshLocalIdent("body"), NoOriginalName, + jstpe.AnyType, mutable = false, arg1) + val handlerVarDef = js.VarDef(freshLocalIdent("handler"), NoOriginalName, + jstpe.AnyType, mutable = false, arg2) + val exceptionVarIdent = freshLocalIdent("e") + val exceptionVarRef = js.VarRef(exceptionVarIdent)(jstpe.AnyType) + js.Block( + bodyVarDef, + handlerVarDef, + js.TryCatch( + js.JSFunctionApply(bodyVarDef.ref, Nil), + exceptionVarIdent, + NoOriginalName, + js.JSFunctionApply(handlerVarDef.ref, List(exceptionVarRef)) + )(jstpe.AnyType) + ) + + case WRAP_AS_THROWABLE => + // js.special.wrapAsThrowable(arg) + js.WrapAsThrowable(genArgs1) + + case UNWRAP_FROM_THROWABLE => + // js.special.unwrapFromThrowable(arg) + js.UnwrapFromThrowable(genArgs1) + case UNION_FROM | UNION_FROM_TYPE_CONSTRUCTOR => /* js.|.from and js.|.fromTypeConstructor * We should not have to deal with those. They have a perfectly valid @@ -4764,6 +4831,7 @@ object JSCodeGen { private val NullPointerExceptionClass = ClassName("java.lang.NullPointerException") private val JSObjectClassName = ClassName("scala.scalajs.js.Object") + private val JavaScriptExceptionClassName = ClassName("scala.scalajs.js.JavaScriptException") private val ObjectClassRef = jstpe.ClassRef(ir.Names.ObjectClass) diff --git a/compiler/src/dotty/tools/backend/sjs/JSDefinitions.scala b/compiler/src/dotty/tools/backend/sjs/JSDefinitions.scala index c252ac892548..5336d60129ac 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSDefinitions.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSDefinitions.scala @@ -162,10 +162,6 @@ final class JSDefinitions()(using Context) { @threadUnsafe lazy val RuntimePackageVal = requiredPackage("scala.scalajs.runtime") @threadUnsafe lazy val RuntimePackageClass = RuntimePackageVal.moduleClass.asClass - @threadUnsafe lazy val RuntimePackage_wrapJavaScriptExceptionR = RuntimePackageClass.requiredMethodRef("wrapJavaScriptException") - def 
Runtime_wrapJavaScriptException(using Context) = RuntimePackage_wrapJavaScriptExceptionR.symbol - @threadUnsafe lazy val Runtime_unwrapJavaScriptExceptionR = RuntimePackageClass.requiredMethodRef("unwrapJavaScriptException") - def Runtime_unwrapJavaScriptException(using Context) = Runtime_unwrapJavaScriptExceptionR.symbol @threadUnsafe lazy val Runtime_toScalaVarArgsR = RuntimePackageClass.requiredMethodRef("toScalaVarArgs") def Runtime_toScalaVarArgs(using Context) = Runtime_toScalaVarArgsR.symbol @threadUnsafe lazy val Runtime_toJSVarArgsR = RuntimePackageClass.requiredMethodRef("toJSVarArgs") @@ -206,6 +202,14 @@ final class JSDefinitions()(using Context) { def Special_instanceof(using Context) = Special_instanceofR.symbol @threadUnsafe lazy val Special_strictEqualsR = SpecialPackageClass.requiredMethodRef("strictEquals") def Special_strictEquals(using Context) = Special_strictEqualsR.symbol + @threadUnsafe lazy val Special_throwR = SpecialPackageClass.requiredMethodRef("throw") + def Special_throw(using Context) = Special_throwR.symbol + @threadUnsafe lazy val Special_tryCatchR = SpecialPackageClass.requiredMethodRef("tryCatch") + def Special_tryCatch(using Context) = Special_tryCatchR.symbol + @threadUnsafe lazy val Special_wrapAsThrowableR = SpecialPackageClass.requiredMethodRef("wrapAsThrowable") + def Special_wrapAsThrowable(using Context) = Special_wrapAsThrowableR.symbol + @threadUnsafe lazy val Special_unwrapFromThrowableR = SpecialPackageClass.requiredMethodRef("unwrapFromThrowable") + def Special_unwrapFromThrowable(using Context) = Special_unwrapFromThrowableR.symbol @threadUnsafe lazy val WrappedArrayType: TypeRef = requiredClassRef("scala.scalajs.js.WrappedArray") def WrappedArrayClass(using Context) = WrappedArrayType.symbol.asClass diff --git a/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala b/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala index 0884ec19b53e..78412999bb34 100644 --- 
a/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala @@ -135,8 +135,7 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { for ((info, _) <- tups.tail) { report.error( - em"export overload conflicts with export of $firstSym: " + - "a field may not share its exported name with another export", + em"export overload conflicts with export of $firstSym: a field may not share its exported name with another export", info.pos) } @@ -264,8 +263,8 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { .alternatives assert(!alts.isEmpty, - em"Ended up with no alternatives for ${classSym.fullName}::$name. " + - em"Original set was ${alts} with types ${alts.map(_.info)}") + em"""Ended up with no alternatives for ${classSym.fullName}::$name. + |Original set was ${alts} with types ${alts.map(_.info)}""") val (jsName, isProp) = exportNameInfo(name) @@ -309,7 +308,7 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { if (isProp && methodSyms.nonEmpty) { val firstAlt = alts.head report.error( - i"Conflicting properties and methods for ${classSym.fullName}::$name.", + em"Conflicting properties and methods for ${classSym.fullName}::$name.", firstAlt.srcPos) implicit val pos = firstAlt.span js.JSPropertyDef(js.MemberFlags.empty, genExpr(name)(firstAlt.sourcePos), None, None) @@ -613,7 +612,7 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { val altsTypesInfo = alts.map(_.info.show).sorted.mkString("\n ") report.error( - s"Cannot disambiguate overloads for $fullKind $displayName with types\n $altsTypesInfo", + em"Cannot disambiguate overloads for $fullKind $displayName with types\n $altsTypesInfo", pos) } @@ -650,7 +649,7 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { js.LoadJSConstructor(encodeClassName(superClassSym)) } - val receiver = js.This()(jstpe.AnyType) + val receiver = js.This()(currentThisType) val nameTree = genExpr(sym.jsName) if 
(sym.isJSGetter) { @@ -754,7 +753,7 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { genApplyMethodMaybeStatically(receiver, modAccessor, Nil) } } else { - js.This()(encodeClassType(targetSym)) + js.This()(currentThisType) } } @@ -811,7 +810,7 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { def receiver = if (static) genLoadModule(sym.owner) - else js.This()(encodeClassType(currentClass)) + else js.This()(currentThisType) def boxIfNeeded(call: js.Tree): js.Tree = box(call, atPhase(elimErasedValueTypePhase)(sym.info.resultType)) diff --git a/compiler/src/dotty/tools/backend/sjs/JSPositions.scala b/compiler/src/dotty/tools/backend/sjs/JSPositions.scala index 9b19e66058e8..2fd007165952 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSPositions.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSPositions.scala @@ -6,6 +6,7 @@ import java.net.{URI, URISyntaxException} import dotty.tools.dotc.core._ import Contexts._ +import Decorators.em import dotty.tools.dotc.report @@ -31,7 +32,7 @@ class JSPositions()(using Context) { URIMap(from, to) :: Nil } catch { case e: URISyntaxException => - report.error(s"${e.getInput} is not a valid URI") + report.error(em"${e.getInput} is not a valid URI") Nil } } diff --git a/compiler/src/dotty/tools/backend/sjs/JSPrimitives.scala b/compiler/src/dotty/tools/backend/sjs/JSPrimitives.scala index 6b3854ed677f..029273aed54b 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSPrimitives.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSPrimitives.scala @@ -5,6 +5,7 @@ import Names.TermName import Types._ import Contexts._ import Symbols._ +import Decorators.em import dotty.tools.dotc.ast.tpd._ import dotty.tools.backend.jvm.DottyPrimitives @@ -36,12 +37,16 @@ object JSPrimitives { inline val LINKING_INFO = WITH_CONTEXTUAL_JS_CLASS_VALUE + 1 // runtime.linkingInfo inline val DYNAMIC_IMPORT = LINKING_INFO + 1 // runtime.dynamicImport - inline val STRICT_EQ = DYNAMIC_IMPORT + 1 // js.special.strictEquals 
- inline val IN = STRICT_EQ + 1 // js.special.in - inline val INSTANCEOF = IN + 1 // js.special.instanceof - inline val DELETE = INSTANCEOF + 1 // js.special.delete - inline val FORIN = DELETE + 1 // js.special.forin - inline val DEBUGGER = FORIN + 1 // js.special.debugger + inline val STRICT_EQ = DYNAMIC_IMPORT + 1 // js.special.strictEquals + inline val IN = STRICT_EQ + 1 // js.special.in + inline val INSTANCEOF = IN + 1 // js.special.instanceof + inline val DELETE = INSTANCEOF + 1 // js.special.delete + inline val FORIN = DELETE + 1 // js.special.forin + inline val JS_THROW = FORIN + 1 // js.special.throw + inline val JS_TRY_CATCH = JS_THROW + 1 // js.special.tryCatch + inline val WRAP_AS_THROWABLE = JS_TRY_CATCH + 1 // js.special.wrapAsThrowable + inline val UNWRAP_FROM_THROWABLE = WRAP_AS_THROWABLE + 1 // js.special.unwrapFromThrowable + inline val DEBUGGER = UNWRAP_FROM_THROWABLE + 1 // js.special.debugger inline val THROW = DEBUGGER + 1 @@ -90,7 +95,7 @@ class JSPrimitives(ictx: Context) extends DottyPrimitives(ictx) { def addPrimitives(cls: Symbol, method: TermName, code: Int)(using Context): Unit = { val alts = cls.info.member(method).alternatives.map(_.symbol) if (alts.isEmpty) { - report.error(s"Unknown primitive method $cls.$method") + report.error(em"Unknown primitive method $cls.$method") } else { for (s <- alts) addPrimitive(s, code) @@ -125,6 +130,10 @@ class JSPrimitives(ictx: Context) extends DottyPrimitives(ictx) { addPrimitive(jsdefn.Special_instanceof, INSTANCEOF) addPrimitive(jsdefn.Special_delete, DELETE) addPrimitive(jsdefn.Special_forin, FORIN) + addPrimitive(jsdefn.Special_throw, JS_THROW) + addPrimitive(jsdefn.Special_tryCatch, JS_TRY_CATCH) + addPrimitive(jsdefn.Special_wrapAsThrowable, WRAP_AS_THROWABLE) + addPrimitive(jsdefn.Special_unwrapFromThrowable, UNWRAP_FROM_THROWABLE) addPrimitive(jsdefn.Special_debugger, DEBUGGER) addPrimitive(defn.throwMethod, THROW) diff --git a/compiler/src/dotty/tools/dotc/Bench.scala 
b/compiler/src/dotty/tools/dotc/Bench.scala index c9c032b0ae7d..5f5e9fc799b5 100644 --- a/compiler/src/dotty/tools/dotc/Bench.scala +++ b/compiler/src/dotty/tools/dotc/Bench.scala @@ -14,24 +14,22 @@ import scala.annotation.internal.sharable object Bench extends Driver: @sharable private var numRuns = 1 - - private def ntimes(n: Int)(op: => Reporter): Reporter = - (0 until n).foldLeft(emptyReporter)((_, _) => op) - + @sharable private var numCompilers = 1 + @sharable private var waitAfter = -1 + @sharable private var curCompiler = 0 @sharable private var times: Array[Int] = _ override def doCompile(compiler: Compiler, files: List[AbstractFile])(using Context): Reporter = - times = new Array[Int](numRuns) var reporter: Reporter = emptyReporter for i <- 0 until numRuns do + val curRun = curCompiler * numRuns + i val start = System.nanoTime() reporter = super.doCompile(compiler, files) - times(i) = ((System.nanoTime - start) / 1000000).toInt - println(s"time elapsed: ${times(i)}ms") - if ctx.settings.Xprompt.value then + times(curRun) = ((System.nanoTime - start) / 1000000).toInt + println(s"time elapsed: ${times(curRun)}ms") + if ctx.settings.Xprompt.value || waitAfter == curRun + 1 then print("hit to continue >") System.in.nn.read() - println() reporter def extractNumArg(args: Array[String], name: String, default: Int = 1): (Int, Array[String]) = { @@ -42,20 +40,26 @@ object Bench extends Driver: def reportTimes() = val best = times.sorted - val measured = numRuns / 3 + val measured = numCompilers * numRuns / 3 val avgBest = best.take(measured).sum / measured val avgLast = times.reverse.take(measured).sum / measured - println(s"best out of $numRuns runs: ${best(0)}") + println(s"best out of ${numCompilers * numRuns} runs: ${best(0)}") println(s"average out of best $measured: $avgBest") println(s"average out of last $measured: $avgLast") - override def process(args: Array[String], rootCtx: Context): Reporter = + override def process(args: Array[String]): Reporter = 
val (numCompilers, args1) = extractNumArg(args, "#compilers") val (numRuns, args2) = extractNumArg(args1, "#runs") + val (waitAfter, args3) = extractNumArg(args2, "#wait-after", -1) + this.numCompilers = numCompilers this.numRuns = numRuns + this.waitAfter = waitAfter + this.times = new Array[Int](numCompilers * numRuns) var reporter: Reporter = emptyReporter - for i <- 0 until numCompilers do - reporter = super.process(args2, rootCtx) + curCompiler = 0 + while curCompiler < numCompilers do + reporter = super.process(args3) + curCompiler += 1 reportTimes() reporter diff --git a/compiler/src/dotty/tools/dotc/CompilationUnit.scala b/compiler/src/dotty/tools/dotc/CompilationUnit.scala index 44ca582c3c61..8415646eb16c 100644 --- a/compiler/src/dotty/tools/dotc/CompilationUnit.scala +++ b/compiler/src/dotty/tools/dotc/CompilationUnit.scala @@ -16,6 +16,8 @@ import core.Decorators._ import config.{SourceVersion, Feature} import StdNames.nme import scala.annotation.internal.sharable +import scala.util.control.NoStackTrace +import transform.MacroAnnotations class CompilationUnit protected (val source: SourceFile) { @@ -45,6 +47,8 @@ class CompilationUnit protected (val source: SourceFile) { */ var needsInlining: Boolean = false + var hasMacroAnnotations: Boolean = false + /** Set to `true` if inliner added anonymous mirrors that need to be completed */ var needsMirrorSupport: Boolean = false @@ -102,7 +106,7 @@ class CompilationUnit protected (val source: SourceFile) { object CompilationUnit { - class SuspendException extends Exception + class SuspendException extends Exception with NoStackTrace /** Make a compilation unit for top class `clsd` with the contents of the `unpickled` tree */ def apply(clsd: ClassDenotation, unpickled: Tree, forceTrees: Boolean)(using Context): CompilationUnit = @@ -119,6 +123,7 @@ object CompilationUnit { force.traverse(unit1.tpdTree) unit1.needsStaging = force.containsQuote unit1.needsInlining = force.containsInline + 
unit1.hasMacroAnnotations = force.containsMacroAnnotation } unit1 } @@ -131,11 +136,11 @@ object CompilationUnit { if (!mustExist) source else if (source.file.isDirectory) { - report.error(s"expected file, received directory '${source.file.path}'") + report.error(em"expected file, received directory '${source.file.path}'") NoSource } else if (!source.file.exists) { - report.error(s"source file not found: ${source.file.path}") + report.error(em"source file not found: ${source.file.path}") NoSource } else source @@ -147,12 +152,15 @@ object CompilationUnit { var containsQuote = false var containsInline = false var containsCaptureChecking = false + var containsMacroAnnotation = false def traverse(tree: Tree)(using Context): Unit = { - if (tree.symbol.isQuote) - containsQuote = true if tree.symbol.is(Flags.Inline) then containsInline = true tree match + case _: tpd.Quote => + containsQuote = true + case tree: tpd.Apply if tree.symbol == defn.QuotedTypeModule_of => + containsQuote = true case Import(qual, selectors) => tpd.languageImport(qual) match case Some(prefix) => @@ -160,6 +168,9 @@ object CompilationUnit { Feature.handleGlobalLanguageImport(prefix, imported) case _ => case _ => + for annot <- tree.symbol.annotations do + if MacroAnnotations.isMacroAnnotation(annot) then + ctx.compilationUnit.hasMacroAnnotations = true traverseChildren(tree) } } diff --git a/compiler/src/dotty/tools/dotc/Compiler.scala b/compiler/src/dotty/tools/dotc/Compiler.scala index b121a47781e1..a6118732d4ae 100644 --- a/compiler/src/dotty/tools/dotc/Compiler.scala +++ b/compiler/src/dotty/tools/dotc/Compiler.scala @@ -35,6 +35,7 @@ class Compiler { protected def frontendPhases: List[List[Phase]] = List(new Parser) :: // Compiler frontend: scanner, parser List(new TyperPhase) :: // Compiler frontend: namer, typer + List(new CheckUnused.PostTyper) :: // Check for unused elements List(new YCheckPositions) :: // YCheck positions List(new sbt.ExtractDependencies) :: // Sends information on 
classes' dependencies to sbt via callbacks List(new semanticdb.ExtractSemanticDB) :: // Extract info into .semanticdb files @@ -49,6 +50,7 @@ class Compiler { List(new Pickler) :: // Generate TASTY info List(new Inlining) :: // Inline and execute macros List(new PostInlining) :: // Add mirror support for inlined code + List(new CheckUnused.PostInlining) :: // Check for unused elements List(new Staging) :: // Check staging levels and heal staged types List(new Splicing) :: // Replace level 1 splices with holes List(new PickleQuotes) :: // Turn quoted trees into explicit run-time data structures @@ -57,7 +59,8 @@ class Compiler { /** Phases dealing with the transformation from pickled trees to backend trees */ protected def transformPhases: List[List[Phase]] = List(new InstrumentCoverage) :: // Perform instrumentation for code coverage (if -coverage-out is set) - List(new FirstTransform, // Some transformations to put trees into a canonical form + List(new CrossVersionChecks, // Check issues related to deprecated and experimental + new FirstTransform, // Some transformations to put trees into a canonical form new CheckReentrant, // Internal use only: Check that compiled program has no data races involving global vars new ElimPackagePrefixes, // Eliminate references to package prefixes in Select nodes new CookComments, // Cook the comments: expand variables, doc, etc. 
@@ -69,8 +72,7 @@ class Compiler { new ElimRepeated, // Rewrite vararg parameters and arguments new RefChecks) :: // Various checks mostly related to abstract members and overriding List(new init.Checker) :: // Check initialization of objects - List(new CrossVersionChecks, // Check issues related to deprecated and experimental - new ProtectedAccessors, // Add accessors for protected members + List(new ProtectedAccessors, // Add accessors for protected members new ExtensionMethods, // Expand methods of value classes with extension methods new UncacheGivenAliases, // Avoid caching RHS of simple parameterless given aliases new ElimByName, // Map by-name parameters to functions @@ -87,7 +89,8 @@ class Compiler { new sjs.ExplicitJSClasses, // Make all JS classes explicit (Scala.js only) new ExplicitOuter, // Add accessors to outer classes from nested ones. new ExplicitSelf, // Make references to non-trivial self types explicit as casts - new StringInterpolatorOpt) :: // Optimizes raw and s and f string interpolators by rewriting them to string concatenations or formats + new StringInterpolatorOpt, // Optimizes raw and s and f string interpolators by rewriting them to string concatenations or formats + new DropBreaks) :: // Optimize local Break throws by rewriting them List(new PruneErasedDefs, // Drop erased definitions from scopes and simplify erased expressions new UninitializedDefs, // Replaces `compiletime.uninitialized` by `_` new InlinePatterns, // Remove placeholders of inlined patterns diff --git a/compiler/src/dotty/tools/dotc/Driver.scala b/compiler/src/dotty/tools/dotc/Driver.scala index 14a71463c66d..e548cae55ddd 100644 --- a/compiler/src/dotty/tools/dotc/Driver.scala +++ b/compiler/src/dotty/tools/dotc/Driver.scala @@ -30,18 +30,20 @@ class Driver { protected def doCompile(compiler: Compiler, files: List[AbstractFile])(using Context): Reporter = if files.nonEmpty then + var runOrNull = ctx.run try val run = compiler.newRun + runOrNull = run 
run.compile(files) finish(compiler, run) catch case ex: FatalError => report.error(ex.getMessage.nn) // signals that we should fail compilation. - case ex: TypeError => - println(s"${ex.toMessage} while compiling ${files.map(_.path).mkString(", ")}") + case ex: TypeError if !runOrNull.enrichedErrorMessage => + println(runOrNull.enrichErrorMessage(s"${ex.toMessage} while compiling ${files.map(_.path).mkString(", ")}")) throw ex - case ex: Throwable => - println(s"$ex while compiling ${files.map(_.path).mkString(", ")}") + case ex: Throwable if !runOrNull.enrichedErrorMessage => + println(runOrNull.enrichErrorMessage(s"Exception while compiling ${files.map(_.path).mkString(", ")}")) throw ex ctx.reporter @@ -94,7 +96,7 @@ class Driver { val newEntries: List[String] = files .flatMap { file => if !file.exists then - report.error(s"File does not exist: ${file.path}") + report.error(em"File does not exist: ${file.path}") None else file.extension match case "jar" => Some(file.path) @@ -102,10 +104,10 @@ class Driver { TastyFileUtil.getClassPath(file) match case Some(classpath) => Some(classpath) case _ => - report.error(s"Could not load classname from: ${file.path}") + report.error(em"Could not load classname from: ${file.path}") None case _ => - report.error(s"File extension is not `tasty` or `jar`: ${file.path}") + report.error(em"File extension is not `tasty` or `jar`: ${file.path}") None } .distinct @@ -171,7 +173,7 @@ class Driver { * the other overloads without worrying about breaking compatibility * with sbt. */ - final def process(args: Array[String]): Reporter = + def process(args: Array[String]): Reporter = process(args, null: Reporter | Null, null: interfaces.CompilerCallback | Null) /** Entry point to the compiler using a custom `Context`. 
diff --git a/compiler/src/dotty/tools/dotc/Run.scala b/compiler/src/dotty/tools/dotc/Run.scala index 022ffbed5408..944ae794c94f 100644 --- a/compiler/src/dotty/tools/dotc/Run.scala +++ b/compiler/src/dotty/tools/dotc/Run.scala @@ -164,19 +164,23 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint private var finalizeActions = mutable.ListBuffer[() => Unit]() /** Will be set to true if any of the compiled compilation units contains - * a pureFunctions or captureChecking language import. + * a pureFunctions language import. */ var pureFunsImportEncountered = false + /** Will be set to true if any of the compiled compilation units contains + * a captureChecking language import. + */ + var ccImportEncountered = false + + private var myEnrichedErrorMessage = false + def compile(files: List[AbstractFile]): Unit = - try - val sources = files.map(runContext.getSource(_)) - compileSources(sources) - catch - case NonFatal(ex) => - if units.nonEmpty then report.echo(i"exception occurred while compiling $units%, %") - else report.echo(s"exception occurred while compiling ${files.map(_.name).mkString(", ")}") - throw ex + try compileSources(files.map(runContext.getSource(_))) + catch case NonFatal(ex) if !this.enrichedErrorMessage => + val files1 = if units.isEmpty then files else units.map(_.source.file) + report.echo(this.enrichErrorMessage(s"exception occurred while compiling ${files1.map(_.path)}")) + throw ex /** TODO: There's a fundamental design problem here: We assemble phases using `fusePhases` * when we first build the compiler. 
But we modify them with -Yskip, -Ystop @@ -226,9 +230,13 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint ctx.settings.Yskip.value, ctx.settings.YstopBefore.value, stopAfter, ctx.settings.Ycheck.value) ctx.base.usePhases(phases) + if ctx.settings.YnoDoubleBindings.value then + ctx.base.checkNoDoubleBindings = true + def runPhases(using Context) = { var lastPrintedTree: PrintedTree = NoPrintedTree val profiler = ctx.profiler + var phasesWereAdjusted = false for (phase <- ctx.base.allPhases) if (phase.isRunnable) @@ -247,6 +255,11 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint Stats.record(s"retained typed trees at end of $phase", unit.tpdTree.treeSize) ctx.typerState.gc() } + if !phasesWereAdjusted then + phasesWereAdjusted = true + if !Feature.ccEnabledSomewhere then + ctx.base.unlinkPhaseAsDenotTransformer(Phases.checkCapturesPhase.prev) + ctx.base.unlinkPhaseAsDenotTransformer(Phases.checkCapturesPhase) profiler.finished() } @@ -384,3 +397,16 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint given runContext[Dummy_so_its_a_def]: Context = myCtx.nn assert(runContext.runId <= Periods.MaxPossibleRunId) } + +object Run { + extension (run: Run | Null) + def enrichedErrorMessage: Boolean = if run == null then false else run.myEnrichedErrorMessage + def enrichErrorMessage(errorMessage: String)(using Context): String = + if run == null then + report.enrichErrorMessage(errorMessage) + else if !run.enrichedErrorMessage then + run.myEnrichedErrorMessage = true + report.enrichErrorMessage(errorMessage) + else + errorMessage +} diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index 1e1db19bcf25..f0580c29e762 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -6,6 +6,7 @@ import core._ import util.Spans._, Types._, Contexts._, Constants._, 
Names._, NameOps._, Flags._ import Symbols._, StdNames._, Trees._, ContextOps._ import Decorators._, transform.SymUtils._ +import Annotations.Annotation import NameKinds.{UniqueName, EvidenceParamName, DefaultGetterName, WildcardParamName} import typer.{Namer, Checking} import util.{Property, SourceFile, SourcePosition, Chars} @@ -117,7 +118,7 @@ object desugar { if (local.exists) (defctx.owner.thisType select local).dealiasKeepAnnots else { def msg = - s"no matching symbol for ${tp.symbol.showLocated} in ${defctx.owner} / ${defctx.effectiveScope.toList}" + em"no matching symbol for ${tp.symbol.showLocated} in ${defctx.owner} / ${defctx.effectiveScope.toList}" ErrorType(msg).assertingErrorsReported(msg) } case _ => @@ -165,32 +166,41 @@ object desugar { * * Generate setter where needed */ - def valDef(vdef0: ValDef)(using Context): Tree = { + def valDef(vdef0: ValDef)(using Context): Tree = val vdef @ ValDef(_, tpt, rhs) = vdef0 - val mods = vdef.mods - val valName = normalizeName(vdef, tpt).asTermName - val vdef1 = cpy.ValDef(vdef)(name = valName) + var mods1 = vdef.mods + + def dropInto(tpt: Tree): Tree = tpt match + case Into(tpt1) => + mods1 = vdef.mods.withAddedAnnotation( + TypedSplice( + Annotation(defn.AllowConversionsAnnot, tpt.span.startPos).tree)) + tpt1 + case ByNameTypeTree(tpt1) => + cpy.ByNameTypeTree(tpt)(dropInto(tpt1)) + case PostfixOp(tpt1, op) if op.name == tpnme.raw.STAR => + cpy.PostfixOp(tpt)(dropInto(tpt1), op) + case _ => + tpt + + val vdef1 = cpy.ValDef(vdef)(name = valName, tpt = dropInto(tpt)) + .withMods(mods1) - if (isSetterNeeded(vdef)) { - // TODO: copy of vdef as getter needed? - // val getter = ValDef(mods, name, tpt, rhs) withPos vdef.pos? - // right now vdef maps via expandedTree to a thicket which concerns itself. - // I don't see a problem with that but if there is one we can avoid it by making a copy here. 
+ if isSetterNeeded(vdef) then val setterParam = makeSyntheticParameter(tpt = SetterParamTree().watching(vdef)) // The rhs gets filled in later, when field is generated and getter has parameters (see Memoize miniphase) val setterRhs = if (vdef.rhs.isEmpty) EmptyTree else unitLiteral val setter = cpy.DefDef(vdef)( - name = valName.setterName, - paramss = (setterParam :: Nil) :: Nil, - tpt = TypeTree(defn.UnitType), - rhs = setterRhs - ).withMods((mods | Accessor) &~ (CaseAccessor | GivenOrImplicit | Lazy)) - .dropEndMarker() // the end marker should only appear on the getter definition + name = valName.setterName, + paramss = (setterParam :: Nil) :: Nil, + tpt = TypeTree(defn.UnitType), + rhs = setterRhs + ).withMods((vdef.mods | Accessor) &~ (CaseAccessor | GivenOrImplicit | Lazy)) + .dropEndMarker() // the end marker should only appear on the getter definition Thicket(vdef1, setter) - } else vdef1 - } + end valDef def makeImplicitParameters(tpts: List[Tree], implicitFlag: FlagSet, forPrimaryConstructor: Boolean = false)(using Context): List[ValDef] = for (tpt <- tpts) yield { @@ -328,9 +338,9 @@ object desugar { def quotedPattern(tree: untpd.Tree, expectedTpt: untpd.Tree)(using Context): untpd.Tree = { def adaptToExpectedTpt(tree: untpd.Tree): untpd.Tree = tree match { // Add the expected type as an ascription - case _: untpd.Splice => + case _: untpd.SplicePattern => untpd.Typed(tree, expectedTpt).withSpan(tree.span) - case Typed(expr: untpd.Splice, tpt) => + case Typed(expr: untpd.SplicePattern, tpt) => cpy.Typed(tree)(expr, untpd.makeAndType(tpt, expectedTpt).withSpan(tpt.span)) // Propagate down the expected type to the leafs of the expression @@ -905,16 +915,16 @@ object desugar { name = normalizeName(mdef, mdef.tpt).asTermName, paramss = if mdef.name.isRightAssocOperatorName then - val (typaramss, paramss) = mdef.paramss.span(isTypeParamClause) // first extract type parameters + val (rightTyParams, paramss) = mdef.paramss.span(isTypeParamClause) // first 
extract type parameters paramss match - case params :: paramss1 => // `params` must have a single parameter and without `given` flag + case rightParam :: paramss1 => // `rightParam` must have a single parameter and without `given` flag def badRightAssoc(problem: String) = - report.error(i"right-associative extension method $problem", mdef.srcPos) + report.error(em"right-associative extension method $problem", mdef.srcPos) extParamss ++ mdef.paramss - params match + rightParam match case ValDefs(vparam :: Nil) => if !vparam.mods.is(Given) then // we merge the extension parameters with the method parameters, @@ -924,8 +934,10 @@ object desugar { // def %:[E](f: F)(g: G)(using H): Res = ??? // will be encoded as // def %:[A](using B)[E](f: F)(c: C)(using D)(g: G)(using H): Res = ??? - val (leadingUsing, otherExtParamss) = extParamss.span(isUsingOrTypeParamClause) - leadingUsing ::: typaramss ::: params :: otherExtParamss ::: paramss1 + // + // If you change the names of the clauses below, also change them in right-associative-extension-methods.md + val (leftTyParamsAndLeadingUsing, leftParamAndTrailingUsing) = extParamss.span(isUsingOrTypeParamClause) + leftTyParamsAndLeadingUsing ::: rightTyParams ::: rightParam :: leftParamAndTrailingUsing ::: paramss1 else badRightAssoc("cannot start with using clause") case _ => @@ -1137,7 +1149,7 @@ object desugar { def errorOnGivenBinding(bind: Bind)(using Context): Boolean = report.error( em"""${hl("given")} patterns are not allowed in a ${hl("val")} definition, - |please bind to an identifier and use an alias given.""".stripMargin, bind) + |please bind to an identifier and use an alias given.""", bind) false def isTuplePattern(arity: Int): Boolean = pat match { @@ -1237,7 +1249,7 @@ object desugar { def checkOpaqueAlias(tree: MemberDef)(using Context): MemberDef = def check(rhs: Tree): MemberDef = rhs match case bounds: TypeBoundsTree if bounds.alias.isEmpty => - report.error(i"opaque type must have a right-hand side", 
tree.srcPos) + report.error(em"opaque type must have a right-hand side", tree.srcPos) tree.withMods(tree.mods.withoutFlags(Opaque)) case LambdaTypeTree(_, body) => check(body) case _ => tree @@ -1454,7 +1466,10 @@ object desugar { val param = makeSyntheticParameter( tpt = if params.exists(_.tpt.isEmpty) then TypeTree() - else Tuple(params.map(_.tpt))) + else Tuple(params.map(_.tpt)), + flags = + if params.nonEmpty && params.head.mods.is(Given) then SyntheticTermParam | Given + else SyntheticTermParam) def selector(n: Int) = if (isGenericTuple) Apply(Select(refOfDef(param), nme.apply), Literal(Constant(n))) else Select(refOfDef(param), nme.selectorName(n)) @@ -1483,10 +1498,10 @@ object desugar { case vd: ValDef => vd } - def makeContextualFunction(formals: List[Tree], body: Tree, isErased: Boolean)(using Context): Function = { - val mods = if (isErased) Given | Erased else Given + def makeContextualFunction(formals: List[Tree], body: Tree, erasedParams: List[Boolean])(using Context): Function = { + val mods = Given val params = makeImplicitParameters(formals, mods) - FunctionWithMods(params, body, Modifiers(mods)) + FunctionWithMods(params, body, Modifiers(mods), erasedParams) } private def derivedValDef(original: Tree, named: NameTree, tpt: Tree, rhs: Tree, mods: Modifiers)(using Context) = { @@ -1717,7 +1732,7 @@ object desugar { val applyVParams = vargs.zipWithIndex.map { case (p: ValDef, _) => p.withAddedFlags(mods.flags) - case (p, n) => makeSyntheticParameter(n + 1, p).withAddedFlags(mods.flags) + case (p, n) => makeSyntheticParameter(n + 1, p).withAddedFlags(mods.flags.toTermFlags) } RefinedTypeTree(polyFunctionTpt, List( DefDef(nme.apply, applyTParams :: applyVParams :: Nil, res, EmptyTree).withFlags(Synthetic) @@ -1809,16 +1824,7 @@ object desugar { flatTree(pats1 map (makePatDef(tree, mods, _, rhs))) case ext: ExtMethods => Block(List(ext), Literal(Constant(())).withSpan(ext.span)) - case CapturingTypeTree(refs, parent) => - // convert `{refs} T` to `T 
@retains refs` - // `{refs}-> T` to `-> (T @retainsByName refs)` - def annotate(annotName: TypeName, tp: Tree) = - Annotated(tp, New(scalaAnnotationDot(annotName), List(refs))) - parent match - case ByNameTypeTree(restpt) => - cpy.ByNameTypeTree(parent)(annotate(tpnme.retainsByName, restpt)) - case _ => - annotate(tpnme.retains, parent) + case f: FunctionWithMods if f.hasErasedParams => makeFunctionWithValDefs(f, pt) } desugared.withSpan(tree.span) } @@ -1894,6 +1900,28 @@ object desugar { TypeDef(tpnme.REFINE_CLASS, impl).withFlags(Trait) } + /** Ensure the given function tree use only ValDefs for parameters. + * For example, + * FunctionWithMods(List(TypeTree(A), TypeTree(B)), body, mods, erasedParams) + * gets converted to + * FunctionWithMods(List(ValDef(x$1, A), ValDef(x$2, B)), body, mods, erasedParams) + */ + def makeFunctionWithValDefs(tree: Function, pt: Type)(using Context): Function = { + val Function(args, result) = tree + args match { + case (_ : ValDef) :: _ => tree // ValDef case can be easily handled + case _ if !ctx.mode.is(Mode.Type) => tree + case _ => + val applyVParams = args.zipWithIndex.map { + case (p, n) => makeSyntheticParameter(n + 1, p) + } + tree match + case tree: FunctionWithMods => + untpd.FunctionWithMods(applyVParams, result, tree.mods, tree.erasedParams) + case _ => untpd.Function(applyVParams, result) + } + } + /** Returns list of all pattern variables, possibly with their types, * without duplicates */ @@ -1948,15 +1976,13 @@ object desugar { trees foreach collect case Block(Nil, expr) => collect(expr) - case Quote(expr) => + case Quote(body, _) => new UntypedTreeTraverser { def traverse(tree: untpd.Tree)(using Context): Unit = tree match { - case Splice(expr) => collect(expr) + case SplicePattern(body, _) => collect(body) case _ => traverseChildren(tree) } - }.traverse(expr) - case CapturingTypeTree(refs, parent) => - collect(parent) + }.traverse(body) case _ => } collect(tree) diff --git 
a/compiler/src/dotty/tools/dotc/ast/DesugarEnums.scala b/compiler/src/dotty/tools/dotc/ast/DesugarEnums.scala index 096a885dcf32..a1c3c0ed0775 100644 --- a/compiler/src/dotty/tools/dotc/ast/DesugarEnums.scala +++ b/compiler/src/dotty/tools/dotc/ast/DesugarEnums.scala @@ -75,8 +75,8 @@ object DesugarEnums { def problem = if (!tparam.isOneOf(VarianceFlags)) "is invariant" else "has bounds that depend on a type parameter in the same parameter list" - errorType(i"""cannot determine type argument for enum parent $enumClass, - |type parameter $tparam $problem""", ctx.source.atSpan(span)) + errorType(em"""cannot determine type argument for enum parent $enumClass, + |type parameter $tparam $problem""", ctx.source.atSpan(span)) } } TypeTree(enumClass.typeRef.appliedTo(targs)).withSpan(span) @@ -216,7 +216,7 @@ object DesugarEnums { case Ident(name) => val matches = tparamNames.contains(name) if (matches && (caseTypeParams.nonEmpty || vparamss.isEmpty)) - report.error(i"illegal reference to type parameter $name from enum case", tree.srcPos) + report.error(em"illegal reference to type parameter $name from enum case", tree.srcPos) matches case LambdaTypeTree(lambdaParams, body) => underBinders(lambdaParams, foldOver(x, tree)) diff --git a/compiler/src/dotty/tools/dotc/ast/MainProxies.scala b/compiler/src/dotty/tools/dotc/ast/MainProxies.scala index 040582476e96..c0cf2c0d1b81 100644 --- a/compiler/src/dotty/tools/dotc/ast/MainProxies.scala +++ b/compiler/src/dotty/tools/dotc/ast/MainProxies.scala @@ -56,7 +56,7 @@ object MainProxies { def addArgs(call: untpd.Tree, mt: MethodType, idx: Int): untpd.Tree = if (mt.isImplicitMethod) { - report.error(s"@main method cannot have implicit parameters", pos) + report.error(em"@main method cannot have implicit parameters", pos) call } else { @@ -74,7 +74,7 @@ object MainProxies { mt.resType match { case restpe: MethodType => if (mt.paramInfos.lastOption.getOrElse(NoType).isRepeatedParam) - report.error(s"varargs parameter of @main method 
must come last", pos) + report.error(em"varargs parameter of @main method must come last", pos) addArgs(call1, restpe, idx + args.length) case _ => call1 @@ -83,7 +83,7 @@ object MainProxies { var result: List[TypeDef] = Nil if (!mainFun.owner.isStaticOwner) - report.error(s"@main method is not statically accessible", pos) + report.error(em"@main method is not statically accessible", pos) else { var call = ref(mainFun.termRef) mainFun.info match { @@ -91,9 +91,9 @@ object MainProxies { case mt: MethodType => call = addArgs(call, mt, 0) case _: PolyType => - report.error(s"@main method cannot have type parameters", pos) + report.error(em"@main method cannot have type parameters", pos) case _ => - report.error(s"@main can only annotate a method", pos) + report.error(em"@main can only annotate a method", pos) } val errVar = Ident(nme.error) val handler = CaseDef( @@ -203,7 +203,7 @@ object MainProxies { )) (sym, paramAnnotations.toVector, defaultValueSymbols(scope, sym), stat.rawComment) :: Nil case mainAnnot :: others => - report.error(s"method cannot have multiple main annotations", mainAnnot.tree) + report.error(em"method cannot have multiple main annotations", mainAnnot.tree) Nil } case stat @ TypeDef(_, impl: Template) if stat.symbol.is(Module) => @@ -379,26 +379,26 @@ object MainProxies { end generateMainClass if (!mainFun.owner.isStaticOwner) - report.error(s"main method is not statically accessible", pos) + report.error(em"main method is not statically accessible", pos) None else mainFun.info match { case _: ExprType => Some(generateMainClass(unitToValue(ref(mainFun.termRef)), Nil, Nil)) case mt: MethodType => if (mt.isImplicitMethod) - report.error(s"main method cannot have implicit parameters", pos) + report.error(em"main method cannot have implicit parameters", pos) None else mt.resType match case restpe: MethodType => - report.error(s"main method cannot be curried", pos) + report.error(em"main method cannot be curried", pos) None case _ => 
Some(generateMainClass(unitToValue(Apply(ref(mainFun.termRef), argRefs(mt))), argValDefs(mt), parameterInfos(mt))) case _: PolyType => - report.error(s"main method cannot have type parameters", pos) + report.error(em"main method cannot have type parameters", pos) None case _ => - report.error(s"main can only annotate a method", pos) + report.error(em"main can only annotate a method", pos) None } } diff --git a/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala b/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala index 054ffe66f323..ace396d1e583 100644 --- a/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala +++ b/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala @@ -4,7 +4,7 @@ package ast import core.Contexts._ import core.Decorators._ import util.Spans._ -import Trees.{MemberDef, DefTree, WithLazyField} +import Trees.{MemberDef, DefTree, WithLazyFields} import dotty.tools.dotc.core.Types.AnnotatedType import dotty.tools.dotc.core.Types.ImportType import dotty.tools.dotc.core.Types.Type @@ -106,16 +106,14 @@ object NavigateAST { // FIXME: We shouldn't be manually forcing trees here, we should replace // our usage of `productIterator` by something in `Positioned` that takes // care of low-level details like this for us. - p match { - case p: WithLazyField[?] => - p.forceIfLazy + p match + case p: WithLazyFields => p.forceFields() case _ => - } val iterator = p match case defdef: DefTree[?] 
=> p.productIterator ++ defdef.mods.productIterator case _ => - p.productIterator + p.productIterator childPath(iterator, p :: path) } else { diff --git a/compiler/src/dotty/tools/dotc/ast/Positioned.scala b/compiler/src/dotty/tools/dotc/ast/Positioned.scala index d14addb8c9c7..dd783be7a9e1 100644 --- a/compiler/src/dotty/tools/dotc/ast/Positioned.scala +++ b/compiler/src/dotty/tools/dotc/ast/Positioned.scala @@ -154,14 +154,17 @@ abstract class Positioned(implicit @constructorOnly src: SourceFile) extends Src } } + private class LastPosRef: + var positioned: Positioned | Null = null + var span = NoSpan + /** Check that all positioned items in this tree satisfy the following conditions: * - Parent spans contain child spans * - If item is a non-empty tree, it has a position */ def checkPos(nonOverlapping: Boolean)(using Context): Unit = try { import untpd._ - var lastPositioned: Positioned | Null = null - var lastSpan = NoSpan + val last = LastPosRef() def check(p: Any): Unit = p match { case p: Positioned => assert(span contains p.span, @@ -181,19 +184,19 @@ abstract class Positioned(implicit @constructorOnly src: SourceFile) extends Src case _: XMLBlock => // FIXME: Trees generated by the XML parser do not satisfy `checkPos` case _: WildcardFunction - if lastPositioned.isInstanceOf[ValDef] && !p.isInstanceOf[ValDef] => + if last.positioned.isInstanceOf[ValDef] && !p.isInstanceOf[ValDef] => // ignore transition from last wildcard parameter to body case _ => - assert(!lastSpan.exists || !p.span.exists || lastSpan.end <= p.span.start, + assert(!last.span.exists || !p.span.exists || last.span.end <= p.span.start, i"""position error, child positions overlap or in wrong order |parent = $this - |1st child = $lastPositioned - |1st child span = $lastSpan + |1st child = ${last.positioned} + |1st child span = ${last.span} |2nd child = $p |2nd child span = ${p.span}""".stripMargin) } - lastPositioned = p - lastSpan = p.span + last.positioned = p + last.span = p.span 
p.checkPos(nonOverlapping) case m: untpd.Modifiers => m.annotations.foreach(check) diff --git a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala index ff59a795d818..2d335d1ed380 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala @@ -14,10 +14,7 @@ import scala.collection.mutable import scala.annotation.tailrec -trait TreeInfo[T >: Untyped <: Type] { self: Trees.Instance[T] => - - // Note: the <: Type constraint looks necessary (and is needed to make the file compile in dotc). - // But Scalac accepts the program happily without it. Need to find out why. +trait TreeInfo[T <: Untyped] { self: Trees.Instance[T] => def unsplice(tree: Trees.Tree[T]): Trees.Tree[T] = tree @@ -105,6 +102,12 @@ trait TreeInfo[T >: Untyped <: Type] { self: Trees.Instance[T] => case _ => tree } + def stripTyped(tree: Tree): Tree = unsplice(tree) match + case Typed(expr, _) => + stripTyped(expr) + case _ => + tree + /** The number of arguments in an application */ def numArgs(tree: Tree): Int = unsplice(tree) match { case Apply(fn, args) => numArgs(fn) + args.length @@ -113,6 +116,24 @@ trait TreeInfo[T >: Untyped <: Type] { self: Trees.Instance[T] => case _ => 0 } + /** The type arguments of a possibly curried call */ + def typeArgss(tree: Tree): List[List[Tree]] = + @tailrec + def loop(tree: Tree, argss: List[List[Tree]]): List[List[Tree]] = tree match + case TypeApply(fn, args) => loop(fn, args :: argss) + case Apply(fn, args) => loop(fn, argss) + case _ => argss + loop(tree, Nil) + + /** The term arguments of a possibly curried call */ + def termArgss(tree: Tree): List[List[Tree]] = + @tailrec + def loop(tree: Tree, argss: List[List[Tree]]): List[List[Tree]] = tree match + case Apply(fn, args) => loop(fn, args :: argss) + case TypeApply(fn, args) => loop(fn, argss) + case _ => argss + loop(tree, Nil) + /** All term arguments of an application in a single flattened list */ 
def allArguments(tree: Tree): List[Tree] = unsplice(tree) match { case Apply(fn, args) => allArguments(fn) ::: args @@ -202,9 +223,6 @@ trait TreeInfo[T >: Untyped <: Type] { self: Trees.Instance[T] => /** Strip `=> T` to `T` and (under pureFunctions) `{refs}-> T` to `T` */ def stripByNameType(tree: Tree)(using Context): Tree = unsplice(tree) match case ByNameTypeTree(t1) => t1 - case untpd.CapturingTypeTree(_, parent) => - val parent1 = stripByNameType(parent) - if parent1 eq parent then tree else parent1 case _ => tree /** All type and value parameter symbols of this DefDef */ @@ -298,7 +316,7 @@ trait TreeInfo[T >: Untyped <: Type] { self: Trees.Instance[T] => */ def parentsKind(parents: List[Tree])(using Context): FlagSet = parents match { case Nil => NoInitsInterface - case Apply(_, _ :: _) :: _ => EmptyFlags + case Apply(_, _ :: _) :: _ | Block(_, _) :: _ => EmptyFlags case _ :: parents1 => parentsKind(parents1) } @@ -311,6 +329,50 @@ trait TreeInfo[T >: Untyped <: Type] { self: Trees.Instance[T] => case Block(_, expr) => forallResults(expr, p) case _ => p(tree) } + + def appliedCore(tree: Tree): Tree = tree match { + case Apply(fn, _) => appliedCore(fn) + case TypeApply(fn, _) => appliedCore(fn) + case AppliedTypeTree(fn, _) => appliedCore(fn) + case tree => tree + } + + /** Is tree an application with result `this.type`? + * Accept `b.addOne(x)` and also `xs(i) += x` + * where the op is an assignment operator. 
+ */ + def isThisTypeResult(tree: Tree)(using Context): Boolean = appliedCore(tree) match { + case fun @ Select(receiver, op) => + val argss = termArgss(tree) + tree.tpe match { + case ThisType(tref) => + tref.symbol == receiver.symbol + case tref: TermRef => + tref.symbol == receiver.symbol || argss.exists(_.exists(tref.symbol == _.symbol)) + case _ => + def checkSingle(sym: Symbol): Boolean = + (sym == receiver.symbol) || { + receiver match { + case Apply(_, _) => op.isOpAssignmentName // xs(i) += x + case _ => receiver.symbol != NoSymbol && + (receiver.symbol.isGetter || receiver.symbol.isField) // xs.addOne(x) for var xs + } + } + @tailrec def loop(mt: Type): Boolean = mt match { + case m: MethodType => + m.resType match { + case ThisType(tref) => checkSingle(tref.symbol) + case tref: TermRef => checkSingle(tref.symbol) + case restpe => loop(restpe) + } + case PolyType(_, restpe) => loop(restpe) + case _ => false + } + fun.symbol != NoSymbol && loop(fun.symbol.info) + } + case _ => + tree.tpe.isInstanceOf[ThisType] + } } trait UntypedTreeInfo extends TreeInfo[Untyped] { self: Trees.Instance[Untyped] => @@ -334,6 +396,8 @@ trait UntypedTreeInfo extends TreeInfo[Untyped] { self: Trees.Instance[Untyped] Some(tree) case Block(Nil, expr) => functionWithUnknownParamType(expr) + case NamedArg(_, expr) => + functionWithUnknownParamType(expr) case _ => None } @@ -400,19 +464,21 @@ trait UntypedTreeInfo extends TreeInfo[Untyped] { self: Trees.Instance[Untyped] } } - /** Under pureFunctions: A builder and extractor for `=> T`, which is an alias for `{*}-> T`. - * Only trees of the form `=> T` are matched; trees written directly as `{*}-> T` + /** Under pureFunctions: A builder and extractor for `=> T`, which is an alias for `->{cap} T`. + * Only trees of the form `=> T` are matched; trees written directly as `->{cap} T` * are ignored by the extractor. 
*/ object ImpureByNameTypeTree: - def apply(tp: ByNameTypeTree)(using Context): untpd.CapturingTypeTree = - untpd.CapturingTypeTree( - untpd.captureRoot.withSpan(tp.span.startPos) :: Nil, tp) + def apply(tp: Tree)(using Context): untpd.ByNameTypeTree = + untpd.ByNameTypeTree( + untpd.CapturesAndResult( + untpd.captureRoot.withSpan(tp.span.startPos) :: Nil, tp)) - def unapply(tp: Tree)(using Context): Option[ByNameTypeTree] = tp match - case untpd.CapturingTypeTree(id @ Select(_, nme.CAPTURE_ROOT) :: Nil, bntp: ByNameTypeTree) - if id.span == bntp.span.startPos => Some(bntp) + def unapply(tp: Tree)(using Context): Option[Tree] = tp match + case untpd.ByNameTypeTree( + untpd.CapturesAndResult(id @ Select(_, nme.CAPTURE_ROOT) :: Nil, result)) + if id.span == result.span.startPos => Some(result) case _ => None end ImpureByNameTypeTree } @@ -686,24 +752,6 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => } } - /** The type arguments of a possibly curried call */ - def typeArgss(tree: Tree): List[List[Tree]] = - @tailrec - def loop(tree: Tree, argss: List[List[Tree]]): List[List[Tree]] = tree match - case TypeApply(fn, args) => loop(fn, args :: argss) - case Apply(fn, args) => loop(fn, argss) - case _ => argss - loop(tree, Nil) - - /** The term arguments of a possibly curried call */ - def termArgss(tree: Tree): List[List[Tree]] = - @tailrec - def loop(tree: Tree, argss: List[List[Tree]]): List[List[Tree]] = tree match - case Apply(fn, args) => loop(fn, args :: argss) - case TypeApply(fn, args) => loop(fn, argss) - case _ => argss - loop(tree, Nil) - /** The type and term arguments of a possibly curried call, in the order they are given */ def allArgss(tree: Tree): List[List[Tree]] = @tailrec @@ -746,8 +794,6 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => Some(meth) case Block(Nil, expr) => unapply(expr) - case Inlined(_, bindings, expr) if bindings.forall(isPureBinding) => - unapply(expr) case _ => None } @@ 
-791,10 +837,12 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => /** The symbols defined locally in a statement list */ def localSyms(stats: List[Tree])(using Context): List[Symbol] = - val locals = new mutable.ListBuffer[Symbol] - for stat <- stats do - if stat.isDef && stat.symbol.exists then locals += stat.symbol - locals.toList + if stats.isEmpty then Nil + else + val locals = new mutable.ListBuffer[Symbol] + for stat <- stats do + if stat.isDef && stat.symbol.exists then locals += stat.symbol + locals.toList /** If `tree` is a DefTree, the symbol defined by it, otherwise NoSymbol */ def definedSym(tree: Tree)(using Context): Symbol = @@ -913,7 +961,7 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => && tree.isTerm && { val qualType = tree.qualifier.tpe - hasRefinement(qualType) && !qualType.derivesFrom(defn.PolyFunctionClass) + hasRefinement(qualType) && !defn.isRefinedFunctionType(qualType) } def loop(tree: Tree): Boolean = tree match case TypeApply(fun, _) => @@ -977,33 +1025,19 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => case t => assert(t.span.exists, i"$t") } - /** Extractors for quotes */ - object Quoted { + object QuotedTypeOf { /** Extracts the content of a quoted tree. * The result can be the contents of a term or type quote, which * will return a term or type tree respectively. */ def unapply(tree: tpd.Apply)(using Context): Option[tpd.Tree] = - if tree.symbol == defn.QuotedRuntime_exprQuote then - // quoted.runtime.Expr.quote[T]() - Some(tree.args.head) - else if tree.symbol == defn.QuotedTypeModule_of then + if tree.symbol == defn.QuotedTypeModule_of then // quoted.Type.of[](quotes) val TypeApply(_, body :: _) = tree.fun: @unchecked Some(body) else None } - /** Extractors for splices */ - object Spliced { - /** Extracts the content of a spliced expression tree. - * The result can be the contents of a term splice, which - * will return a term tree. 
- */ - def unapply(tree: tpd.Apply)(using Context): Option[tpd.Tree] = - if tree.symbol.isExprSplice then Some(tree.args.head) else None - } - /** Extractors for type splices */ object SplicedType { /** Extracts the content of a spliced type tree. @@ -1040,7 +1074,7 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => case Inlined(_, Nil, expr) => unapply(expr) case Block(Nil, expr) => unapply(expr) case _ => - tree.tpe.widenTermRefExpr.normalized match + tree.tpe.widenTermRefExpr.dealias.normalized match case ConstantType(Constant(x)) => Some(x) case _ => None } diff --git a/compiler/src/dotty/tools/dotc/ast/TreeMapWithImplicits.scala b/compiler/src/dotty/tools/dotc/ast/TreeMapWithImplicits.scala index caf8d68442f6..ae674c25dc3d 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeMapWithImplicits.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeMapWithImplicits.scala @@ -30,16 +30,10 @@ class TreeMapWithImplicits extends tpd.TreeMapWithPreciseStatContexts { private def patternScopeCtx(pattern: Tree)(using Context): Context = { val nestedCtx = ctx.fresh.setNewScope - new TreeTraverser { - def traverse(tree: Tree)(using Context): Unit = { - tree match { - case d: DefTree if d.symbol.isOneOf(GivenOrImplicitVal) => - nestedCtx.enter(d.symbol) - case _ => - } - traverseChildren(tree) - } - }.traverse(pattern) + pattern.foreachSubTree { + case d: DefTree if d.symbol.isOneOf(GivenOrImplicitVal) => nestedCtx.enter(d.symbol) + case _ => + } nestedCtx } @@ -55,10 +49,10 @@ class TreeMapWithImplicits extends tpd.TreeMapWithPreciseStatContexts { transform(tree.tpt), transform(tree.rhs)(using nestedScopeCtx(tree.paramss.flatten))) } - case impl @ Template(constr, parents, self, _) => + case impl @ Template(constr, _, self, _) => cpy.Template(tree)( transformSub(constr), - transform(parents)(using ctx.superCallContext), + transform(impl.parents)(using ctx.superCallContext), Nil, transformSelf(self), transformStats(impl.body, tree.symbol)) diff --git 
a/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala b/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala index 71998aff9304..955892b2ae22 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala @@ -43,7 +43,7 @@ class TreeTypeMap( def copy( typeMap: Type => Type, - treeMap: tpd.Tree => tpd.Tree, + treeMap: Tree => Tree, oldOwners: List[Symbol], newOwners: List[Symbol], substFrom: List[Symbol], @@ -85,26 +85,42 @@ class TreeTypeMap( updateDecls(prevStats.tail, newStats.tail) } - def transformInlined(tree: tpd.Inlined)(using Context): tpd.Tree = + def transformInlined(tree: Inlined)(using Context): Tree = val Inlined(call, bindings, expanded) = tree val (tmap1, bindings1) = transformDefs(bindings) val expanded1 = tmap1.transform(expanded) cpy.Inlined(tree)(call, bindings1, expanded1) - override def transform(tree: tpd.Tree)(using Context): tpd.Tree = treeMap(tree) match { - case impl @ Template(constr, parents, self, _) => + override def transform(tree: Tree)(using Context): Tree = treeMap(tree) match { + case impl @ Template(constr, _, self, _) => val tmap = withMappedSyms(localSyms(impl :: self :: Nil)) cpy.Template(impl)( constr = tmap.transformSub(constr), - parents = parents.mapconserve(transform), + parents = impl.parents.mapconserve(transform), self = tmap.transformSub(self), body = impl.body mapconserve (tmap.transform(_)(using ctx.withOwner(mapOwner(impl.symbol.owner)))) ).withType(tmap.mapType(impl.tpe)) case tree1 => tree1.withType(mapType(tree1.tpe)) match { - case id: Ident if tpd.needsSelect(id.tpe) => - ref(id.tpe.asInstanceOf[TermRef]).withSpan(id.span) + case id: Ident => + if needsSelect(id.tpe) then + ref(id.tpe.asInstanceOf[TermRef]).withSpan(id.span) + else + super.transform(id) + case sel: Select => + if needsIdent(sel.tpe) then + ref(sel.tpe.asInstanceOf[TermRef]).withSpan(sel.span) + else + super.transform(sel) + case app: Apply => + super.transform(app) + case blk @ 
Block(stats, expr) => + val (tmap1, stats1) = transformDefs(stats) + val expr1 = tmap1.transform(expr) + cpy.Block(blk)(stats1, expr1) + case lit @ Literal(Constant(tpe: Type)) => + cpy.Literal(lit)(Constant(mapType(tpe))) case ddef @ DefDef(name, paramss, tpt, _) => val (tmap1, paramss1) = transformAllParamss(paramss) val res = cpy.DefDef(ddef)(name, paramss1, tmap1.transform(tpt), tmap1.transform(ddef.rhs)) @@ -117,10 +133,6 @@ class TreeTypeMap( case tdef @ LambdaTypeTree(tparams, body) => val (tmap1, tparams1) = transformDefs(tparams) cpy.LambdaTypeTree(tdef)(tparams1, tmap1.transform(body)) - case blk @ Block(stats, expr) => - val (tmap1, stats1) = transformDefs(stats) - val expr1 = tmap1.transform(expr) - cpy.Block(blk)(stats1, expr1) case inlined: Inlined => transformInlined(inlined) case cdef @ CaseDef(pat, guard, rhs) => @@ -134,23 +146,16 @@ class TreeTypeMap( val bind1 = tmap.transformSub(bind) val expr1 = tmap.transform(expr) cpy.Labeled(labeled)(bind1, expr1) - case tree @ Hole(_, _, args, content, tpt) => - val args1 = args.mapConserve(transform) - val content1 = transform(content) - val tpt1 = transform(tpt) - cpy.Hole(tree)(args = args1, content = content1, tpt = tpt1) - case lit @ Literal(Constant(tpe: Type)) => - cpy.Literal(lit)(Constant(mapType(tpe))) case tree1 => super.transform(tree1) } } - override def transformStats(trees: List[tpd.Tree], exprOwner: Symbol)(using Context): List[Tree] = + override def transformStats(trees: List[Tree], exprOwner: Symbol)(using Context): List[Tree] = transformDefs(trees)._2 - def transformDefs[TT <: tpd.Tree](trees: List[TT])(using Context): (TreeTypeMap, List[TT]) = { - val tmap = withMappedSyms(tpd.localSyms(trees)) + def transformDefs[TT <: Tree](trees: List[TT])(using Context): (TreeTypeMap, List[TT]) = { + val tmap = withMappedSyms(localSyms(trees)) (tmap, tmap.transformSub(trees)) } @@ -165,7 +170,7 @@ class TreeTypeMap( case nil => (this, paramss) - def apply[ThisTree <: tpd.Tree](tree: ThisTree): 
ThisTree = transform(tree).asInstanceOf[ThisTree] + def apply[ThisTree <: Tree](tree: ThisTree): ThisTree = transform(tree).asInstanceOf[ThisTree] def apply(annot: Annotation): Annotation = annot.derivedAnnotation(apply(annot.tree)) diff --git a/compiler/src/dotty/tools/dotc/ast/Trees.scala b/compiler/src/dotty/tools/dotc/ast/Trees.scala index 1159d13d5aef..54c15b9909fa 100644 --- a/compiler/src/dotty/tools/dotc/ast/Trees.scala +++ b/compiler/src/dotty/tools/dotc/ast/Trees.scala @@ -15,11 +15,13 @@ import config.Printers.overload import annotation.internal.sharable import annotation.unchecked.uncheckedVariance import annotation.constructorOnly +import compiletime.uninitialized import Decorators._ +import staging.StagingLevel.* object Trees { - type Untyped = Nothing + type Untyped = Type | Null /** The total number of created tree nodes, maintained if Stats.enabled */ @sharable var ntrees: Int = 0 @@ -45,36 +47,34 @@ object Trees { * - Type checking an untyped tree should remove all embedded `TypedSplice` * nodes. */ - abstract class Tree[-T >: Untyped](implicit @constructorOnly src: SourceFile) + abstract class Tree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends Positioned, SrcPos, Product, Attachment.Container, printing.Showable { if (Stats.enabled) ntrees += 1 /** The type constructor at the root of the tree */ - type ThisTree[T >: Untyped] <: Tree[T] + type ThisTree[T <: Untyped] <: Tree[T] - protected var myTpe: T @uncheckedVariance = _ + protected var myTpe: T @uncheckedVariance = uninitialized /** Destructively set the type of the tree. This should be called only when it is known that * it is safe under sharing to do so. One use-case is in the withType method below * which implements copy-on-write. Another use-case is in method interpolateAndAdapt in Typer, * where we overwrite with a simplified version of the type itself. 
*/ - private[dotc] def overwriteType(tpe: T): Unit = + private[dotc] def overwriteType(tpe: T @uncheckedVariance): Unit = myTpe = tpe /** The type of the tree. In case of an untyped tree, * an UnAssignedTypeException is thrown. (Overridden by empty trees) */ - final def tpe: T @uncheckedVariance = { - if (myTpe == null) - throw UnAssignedTypeException(this) - myTpe - } + final def tpe: T = + if myTpe == null then throw UnAssignedTypeException(this) + myTpe.uncheckedNN /** Copy `tpe` attribute from tree `from` into this tree, independently * whether it is null or not. - final def copyAttr[U >: Untyped](from: Tree[U]): ThisTree[T] = { + final def copyAttr[U <: Untyped](from: Tree[U]): ThisTree[T] = { val t1 = this.withSpan(from.span) val t2 = if (from.myTpe != null) t1.withType(from.myTpe.asInstanceOf[Type]) @@ -131,10 +131,9 @@ object Trees { */ final def hasType: Boolean = myTpe != null - final def typeOpt: Type = myTpe match { + final def typeOpt: Type = myTpe match case tp: Type => tp - case _ => NoType - } + case null => NoType /** The denotation referred to by this tree. 
* Defined for `DenotingTree`s and `ProxyTree`s, NoDenotation for other @@ -166,7 +165,7 @@ object Trees { def toList: List[Tree[T]] = this :: Nil /** if this tree is the empty tree, the alternative, else this tree */ - inline def orElse[U >: Untyped <: T](inline that: Tree[U]): Tree[U] = + inline def orElse[U >: T <: Untyped](inline that: Tree[U]): Tree[U] = if (this eq genericEmptyTree) that else this /** The number of nodes in this tree */ @@ -217,42 +216,42 @@ object Trees { override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef] } - class UnAssignedTypeException[T >: Untyped](tree: Tree[T]) extends RuntimeException { + class UnAssignedTypeException[T <: Untyped](tree: Tree[T]) extends RuntimeException { override def getMessage: String = s"type of $tree is not assigned" } - type LazyTree[-T >: Untyped] = Tree[T] | Lazy[Tree[T]] - type LazyTreeList[-T >: Untyped] = List[Tree[T]] | Lazy[List[Tree[T]]] + type LazyTree[+T <: Untyped] = Tree[T] | Lazy[Tree[T]] + type LazyTreeList[+T <: Untyped] = List[Tree[T]] | Lazy[List[Tree[T]]] // ------ Categories of trees ----------------------------------- /** Instances of this class are trees for which isType is definitely true. * Note that some trees have isType = true without being TypTrees (e.g. Ident, Annotated) */ - trait TypTree[-T >: Untyped] extends Tree[T] { - type ThisTree[-T >: Untyped] <: TypTree[T] + trait TypTree[+T <: Untyped] extends Tree[T] { + type ThisTree[+T <: Untyped] <: TypTree[T] override def isType: Boolean = true } /** Instances of this class are trees for which isTerm is definitely true. * Note that some trees have isTerm = true without being TermTrees (e.g. 
Ident, Annotated) */ - trait TermTree[-T >: Untyped] extends Tree[T] { - type ThisTree[-T >: Untyped] <: TermTree[T] + trait TermTree[+T <: Untyped] extends Tree[T] { + type ThisTree[+T <: Untyped] <: TermTree[T] override def isTerm: Boolean = true } /** Instances of this class are trees which are not terms but are legal * parts of patterns. */ - trait PatternTree[-T >: Untyped] extends Tree[T] { - type ThisTree[-T >: Untyped] <: PatternTree[T] + trait PatternTree[+T <: Untyped] extends Tree[T] { + type ThisTree[+T <: Untyped] <: PatternTree[T] override def isPattern: Boolean = true } /** Tree's denotation can be derived from its type */ - abstract class DenotingTree[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends Tree[T] { - type ThisTree[-T >: Untyped] <: DenotingTree[T] + abstract class DenotingTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends Tree[T] { + type ThisTree[+T <: Untyped] <: DenotingTree[T] override def denot(using Context): Denotation = typeOpt.stripped match case tpe: NamedType => tpe.denot case tpe: ThisType => tpe.cls.denot @@ -262,8 +261,8 @@ object Trees { /** Tree's denot/isType/isTerm properties come from a subtree * identified by `forwardTo`. 
*/ - abstract class ProxyTree[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends Tree[T] { - type ThisTree[-T >: Untyped] <: ProxyTree[T] + abstract class ProxyTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends Tree[T] { + type ThisTree[+T <: Untyped] <: ProxyTree[T] def forwardTo: Tree[T] override def denot(using Context): Denotation = forwardTo.denot override def isTerm: Boolean = forwardTo.isTerm @@ -271,24 +270,24 @@ object Trees { } /** Tree has a name */ - abstract class NameTree[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends DenotingTree[T] { - type ThisTree[-T >: Untyped] <: NameTree[T] + abstract class NameTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends DenotingTree[T] { + type ThisTree[+T <: Untyped] <: NameTree[T] def name: Name } /** Tree refers by name to a denotation */ - abstract class RefTree[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends NameTree[T] { - type ThisTree[-T >: Untyped] <: RefTree[T] + abstract class RefTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends NameTree[T] { + type ThisTree[+T <: Untyped] <: RefTree[T] def qualifier: Tree[T] override def isType: Boolean = name.isTypeName override def isTerm: Boolean = name.isTermName } /** Tree defines a new symbol */ - trait DefTree[-T >: Untyped] extends DenotingTree[T] { - type ThisTree[-T >: Untyped] <: DefTree[T] + trait DefTree[+T <: Untyped] extends DenotingTree[T] { + type ThisTree[+T <: Untyped] <: DefTree[T] - private var myMods: untpd.Modifiers | Null = _ + private var myMods: untpd.Modifiers | Null = uninitialized private[dotc] def rawMods: untpd.Modifiers = if (myMods == null) untpd.EmptyModifiers else myMods.uncheckedNN @@ -313,7 +312,7 @@ object Trees { extension (mdef: untpd.DefTree) def mods: untpd.Modifiers = mdef.rawMods - sealed trait WithEndMarker[-T >: Untyped]: + sealed trait WithEndMarker[+T <: Untyped]: self: PackageDef[T] | NamedDefTree[T] => import 
WithEndMarker.* @@ -356,9 +355,9 @@ object Trees { end WithEndMarker - abstract class NamedDefTree[-T >: Untyped](implicit @constructorOnly src: SourceFile) + abstract class NamedDefTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends NameTree[T] with DefTree[T] with WithEndMarker[T] { - type ThisTree[-T >: Untyped] <: NamedDefTree[T] + type ThisTree[+T <: Untyped] <: NamedDefTree[T] protected def srcName(using Context): Name = if name == nme.CONSTRUCTOR then nme.this_ @@ -395,8 +394,8 @@ object Trees { * The envelope of a MemberDef contains the whole definition and has its point * on the opening keyword (or the next token after that if keyword is missing). */ - abstract class MemberDef[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends NamedDefTree[T] { - type ThisTree[-T >: Untyped] <: MemberDef[T] + abstract class MemberDef[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends NamedDefTree[T] { + type ThisTree[+T <: Untyped] <: MemberDef[T] def rawComment: Option[Comment] = getAttachment(DocComment) @@ -409,40 +408,40 @@ object Trees { } /** A ValDef or DefDef tree */ - abstract class ValOrDefDef[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends MemberDef[T] with WithLazyField[Tree[T]] { - type ThisTree[-T >: Untyped] <: ValOrDefDef[T] + abstract class ValOrDefDef[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends MemberDef[T], WithLazyFields { + type ThisTree[+T <: Untyped] <: ValOrDefDef[T] def name: TermName def tpt: Tree[T] - def unforcedRhs: LazyTree[T] = unforced - def rhs(using Context): Tree[T] = forceIfLazy + def unforcedRhs: LazyTree[T] + def rhs(using Context): Tree[T] } - trait ValOrTypeDef[-T >: Untyped] extends MemberDef[T]: - type ThisTree[-T >: Untyped] <: ValOrTypeDef[T] + trait ValOrTypeDef[+T <: Untyped] extends MemberDef[T]: + type ThisTree[+T <: Untyped] <: ValOrTypeDef[T] - type ParamClause[T >: Untyped] = List[ValDef[T]] | List[TypeDef[T]] + type ParamClause[T <: 
Untyped] = List[ValDef[T]] | List[TypeDef[T]] // ----------- Tree case classes ------------------------------------ /** name */ - case class Ident[-T >: Untyped] private[ast] (name: Name)(implicit @constructorOnly src: SourceFile) + case class Ident[+T <: Untyped] private[ast] (name: Name)(implicit @constructorOnly src: SourceFile) extends RefTree[T] { - type ThisTree[-T >: Untyped] = Ident[T] + type ThisTree[+T <: Untyped] = Ident[T] def qualifier: Tree[T] = genericEmptyTree def isBackquoted: Boolean = hasAttachment(Backquoted) } - class SearchFailureIdent[-T >: Untyped] private[ast] (name: Name, expl: => String)(implicit @constructorOnly src: SourceFile) + class SearchFailureIdent[+T <: Untyped] private[ast] (name: Name, expl: => String)(implicit @constructorOnly src: SourceFile) extends Ident[T](name) { def explanation = expl override def toString: String = s"SearchFailureIdent($explanation)" } /** qualifier.name, or qualifier#name, if qualifier is a type */ - case class Select[-T >: Untyped] private[ast] (qualifier: Tree[T], name: Name)(implicit @constructorOnly src: SourceFile) + case class Select[+T <: Untyped] private[ast] (qualifier: Tree[T], name: Name)(implicit @constructorOnly src: SourceFile) extends RefTree[T] { - type ThisTree[-T >: Untyped] = Select[T] + type ThisTree[+T <: Untyped] = Select[T] override def denot(using Context): Denotation = typeOpt match case ConstantType(_) if ConstFold.foldedUnops.contains(name) => @@ -464,15 +463,15 @@ object Trees { else span } - class SelectWithSig[-T >: Untyped] private[ast] (qualifier: Tree[T], name: Name, val sig: Signature)(implicit @constructorOnly src: SourceFile) + class SelectWithSig[+T <: Untyped] private[ast] (qualifier: Tree[T], name: Name, val sig: Signature)(implicit @constructorOnly src: SourceFile) extends Select[T](qualifier, name) { override def toString: String = s"SelectWithSig($qualifier, $name, $sig)" } /** qual.this */ - case class This[-T >: Untyped] private[ast] (qual: 
untpd.Ident)(implicit @constructorOnly src: SourceFile) + case class This[+T <: Untyped] private[ast] (qual: untpd.Ident)(implicit @constructorOnly src: SourceFile) extends DenotingTree[T] with TermTree[T] { - type ThisTree[-T >: Untyped] = This[T] + type ThisTree[+T <: Untyped] = This[T] // Denotation of a This tree is always the underlying class; needs correction for modules. override def denot(using Context): Denotation = typeOpt match { @@ -484,21 +483,21 @@ object Trees { } /** C.super[mix], where qual = C.this */ - case class Super[-T >: Untyped] private[ast] (qual: Tree[T], mix: untpd.Ident)(implicit @constructorOnly src: SourceFile) + case class Super[+T <: Untyped] private[ast] (qual: Tree[T], mix: untpd.Ident)(implicit @constructorOnly src: SourceFile) extends ProxyTree[T] with TermTree[T] { - type ThisTree[-T >: Untyped] = Super[T] + type ThisTree[+T <: Untyped] = Super[T] def forwardTo: Tree[T] = qual } - abstract class GenericApply[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends ProxyTree[T] with TermTree[T] { - type ThisTree[-T >: Untyped] <: GenericApply[T] + abstract class GenericApply[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends ProxyTree[T] with TermTree[T] { + type ThisTree[+T <: Untyped] <: GenericApply[T] val fun: Tree[T] val args: List[Tree[T]] def forwardTo: Tree[T] = fun } object GenericApply: - def unapply[T >: Untyped](tree: Tree[T]): Option[(Tree[T], List[Tree[T]])] = tree match + def unapply[T <: Untyped](tree: Tree[T]): Option[(Tree[T], List[Tree[T]])] = tree match case tree: GenericApply[T] => Some((tree.fun, tree.args)) case _ => None @@ -509,9 +508,9 @@ object Trees { case InfixTuple // r f (x1, ..., xN) where N != 1; needs to be treated specially for an error message in typedApply /** fun(args) */ - case class Apply[-T >: Untyped] private[ast] (fun: Tree[T], args: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + case class Apply[+T <: Untyped] private[ast] (fun: Tree[T], args: 
List[Tree[T]])(implicit @constructorOnly src: SourceFile) extends GenericApply[T] { - type ThisTree[-T >: Untyped] = Apply[T] + type ThisTree[+T <: Untyped] = Apply[T] def setApplyKind(kind: ApplyKind) = putAttachment(untpd.KindOfApply, kind) @@ -525,57 +524,57 @@ object Trees { } /** fun[args] */ - case class TypeApply[-T >: Untyped] private[ast] (fun: Tree[T], args: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + case class TypeApply[+T <: Untyped] private[ast] (fun: Tree[T], args: List[Tree[T]])(implicit @constructorOnly src: SourceFile) extends GenericApply[T] { - type ThisTree[-T >: Untyped] = TypeApply[T] + type ThisTree[+T <: Untyped] = TypeApply[T] } /** const */ - case class Literal[-T >: Untyped] private[ast] (const: Constant)(implicit @constructorOnly src: SourceFile) + case class Literal[+T <: Untyped] private[ast] (const: Constant)(implicit @constructorOnly src: SourceFile) extends Tree[T] with TermTree[T] { - type ThisTree[-T >: Untyped] = Literal[T] + type ThisTree[+T <: Untyped] = Literal[T] } /** new tpt, but no constructor call */ - case class New[-T >: Untyped] private[ast] (tpt: Tree[T])(implicit @constructorOnly src: SourceFile) + case class New[+T <: Untyped] private[ast] (tpt: Tree[T])(implicit @constructorOnly src: SourceFile) extends Tree[T] with TermTree[T] { - type ThisTree[-T >: Untyped] = New[T] + type ThisTree[+T <: Untyped] = New[T] } /** expr : tpt */ - case class Typed[-T >: Untyped] private[ast] (expr: Tree[T], tpt: Tree[T])(implicit @constructorOnly src: SourceFile) + case class Typed[+T <: Untyped] private[ast] (expr: Tree[T], tpt: Tree[T])(implicit @constructorOnly src: SourceFile) extends ProxyTree[T] with TermTree[T] { - type ThisTree[-T >: Untyped] = Typed[T] + type ThisTree[+T <: Untyped] = Typed[T] def forwardTo: Tree[T] = expr } /** name = arg, in a parameter list */ - case class NamedArg[-T >: Untyped] private[ast] (name: Name, arg: Tree[T])(implicit @constructorOnly src: SourceFile) + case class NamedArg[+T 
<: Untyped] private[ast] (name: Name, arg: Tree[T])(implicit @constructorOnly src: SourceFile) extends Tree[T] { - type ThisTree[-T >: Untyped] = NamedArg[T] + type ThisTree[+T <: Untyped] = NamedArg[T] } /** name = arg, outside a parameter list */ - case class Assign[-T >: Untyped] private[ast] (lhs: Tree[T], rhs: Tree[T])(implicit @constructorOnly src: SourceFile) + case class Assign[+T <: Untyped] private[ast] (lhs: Tree[T], rhs: Tree[T])(implicit @constructorOnly src: SourceFile) extends TermTree[T] { - type ThisTree[-T >: Untyped] = Assign[T] + type ThisTree[+T <: Untyped] = Assign[T] } /** { stats; expr } */ - case class Block[-T >: Untyped] private[ast] (stats: List[Tree[T]], expr: Tree[T])(implicit @constructorOnly src: SourceFile) + case class Block[+T <: Untyped] private[ast] (stats: List[Tree[T]], expr: Tree[T])(implicit @constructorOnly src: SourceFile) extends Tree[T] { - type ThisTree[-T >: Untyped] = Block[T] + type ThisTree[+T <: Untyped] = Block[T] override def isType: Boolean = expr.isType override def isTerm: Boolean = !isType // this will classify empty trees as terms, which is necessary } /** if cond then thenp else elsep */ - case class If[-T >: Untyped] private[ast] (cond: Tree[T], thenp: Tree[T], elsep: Tree[T])(implicit @constructorOnly src: SourceFile) + case class If[+T <: Untyped] private[ast] (cond: Tree[T], thenp: Tree[T], elsep: Tree[T])(implicit @constructorOnly src: SourceFile) extends TermTree[T] { - type ThisTree[-T >: Untyped] = If[T] + type ThisTree[+T <: Untyped] = If[T] def isInline = false } - class InlineIf[-T >: Untyped] private[ast] (cond: Tree[T], thenp: Tree[T], elsep: Tree[T])(implicit @constructorOnly src: SourceFile) + class InlineIf[+T <: Untyped] private[ast] (cond: Tree[T], thenp: Tree[T], elsep: Tree[T])(implicit @constructorOnly src: SourceFile) extends If(cond, thenp, elsep) { override def isInline = true override def toString = s"InlineIf($cond, $thenp, $elsep)" @@ -590,33 +589,33 @@ object Trees { * of the 
closure is a function type, otherwise it is the type * given in `tpt`, which must be a SAM type. */ - case class Closure[-T >: Untyped] private[ast] (env: List[Tree[T]], meth: Tree[T], tpt: Tree[T])(implicit @constructorOnly src: SourceFile) + case class Closure[+T <: Untyped] private[ast] (env: List[Tree[T]], meth: Tree[T], tpt: Tree[T])(implicit @constructorOnly src: SourceFile) extends TermTree[T] { - type ThisTree[-T >: Untyped] = Closure[T] + type ThisTree[+T <: Untyped] = Closure[T] } /** selector match { cases } */ - case class Match[-T >: Untyped] private[ast] (selector: Tree[T], cases: List[CaseDef[T]])(implicit @constructorOnly src: SourceFile) + case class Match[+T <: Untyped] private[ast] (selector: Tree[T], cases: List[CaseDef[T]])(implicit @constructorOnly src: SourceFile) extends TermTree[T] { - type ThisTree[-T >: Untyped] = Match[T] + type ThisTree[+T <: Untyped] = Match[T] def isInline = false } - class InlineMatch[-T >: Untyped] private[ast] (selector: Tree[T], cases: List[CaseDef[T]])(implicit @constructorOnly src: SourceFile) + class InlineMatch[+T <: Untyped] private[ast] (selector: Tree[T], cases: List[CaseDef[T]])(implicit @constructorOnly src: SourceFile) extends Match(selector, cases) { override def isInline = true override def toString = s"InlineMatch($selector, $cases)" } /** case pat if guard => body */ - case class CaseDef[-T >: Untyped] private[ast] (pat: Tree[T], guard: Tree[T], body: Tree[T])(implicit @constructorOnly src: SourceFile) + case class CaseDef[+T <: Untyped] private[ast] (pat: Tree[T], guard: Tree[T], body: Tree[T])(implicit @constructorOnly src: SourceFile) extends Tree[T] { - type ThisTree[-T >: Untyped] = CaseDef[T] + type ThisTree[+T <: Untyped] = CaseDef[T] } /** label[tpt]: { expr } */ - case class Labeled[-T >: Untyped] private[ast] (bind: Bind[T], expr: Tree[T])(implicit @constructorOnly src: SourceFile) + case class Labeled[+T <: Untyped] private[ast] (bind: Bind[T], expr: Tree[T])(implicit @constructorOnly src: 
SourceFile) extends NameTree[T] { - type ThisTree[-T >: Untyped] = Labeled[T] + type ThisTree[+T <: Untyped] = Labeled[T] def name: Name = bind.name } @@ -625,33 +624,33 @@ object Trees { * After program transformations this is not necessarily the enclosing method, because * closures can intervene. */ - case class Return[-T >: Untyped] private[ast] (expr: Tree[T], from: Tree[T] = genericEmptyTree)(implicit @constructorOnly src: SourceFile) + case class Return[+T <: Untyped] private[ast] (expr: Tree[T], from: Tree[T] = genericEmptyTree)(implicit @constructorOnly src: SourceFile) extends TermTree[T] { - type ThisTree[-T >: Untyped] = Return[T] + type ThisTree[+T <: Untyped] = Return[T] } /** while (cond) { body } */ - case class WhileDo[-T >: Untyped] private[ast] (cond: Tree[T], body: Tree[T])(implicit @constructorOnly src: SourceFile) + case class WhileDo[+T <: Untyped] private[ast] (cond: Tree[T], body: Tree[T])(implicit @constructorOnly src: SourceFile) extends TermTree[T] { - type ThisTree[-T >: Untyped] = WhileDo[T] + type ThisTree[+T <: Untyped] = WhileDo[T] } /** try block catch cases finally finalizer */ - case class Try[-T >: Untyped] private[ast] (expr: Tree[T], cases: List[CaseDef[T]], finalizer: Tree[T])(implicit @constructorOnly src: SourceFile) + case class Try[+T <: Untyped] private[ast] (expr: Tree[T], cases: List[CaseDef[T]], finalizer: Tree[T])(implicit @constructorOnly src: SourceFile) extends TermTree[T] { - type ThisTree[-T >: Untyped] = Try[T] + type ThisTree[+T <: Untyped] = Try[T] } /** Seq(elems) * @param tpt The element type of the sequence. 
*/ - case class SeqLiteral[-T >: Untyped] private[ast] (elems: List[Tree[T]], elemtpt: Tree[T])(implicit @constructorOnly src: SourceFile) + case class SeqLiteral[+T <: Untyped] private[ast] (elems: List[Tree[T]], elemtpt: Tree[T])(implicit @constructorOnly src: SourceFile) extends Tree[T] { - type ThisTree[-T >: Untyped] = SeqLiteral[T] + type ThisTree[+T <: Untyped] = SeqLiteral[T] } /** Array(elems) */ - class JavaSeqLiteral[-T >: Untyped] private[ast] (elems: List[Tree[T]], elemtpt: Tree[T])(implicit @constructorOnly src: SourceFile) + class JavaSeqLiteral[+T <: Untyped] private[ast] (elems: List[Tree[T]], elemtpt: Tree[T])(implicit @constructorOnly src: SourceFile) extends SeqLiteral(elems, elemtpt) { override def toString: String = s"JavaSeqLiteral($elems, $elemtpt)" } @@ -672,46 +671,134 @@ object Trees { * different context: `bindings` represent the arguments to the inlined * call, whereas `expansion` represents the body of the inlined function. */ - case class Inlined[-T >: Untyped] private[ast] (call: tpd.Tree, bindings: List[MemberDef[T]], expansion: Tree[T])(implicit @constructorOnly src: SourceFile) + case class Inlined[+T <: Untyped] private[ast] (call: tpd.Tree, bindings: List[MemberDef[T]], expansion: Tree[T])(implicit @constructorOnly src: SourceFile) extends Tree[T] { - type ThisTree[-T >: Untyped] = Inlined[T] + type ThisTree[+T <: Untyped] = Inlined[T] override def isTerm = expansion.isTerm override def isType = expansion.isType } + /** A tree representing a quote `'{ body }` or `'[ body ]`. + * `Quote`s are created by the `Parser`. In typer they can be typed as a + * `Quote` with a known `tpt` or desugared and typed as a quote pattern. + * + * `Quotes` are checked and transformed in the `staging`, `splicing` and `pickleQuotes` + * phases. After `pickleQuotes` phase, the only quotes that exist are in `inline` + * methods. These are dropped when we remove the inline method implementations. 
+ * + * Type quotes `'[body]` from the parser are desugared into quote patterns (using a `Type.of[T]]`) + * when type checking. TASTy files will not contain type quotes. Type quotes are used again + * in the `staging` phase to represent the reification of `Type.of[T]]`. + * + * Type tags `tags` are always empty before the `staging` phase. Tags for stage inconsistent + * types are added in the `staging` phase to level 0 quotes. Tags for types that refer to + * definitions in an outer quote are added in the `splicing` phase + * + * @param body The tree that was quoted + * @param tags Term references to instances of `Type[T]` for `T`s that are used in the quote + */ + case class Quote[+T <: Untyped] private[ast] (body: Tree[T], tags: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + extends TermTree[T] { + type ThisTree[+T <: Untyped] = Quote[T] + + /** Is this a type quote `'[tpe]' */ + def isTypeQuote = body.isType + + /** Type of the quoted expression as seen from outside the quote */ + def bodyType(using Context): Type = + val quoteType = typeOpt // `Quotes ?=> Expr[T]` or `Quotes ?=> Type[T]` + val exprType = quoteType.argInfos.last // `Expr[T]` or `Type[T]` + exprType.argInfos.head // T + + /** Set the type of the body of the quote */ + def withBodyType(tpe: Type)(using Context): Quote[Type] = + val exprType = // `Expr[T]` or `Type[T]` + if body.isTerm then defn.QuotedExprClass.typeRef.appliedTo(tpe) + else defn.QuotedTypeClass.typeRef.appliedTo(tpe) + val quoteType = // `Quotes ?=> Expr[T]` or `Quotes ?=> Type[T]` + defn.FunctionType(1, isContextual = true) + .appliedTo(defn.QuotesClass.typeRef, exprType) + withType(quoteType) + } + + /** A tree representing a splice `${ expr }` + * + * `Splice`s are created by the `Parser`. In typer they can be typed as a + * `Splice` with a known `tpt` or desugared and typed as a quote pattern holes. + * + * `Splice` are checked and transformed in the `staging` and `splicing` phases. 
+ * After `splicing` phase, the only splices that exist are in `inline` + * methods. These are dropped when we remove the inline method implementations. + * + * @param expr The tree that was spliced + */ + case class Splice[+T <: Untyped] private[ast] (expr: Tree[T])(implicit @constructorOnly src: SourceFile) + extends TermTree[T] { + type ThisTree[+T <: Untyped] = Splice[T] + } + + /** A tree representing a pattern splice `${ pattern }`, `$ident` or `$ident(args*)` in a quote pattern. + * + * Parser will only create `${ pattern }` and `$ident`, hence they will not have args. + * While typing, the `$ident(args*)` the args are identified and desugared into a `SplicePattern` + * containing them. + * + * SplicePattern are removed after typing the pattern and are not present in TASTy. + * + * @param body The tree that was spliced + * @param args The arguments of the splice (the HOAS arguments) + */ + case class SplicePattern[+T <: Untyped] private[ast] (body: Tree[T], args: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + extends TermTree[T] { + type ThisTree[+T <: Untyped] = SplicePattern[T] + } + /** A type tree that represents an existing or inferred type */ - case class TypeTree[-T >: Untyped]()(implicit @constructorOnly src: SourceFile) + case class TypeTree[+T <: Untyped]()(implicit @constructorOnly src: SourceFile) extends DenotingTree[T] with TypTree[T] { - type ThisTree[-T >: Untyped] = TypeTree[T] + type ThisTree[+T <: Untyped] = TypeTree[T] override def isEmpty: Boolean = !hasType override def toString: String = s"TypeTree${if (hasType) s"[$typeOpt]" else ""}" } + /** Tree that replaces a level 1 splices in pickled (level 0) quotes. + * It is only used when picking quotes (will never be in a TASTy file). + * + * @param isTerm If this hole is a term, otherwise it is a type hole. + * @param idx The index of the hole in it's enclosing level 0 quote. 
+ * @param args The arguments of the splice to compute its content + * @param content Lambda that computes the content of the hole. This tree is empty when in a quote pickle. + */ + case class Hole[+T <: Untyped](override val isTerm: Boolean, idx: Int, args: List[Tree[T]], content: Tree[T])(implicit @constructorOnly src: SourceFile) extends Tree[T] { + type ThisTree[+T <: Untyped] <: Hole[T] + override def isType: Boolean = !isTerm + } + /** A type tree whose type is inferred. These trees appear in two contexts * - as an argument of a TypeApply. In that case its type is always a TypeVar * - as a (result-)type of an inferred ValDef or DefDef. * Every TypeVar is created as the type of one InferredTypeTree. */ - class InferredTypeTree[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends TypeTree[T] + class InferredTypeTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends TypeTree[T] /** ref.type */ - case class SingletonTypeTree[-T >: Untyped] private[ast] (ref: Tree[T])(implicit @constructorOnly src: SourceFile) + case class SingletonTypeTree[+T <: Untyped] private[ast] (ref: Tree[T])(implicit @constructorOnly src: SourceFile) extends DenotingTree[T] with TypTree[T] { - type ThisTree[-T >: Untyped] = SingletonTypeTree[T] + type ThisTree[+T <: Untyped] = SingletonTypeTree[T] } /** tpt { refinements } */ - case class RefinedTypeTree[-T >: Untyped] private[ast] (tpt: Tree[T], refinements: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + case class RefinedTypeTree[+T <: Untyped] private[ast] (tpt: Tree[T], refinements: List[Tree[T]])(implicit @constructorOnly src: SourceFile) extends ProxyTree[T] with TypTree[T] { - type ThisTree[-T >: Untyped] = RefinedTypeTree[T] + type ThisTree[+T <: Untyped] = RefinedTypeTree[T] def forwardTo: Tree[T] = tpt } /** tpt[args] */ - case class AppliedTypeTree[-T >: Untyped] private[ast] (tpt: Tree[T], args: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + case class AppliedTypeTree[+T <: 
Untyped] private[ast] (tpt: Tree[T], args: List[Tree[T]])(implicit @constructorOnly src: SourceFile) extends ProxyTree[T] with TypTree[T] { - type ThisTree[-T >: Untyped] = AppliedTypeTree[T] + type ThisTree[+T <: Untyped] = AppliedTypeTree[T] def forwardTo: Tree[T] = tpt } @@ -738,40 +825,40 @@ object Trees { * source code written by the user with the trees used by the compiler (for * example, to make "find all references" work in the IDE). */ - case class LambdaTypeTree[-T >: Untyped] private[ast] (tparams: List[TypeDef[T]], body: Tree[T])(implicit @constructorOnly src: SourceFile) + case class LambdaTypeTree[+T <: Untyped] private[ast] (tparams: List[TypeDef[T]], body: Tree[T])(implicit @constructorOnly src: SourceFile) extends TypTree[T] { - type ThisTree[-T >: Untyped] = LambdaTypeTree[T] + type ThisTree[+T <: Untyped] = LambdaTypeTree[T] } - case class TermLambdaTypeTree[-T >: Untyped] private[ast] (params: List[ValDef[T]], body: Tree[T])(implicit @constructorOnly src: SourceFile) + case class TermLambdaTypeTree[+T <: Untyped] private[ast] (params: List[ValDef[T]], body: Tree[T])(implicit @constructorOnly src: SourceFile) extends TypTree[T] { - type ThisTree[-T >: Untyped] = TermLambdaTypeTree[T] + type ThisTree[+T <: Untyped] = TermLambdaTypeTree[T] } /** [bound] selector match { cases } */ - case class MatchTypeTree[-T >: Untyped] private[ast] (bound: Tree[T], selector: Tree[T], cases: List[CaseDef[T]])(implicit @constructorOnly src: SourceFile) + case class MatchTypeTree[+T <: Untyped] private[ast] (bound: Tree[T], selector: Tree[T], cases: List[CaseDef[T]])(implicit @constructorOnly src: SourceFile) extends TypTree[T] { - type ThisTree[-T >: Untyped] = MatchTypeTree[T] + type ThisTree[+T <: Untyped] = MatchTypeTree[T] } /** => T */ - case class ByNameTypeTree[-T >: Untyped] private[ast] (result: Tree[T])(implicit @constructorOnly src: SourceFile) + case class ByNameTypeTree[+T <: Untyped] private[ast] (result: Tree[T])(implicit @constructorOnly src: 
SourceFile) extends TypTree[T] { - type ThisTree[-T >: Untyped] = ByNameTypeTree[T] + type ThisTree[+T <: Untyped] = ByNameTypeTree[T] } /** >: lo <: hi * >: lo <: hi = alias for RHS of bounded opaque type */ - case class TypeBoundsTree[-T >: Untyped] private[ast] (lo: Tree[T], hi: Tree[T], alias: Tree[T])(implicit @constructorOnly src: SourceFile) + case class TypeBoundsTree[+T <: Untyped] private[ast] (lo: Tree[T], hi: Tree[T], alias: Tree[T])(implicit @constructorOnly src: SourceFile) extends TypTree[T] { - type ThisTree[-T >: Untyped] = TypeBoundsTree[T] + type ThisTree[+T <: Untyped] = TypeBoundsTree[T] } /** name @ body */ - case class Bind[-T >: Untyped] private[ast] (name: Name, body: Tree[T])(implicit @constructorOnly src: SourceFile) + case class Bind[+T <: Untyped] private[ast] (name: Name, body: Tree[T])(implicit @constructorOnly src: SourceFile) extends NamedDefTree[T] with PatternTree[T] { - type ThisTree[-T >: Untyped] = Bind[T] + type ThisTree[+T <: Untyped] = Bind[T] override def isType: Boolean = name.isTypeName override def isTerm: Boolean = name.isTermName @@ -780,9 +867,9 @@ object Trees { } /** tree_1 | ... | tree_n */ - case class Alternative[-T >: Untyped] private[ast] (trees: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + case class Alternative[+T <: Untyped] private[ast] (trees: List[Tree[T]])(implicit @constructorOnly src: SourceFile) extends PatternTree[T] { - type ThisTree[-T >: Untyped] = Alternative[T] + type ThisTree[+T <: Untyped] = Alternative[T] } /** The typed translation of `extractor(patterns)` in a pattern. 
The translation has the following @@ -799,29 +886,33 @@ object Trees { * val result = fun(sel)(implicits) * if (result.isDefined) "match patterns against result" */ - case class UnApply[-T >: Untyped] private[ast] (fun: Tree[T], implicits: List[Tree[T]], patterns: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + case class UnApply[+T <: Untyped] private[ast] (fun: Tree[T], implicits: List[Tree[T]], patterns: List[Tree[T]])(implicit @constructorOnly src: SourceFile) extends ProxyTree[T] with PatternTree[T] { - type ThisTree[-T >: Untyped] = UnApply[T] + type ThisTree[+T <: Untyped] = UnApply[T] def forwardTo = fun } /** mods val name: tpt = rhs */ - case class ValDef[-T >: Untyped] private[ast] (name: TermName, tpt: Tree[T], private var preRhs: LazyTree[T @uncheckedVariance])(implicit @constructorOnly src: SourceFile) + case class ValDef[+T <: Untyped] private[ast] (name: TermName, tpt: Tree[T], private var preRhs: LazyTree[T])(implicit @constructorOnly src: SourceFile) extends ValOrDefDef[T], ValOrTypeDef[T] { - type ThisTree[-T >: Untyped] = ValDef[T] + type ThisTree[+T <: Untyped] = ValDef[T] assert(isEmpty || (tpt ne genericEmptyTree)) - def unforced: LazyTree[T] = preRhs - protected def force(x: Tree[T @uncheckedVariance]): Unit = preRhs = x + + def unforcedRhs: LazyTree[T] = preRhs + def forceFields()(using Context): Unit = preRhs = force(preRhs) + def rhs(using Context): Tree[T] = { forceFields(); preRhs.asInstanceOf[Tree[T]] } } /** mods def name[tparams](vparams_1)...(vparams_n): tpt = rhs */ - case class DefDef[-T >: Untyped] private[ast] (name: TermName, - paramss: List[ParamClause[T]], tpt: Tree[T], private var preRhs: LazyTree[T @uncheckedVariance])(implicit @constructorOnly src: SourceFile) + case class DefDef[+T <: Untyped] private[ast] (name: TermName, + paramss: List[ParamClause[T]], tpt: Tree[T], private var preRhs: LazyTree[T])(implicit @constructorOnly src: SourceFile) extends ValOrDefDef[T] { - type ThisTree[-T >: Untyped] = DefDef[T] 
+ type ThisTree[+T <: Untyped] = DefDef[T] assert(tpt ne genericEmptyTree) - def unforced: LazyTree[T] = preRhs - protected def force(x: Tree[T @uncheckedVariance]): Unit = preRhs = x + + def unforcedRhs: LazyTree[T] = preRhs + def forceFields()(using Context): Unit = preRhs = force(preRhs) + def rhs(using Context): Tree[T] = { forceFields(); preRhs.asInstanceOf[Tree[T]] } def leadingTypeParams(using Context): List[TypeDef[T]] = paramss match case (tparams @ (tparam: TypeDef[_]) :: _) :: _ => tparams.asInstanceOf[List[TypeDef[T]]] @@ -842,9 +933,9 @@ object Trees { * mods type name >: lo <: hi, if rhs = TypeBoundsTree(lo, hi) or * mods type name >: lo <: hi = rhs if rhs = TypeBoundsTree(lo, hi, alias) and opaque in mods */ - case class TypeDef[-T >: Untyped] private[ast] (name: TypeName, rhs: Tree[T])(implicit @constructorOnly src: SourceFile) + case class TypeDef[+T <: Untyped] private[ast] (name: TypeName, rhs: Tree[T])(implicit @constructorOnly src: SourceFile) extends MemberDef[T], ValOrTypeDef[T] { - type ThisTree[-T >: Untyped] = TypeDef[T] + type ThisTree[+T <: Untyped] = TypeDef[T] /** Is this a definition of a class? */ def isClassDef: Boolean = rhs.isInstanceOf[Template[?]] @@ -853,26 +944,30 @@ object Trees { } /** extends parents { self => body } - * @param parentsOrDerived A list of parents followed by a list of derived classes, - * if this is of class untpd.DerivingTemplate. - * Typed templates only have parents. + * @param preParentsOrDerived A list of parents followed by a list of derived classes, + * if this is of class untpd.DerivingTemplate. + * Typed templates only have parents. 
*/ - case class Template[-T >: Untyped] private[ast] (constr: DefDef[T], parentsOrDerived: List[Tree[T]], self: ValDef[T], private var preBody: LazyTreeList[T @uncheckedVariance])(implicit @constructorOnly src: SourceFile) - extends DefTree[T] with WithLazyField[List[Tree[T]]] { - type ThisTree[-T >: Untyped] = Template[T] - def unforcedBody: LazyTreeList[T] = unforced - def unforced: LazyTreeList[T] = preBody - protected def force(x: List[Tree[T @uncheckedVariance]]): Unit = preBody = x - def body(using Context): List[Tree[T]] = forceIfLazy + case class Template[+T <: Untyped] private[ast] (constr: DefDef[T], private var preParentsOrDerived: LazyTreeList[T], self: ValDef[T], private var preBody: LazyTreeList[T])(implicit @constructorOnly src: SourceFile) + extends DefTree[T] with WithLazyFields { + type ThisTree[+T <: Untyped] = Template[T] + + def forceFields()(using Context): Unit = + preParentsOrDerived = force(preParentsOrDerived) + preBody = force(preBody) + + def unforcedBody: LazyTreeList[T] = preBody + def body(using Context): List[Tree[T]] = { forceFields(); preBody.asInstanceOf[List[Tree[T]]] } + def parentsOrDerived(using Context): List[Tree[T]] = { forceFields(); preParentsOrDerived.asInstanceOf[List[Tree[T]]] } - def parents: List[Tree[T]] = parentsOrDerived // overridden by DerivingTemplate - def derived: List[untpd.Tree] = Nil // overridden by DerivingTemplate + def parents(using Context): List[Tree[T]] = parentsOrDerived // overridden by DerivingTemplate + def derived: List[untpd.Tree] = Nil // overridden by DerivingTemplate } - abstract class ImportOrExport[-T >: Untyped](implicit @constructorOnly src: SourceFile) + abstract class ImportOrExport[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends DenotingTree[T] { - type ThisTree[-T >: Untyped] <: ImportOrExport[T] + type ThisTree[+T <: Untyped] <: ImportOrExport[T] val expr: Tree[T] val selectors: List[untpd.ImportSelector] } @@ -881,36 +976,36 @@ object Trees { * where a selector 
is either an untyped `Ident`, `name` or * an untyped thicket consisting of `name` and `rename`. */ - case class Import[-T >: Untyped] private[ast] (expr: Tree[T], selectors: List[untpd.ImportSelector])(implicit @constructorOnly src: SourceFile) + case class Import[+T <: Untyped] private[ast] (expr: Tree[T], selectors: List[untpd.ImportSelector])(implicit @constructorOnly src: SourceFile) extends ImportOrExport[T] { - type ThisTree[-T >: Untyped] = Import[T] + type ThisTree[+T <: Untyped] = Import[T] } /** export expr.selectors * where a selector is either an untyped `Ident`, `name` or * an untyped thicket consisting of `name` and `rename`. */ - case class Export[-T >: Untyped] private[ast] (expr: Tree[T], selectors: List[untpd.ImportSelector])(implicit @constructorOnly src: SourceFile) + case class Export[+T <: Untyped] private[ast] (expr: Tree[T], selectors: List[untpd.ImportSelector])(implicit @constructorOnly src: SourceFile) extends ImportOrExport[T] { - type ThisTree[-T >: Untyped] = Export[T] + type ThisTree[+T <: Untyped] = Export[T] } /** package pid { stats } */ - case class PackageDef[-T >: Untyped] private[ast] (pid: RefTree[T], stats: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + case class PackageDef[+T <: Untyped] private[ast] (pid: RefTree[T], stats: List[Tree[T]])(implicit @constructorOnly src: SourceFile) extends ProxyTree[T] with WithEndMarker[T] { - type ThisTree[-T >: Untyped] = PackageDef[T] + type ThisTree[+T <: Untyped] = PackageDef[T] def forwardTo: RefTree[T] = pid protected def srcName(using Context): Name = pid.name } /** arg @annot */ - case class Annotated[-T >: Untyped] private[ast] (arg: Tree[T], annot: Tree[T])(implicit @constructorOnly src: SourceFile) + case class Annotated[+T <: Untyped] private[ast] (arg: Tree[T], annot: Tree[T])(implicit @constructorOnly src: SourceFile) extends ProxyTree[T] { - type ThisTree[-T >: Untyped] = Annotated[T] + type ThisTree[+T <: Untyped] = Annotated[T] def forwardTo: Tree[T] = arg } 
- trait WithoutTypeOrPos[-T >: Untyped] extends Tree[T] { + trait WithoutTypeOrPos[+T <: Untyped] extends Tree[T] { override def withTypeUnchecked(tpe: Type): ThisTree[Type] = this.asInstanceOf[ThisTree[Type]] override def span: Span = NoSpan override def span_=(span: Span): Unit = {} @@ -921,17 +1016,17 @@ object Trees { * The contained trees will be integrated when transformed with * a `transform(List[Tree])` call. */ - case class Thicket[-T >: Untyped](trees: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + case class Thicket[+T <: Untyped](trees: List[Tree[T]])(implicit @constructorOnly src: SourceFile) extends Tree[T] with WithoutTypeOrPos[T] { myTpe = NoType.asInstanceOf[T] - type ThisTree[-T >: Untyped] = Thicket[T] + type ThisTree[+T <: Untyped] = Thicket[T] - def mapElems(op: Tree[T] => Tree[T] @uncheckedVariance): Thicket[T] = { + def mapElems[U >: T <: Untyped](op: Tree[T] => Tree[U]): Thicket[U] = { val newTrees = trees.mapConserve(op) if (trees eq newTrees) this else - Thicket[T](newTrees)(source).asInstanceOf[this.type] + Thicket[U](newTrees)(source).asInstanceOf[this.type] } override def foreachInThicket(op: Tree[T] => Unit): Unit = @@ -950,12 +1045,12 @@ object Trees { mapElems(_.withSpan(span)).asInstanceOf[this.type] } - class EmptyTree[T >: Untyped] extends Thicket(Nil)(NoSource) { + class EmptyTree[T <: Untyped] extends Thicket(Nil)(NoSource) { // assert(uniqueId != 1492) override def withSpan(span: Span) = throw AssertionError("Cannot change span of EmptyTree") } - class EmptyValDef[T >: Untyped] extends ValDef[T]( + class EmptyValDef[T <: Untyped] extends ValDef[T]( nme.WILDCARD, genericEmptyTree[T], genericEmptyTree[T])(NoSource) with WithoutTypeOrPos[T] { myTpe = NoType.asInstanceOf[T] setMods(untpd.Modifiers(PrivateLocal)) @@ -966,25 +1061,10 @@ object Trees { @sharable val theEmptyTree = new EmptyTree[Type]() @sharable val theEmptyValDef = new EmptyValDef[Type]() - def genericEmptyValDef[T >: Untyped]: ValDef[T] = 
theEmptyValDef.asInstanceOf[ValDef[T]] - def genericEmptyTree[T >: Untyped]: Thicket[T] = theEmptyTree.asInstanceOf[Thicket[T]] - - /** Tree that replaces a level 1 splices in pickled (level 0) quotes. - * It is only used when picking quotes (will never be in a TASTy file). - * - * @param isTermHole If this hole is a term, otherwise it is a type hole. - * @param idx The index of the hole in it's enclosing level 0 quote. - * @param args The arguments of the splice to compute its content - * @param content Lambda that computes the content of the hole. This tree is empty when in a quote pickle. - * @param tpt Type of the hole - */ - case class Hole[-T >: Untyped](isTermHole: Boolean, idx: Int, args: List[Tree[T]], content: Tree[T], tpt: Tree[T])(implicit @constructorOnly src: SourceFile) extends Tree[T] { - type ThisTree[-T >: Untyped] <: Hole[T] - override def isTerm: Boolean = isTermHole - override def isType: Boolean = !isTermHole - } + def genericEmptyValDef[T <: Untyped]: ValDef[T] = theEmptyValDef.asInstanceOf[ValDef[T]] + def genericEmptyTree[T <: Untyped]: Thicket[T] = theEmptyTree.asInstanceOf[Thicket[T]] - def flatten[T >: Untyped](trees: List[Tree[T]]): List[Tree[T]] = { + def flatten[T <: Untyped](trees: List[Tree[T]]): List[Tree[T]] = { def recur(buf: ListBuffer[Tree[T]] | Null, remaining: List[Tree[T]]): ListBuffer[Tree[T]] | Null = remaining match { case Thicket(elems) :: remaining1 => @@ -1010,34 +1090,31 @@ object Trees { // ----- Lazy trees and tree sequences - /** A tree that can have a lazy field - * The field is represented by some private `var` which is - * accessed by `unforced` and `force`. Forcing the field will - * set the `var` to the underlying value. 
- */ - trait WithLazyField[+T <: AnyRef] { - def unforced: T | Lazy[T] - protected def force(x: T @uncheckedVariance): Unit - def forceIfLazy(using Context): T = unforced match { - case lzy: Lazy[T @unchecked] => - val x = lzy.complete - force(x) - x - case x: T @ unchecked => x - } - } - /** A base trait for lazy tree fields. * These can be instantiated with Lazy instances which * can delay tree construction until the field is first demanded. */ - trait Lazy[+T <: AnyRef] { + trait Lazy[+T <: AnyRef]: def complete(using Context): T - } + + /** A tree that can have a lazy fields. + * Such fields are variables of type `T | Lazy[T]`, for some tyope `T`. + */ + trait WithLazyFields: + + /** If `x` is lazy, computes the underlying value */ + protected def force[T <: AnyRef](x: T | Lazy[T])(using Context): T = x match + case x: Lazy[T] @unchecked => x.complete + case x: T @unchecked => x + + /** Assigns all lazy fields their underlying non-lazy value. */ + def forceFields()(using Context): Unit + + end WithLazyFields // ----- Generic Tree Instances, inherited from `tpt` and `untpd`. 
- abstract class Instance[T >: Untyped <: Type] { inst => + abstract class Instance[T <: Untyped] { inst => type Tree = Trees.Tree[T] type TypTree = Trees.TypTree[T] @@ -1084,6 +1161,9 @@ object Trees { type SeqLiteral = Trees.SeqLiteral[T] type JavaSeqLiteral = Trees.JavaSeqLiteral[T] type Inlined = Trees.Inlined[T] + type Quote = Trees.Quote[T] + type Splice = Trees.Splice[T] + type SplicePattern = Trees.SplicePattern[T] type TypeTree = Trees.TypeTree[T] type InferredTypeTree = Trees.InferredTypeTree[T] type SingletonTypeTree = Trees.SingletonTypeTree[T] @@ -1254,6 +1334,18 @@ object Trees { case tree: Inlined if (call eq tree.call) && (bindings eq tree.bindings) && (expansion eq tree.expansion) => tree case _ => finalize(tree, untpd.Inlined(call, bindings, expansion)(sourceFile(tree))) } + def Quote(tree: Tree)(body: Tree, tags: List[Tree])(using Context): Quote = tree match { + case tree: Quote if (body eq tree.body) && (tags eq tree.tags) => tree + case _ => finalize(tree, untpd.Quote(body, tags)(sourceFile(tree))) + } + def Splice(tree: Tree)(expr: Tree)(using Context): Splice = tree match { + case tree: Splice if (expr eq tree.expr) => tree + case _ => finalize(tree, untpd.Splice(expr)(sourceFile(tree))) + } + def SplicePattern(tree: Tree)(body: Tree, args: List[Tree])(using Context): SplicePattern = tree match { + case tree: SplicePattern if (body eq tree.body) && (args eq tree.args) => tree + case _ => finalize(tree, untpd.SplicePattern(body, args)(sourceFile(tree))) + } def SingletonTypeTree(tree: Tree)(ref: Tree)(using Context): SingletonTypeTree = tree match { case tree: SingletonTypeTree if (ref eq tree.ref) => tree case _ => finalize(tree, untpd.SingletonTypeTree(ref)(sourceFile(tree))) @@ -1334,9 +1426,9 @@ object Trees { case tree: Thicket if (trees eq tree.trees) => tree case _ => finalize(tree, untpd.Thicket(trees)(sourceFile(tree))) } - def Hole(tree: Tree)(isTerm: Boolean, idx: Int, args: List[Tree], content: Tree, tpt: Tree)(using Context): 
Hole = tree match { + def Hole(tree: Tree)(isTerm: Boolean, idx: Int, args: List[Tree], content: Tree)(using Context): Hole = tree match { case tree: Hole if isTerm == tree.isTerm && idx == tree.idx && args.eq(tree.args) && content.eq(tree.content) && content.eq(tree.content) => tree - case _ => finalize(tree, untpd.Hole(isTerm, idx, args, content, tpt)(sourceFile(tree))) + case _ => finalize(tree, untpd.Hole(isTerm, idx, args, content)(sourceFile(tree))) } // Copier methods with default arguments; these demand that the original tree @@ -1357,10 +1449,10 @@ object Trees { DefDef(tree: Tree)(name, paramss, tpt, rhs) def TypeDef(tree: TypeDef)(name: TypeName = tree.name, rhs: Tree = tree.rhs)(using Context): TypeDef = TypeDef(tree: Tree)(name, rhs) - def Template(tree: Template)(constr: DefDef = tree.constr, parents: List[Tree] = tree.parents, derived: List[untpd.Tree] = tree.derived, self: ValDef = tree.self, body: LazyTreeList = tree.unforcedBody)(using Context): Template = + def Template(tree: Template)(using Context)(constr: DefDef = tree.constr, parents: List[Tree] = tree.parents, derived: List[untpd.Tree] = tree.derived, self: ValDef = tree.self, body: LazyTreeList = tree.unforcedBody): Template = Template(tree: Tree)(constr, parents, derived, self, body) - def Hole(tree: Hole)(isTerm: Boolean = tree.isTerm, idx: Int = tree.idx, args: List[Tree] = tree.args, content: Tree = tree.content, tpt: Tree = tree.tpt)(using Context): Hole = - Hole(tree: Tree)(isTerm, idx, args, content, tpt) + def Hole(tree: Hole)(isTerm: Boolean = tree.isTerm, idx: Int = tree.idx, args: List[Tree] = tree.args, content: Tree = tree.content)(using Context): Hole = + Hole(tree: Tree)(isTerm, idx, args, content) } @@ -1372,7 +1464,7 @@ object Trees { * innermost enclosing call for which the inlined version is currently * processed. 
*/ - protected def inlineContext(call: Tree)(using Context): Context = ctx + protected def inlineContext(call: tpd.Tree)(using Context): Context = ctx /** The context to use when mapping or accumulating over a tree */ def localCtx(tree: Tree)(using Context): Context @@ -1491,8 +1583,14 @@ object Trees { case Thicket(trees) => val trees1 = transform(trees) if (trees1 eq trees) tree else Thicket(trees1) - case tree @ Hole(_, _, args, content, tpt) => - cpy.Hole(tree)(args = transform(args), content = transform(content), tpt = transform(tpt)) + case Quote(body, tags) => + cpy.Quote(tree)(transform(body)(using quoteContext), transform(tags)) + case tree @ Splice(expr) => + cpy.Splice(tree)(transform(expr)(using spliceContext)) + case tree @ SplicePattern(body, args) => + cpy.SplicePattern(tree)(transform(body)(using spliceContext), transform(args)) + case tree @ Hole(isTerm, idx, args, content) => + cpy.Hole(tree)(isTerm, idx, transform(args), transform(content)) case _ => transformMoreCases(tree) } @@ -1620,8 +1718,8 @@ object Trees { inContext(localCtx(tree)) { this(x, rhs) } - case tree @ Template(constr, parents, self, _) if tree.derived.isEmpty => - this(this(this(this(x, constr), parents), self), tree.body) + case tree @ Template(constr, _, self, _) if tree.derived.isEmpty => + this(this(this(this(x, constr), tree.parents), self), tree.body) case Import(expr, _) => this(x, expr) case Export(expr, _) => @@ -1632,8 +1730,14 @@ object Trees { this(this(x, arg), annot) case Thicket(ts) => this(x, ts) - case Hole(_, _, args, content, tpt) => - this(this(this(x, args), content), tpt) + case Quote(body, tags) => + this(this(x, body)(using quoteContext), tags) + case Splice(expr) => + this(x, expr)(using spliceContext) + case SplicePattern(body, args) => + this(this(x, body)(using spliceContext), args) + case Hole(_, _, args, content) => + this(this(x, args), content) case _ => foldMoreCases(x, tree) } @@ -1747,7 +1851,7 @@ object Trees { val denot = 
receiver.tpe.member(method) if !denot.exists then overload.println(i"members = ${receiver.tpe.decls}") - report.error(i"no member $receiver . $method", receiver.srcPos) + report.error(em"no member $receiver . $method", receiver.srcPos) val selected = if (denot.isOverloaded) { def typeParamCount(tp: Type) = tp.widen match { diff --git a/compiler/src/dotty/tools/dotc/ast/tpd.scala b/compiler/src/dotty/tools/dotc/ast/tpd.scala index 52325e36037d..76e16cc00a90 100644 --- a/compiler/src/dotty/tools/dotc/ast/tpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/tpd.scala @@ -47,12 +47,18 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { Apply(expr, args) case _: RefTree | _: GenericApply | _: Inlined | _: Hole => ta.assignType(untpd.Apply(fn, args), fn, args) + case _ => + assert(ctx.reporter.errorsReported) + ta.assignType(untpd.Apply(fn, args), fn, args) def TypeApply(fn: Tree, args: List[Tree])(using Context): TypeApply = fn match case Block(Nil, expr) => TypeApply(expr, args) case _: RefTree | _: GenericApply => ta.assignType(untpd.TypeApply(fn, args), fn, args) + case _ => + assert(ctx.reporter.errorsReported) + ta.assignType(untpd.TypeApply(fn, args), fn, args) def Literal(const: Constant)(using Context): Literal = ta.assignType(untpd.Literal(const)) @@ -164,6 +170,15 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { def Inlined(call: Tree, bindings: List[MemberDef], expansion: Tree)(using Context): Inlined = ta.assignType(untpd.Inlined(call, bindings, expansion), bindings, expansion) + def Quote(body: Tree, tags: List[Tree])(using Context): Quote = + untpd.Quote(body, tags).withBodyType(body.tpe) + + def Splice(expr: Tree, tpe: Type)(using Context): Splice = + untpd.Splice(expr).withType(tpe) + + def Hole(isTerm: Boolean, idx: Int, args: List[Tree], content: Tree, tpe: Type)(using Context): Hole = + untpd.Hole(isTerm, idx, args, content).withType(tpe) + def TypeTree(tp: Type, inferred: Boolean = false)(using Context): TypeTree = (if 
inferred then untpd.InferredTypeTree() else untpd.TypeTree()).withType(tp) @@ -254,12 +269,12 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { // If `isParamDependent == false`, the value of `previousParamRefs` is not used. if isParamDependent then mutable.ListBuffer[TermRef]() else (null: ListBuffer[TermRef] | Null).uncheckedNN - def valueParam(name: TermName, origInfo: Type): TermSymbol = + def valueParam(name: TermName, origInfo: Type, isErased: Boolean): TermSymbol = val maybeImplicit = if tp.isContextualMethod then Given else if tp.isImplicitMethod then Implicit else EmptyFlags - val maybeErased = if tp.isErasedMethod then Erased else EmptyFlags + val maybeErased = if isErased then Erased else EmptyFlags def makeSym(info: Type) = newSymbol(sym, name, TermParam | maybeImplicit | maybeErased, info, coord = sym.coord) @@ -277,7 +292,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { assert(vparams.hasSameLengthAs(tp.paramNames) && vparams.head.isTerm) (vparams.asInstanceOf[List[TermSymbol]], remaining1) case nil => - (tp.paramNames.lazyZip(tp.paramInfos).map(valueParam), Nil) + (tp.paramNames.lazyZip(tp.paramInfos).lazyZip(tp.erasedParams).map(valueParam), Nil) val (rtp, paramss) = recur(tp.instantiate(vparams.map(_.termRef)), remaining1) (rtp, vparams :: paramss) case _ => @@ -385,9 +400,6 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { def Throw(expr: Tree)(using Context): Tree = ref(defn.throwMethod).appliedTo(expr) - def Hole(isTermHole: Boolean, idx: Int, args: List[Tree], content: Tree, tpt: Tree)(using Context): Hole = - ta.assignType(untpd.Hole(isTermHole, idx, args, content, tpt), tpt) - // ------ Making references ------------------------------------------------------ def prefixIsElidable(tp: NamedType)(using Context): Boolean = { @@ -414,6 +426,10 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { case _ => false } + def needsIdent(tp: Type)(using Context): Boolean = tp match + case tp: TermRef 
=> tp.prefix eq NoPrefix + case _ => false + /** A tree representing the same reference as the given type */ def ref(tp: NamedType, needLoad: Boolean = true)(using Context): Tree = if (tp.isType) TypeTree(tp) @@ -428,7 +444,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { else val res = Select(TypeTree(pre), tp) if needLoad && !res.symbol.isStatic then - throw new TypeError(em"cannot establish a reference to $res") + throw TypeError(em"cannot establish a reference to $res") res def ref(sym: Symbol)(using Context): Tree = @@ -857,7 +873,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { } /** After phase `trans`, set the owner of every definition in this tree that was formerly - * owner by `from` to `to`. + * owned by `from` to `to`. */ def changeOwnerAfter(from: Symbol, to: Symbol, trans: DenotTransformer)(using Context): ThisTree = if (ctx.phase == trans.next) { @@ -1130,10 +1146,10 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { def etaExpandCFT(using Context): Tree = def expand(target: Tree, tp: Type)(using Context): Tree = tp match - case defn.ContextFunctionType(argTypes, resType, isErased) => + case defn.ContextFunctionType(argTypes, resType, _) => val anonFun = newAnonFun( ctx.owner, - MethodType.companion(isContextual = true, isErased = isErased)(argTypes, resType), + MethodType.companion(isContextual = true)(argTypes, resType), coord = ctx.owner.coord) def lambdaBody(refss: List[List[Tree]]) = expand(target.select(nme.apply).appliedToArgss(refss), resType)( @@ -1144,35 +1160,38 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { expand(tree, tree.tpe.widen) } - inline val MapRecursionLimit = 10 - extension (trees: List[Tree]) - /** A map that expands to a recursive function. It's equivalent to + /** Equivalent (but faster) to * * flatten(trees.mapConserve(op)) * - * and falls back to it after `MaxRecursionLimit` recursions. 
- * Before that it uses a simpler method that uses stackspace - * instead of heap. - * Note `op` is duplicated in the generated code, so it should be - * kept small. + * assuming that `trees` does not contain `Thicket`s to start with. */ - inline def mapInline(inline op: Tree => Tree): List[Tree] = - def recur(trees: List[Tree], count: Int): List[Tree] = - if count > MapRecursionLimit then - // use a slower implementation that avoids stack overflows - flatten(trees.mapConserve(op)) - else trees match - case tree :: rest => - val tree1 = op(tree) - val rest1 = recur(rest, count + 1) - if (tree1 eq tree) && (rest1 eq rest) then trees - else tree1 match - case Thicket(elems1) => elems1 ::: rest1 - case _ => tree1 :: rest1 - case nil => nil - recur(trees, 0) + inline def flattenedMapConserve(inline f: Tree => Tree): List[Tree] = + @tailrec + def loop(mapped: ListBuffer[Tree] | Null, unchanged: List[Tree], pending: List[Tree]): List[Tree] = + if pending.isEmpty then + if mapped == null then unchanged + else mapped.prependToList(unchanged) + else + val head0 = pending.head + val head1 = f(head0) + + if head1 eq head0 then + loop(mapped, unchanged, pending.tail) + else + val buf = if mapped == null then new ListBuffer[Tree] else mapped + var xc = unchanged + while xc ne pending do + buf += xc.head + xc = xc.tail + head1 match + case Thicket(elems1) => buf ++= elems1 + case _ => buf += head1 + val tail0 = pending.tail + loop(buf, tail0, tail0) + loop(null, trees, trees) /** Transform statements while maintaining import contexts and expression contexts * in the same way as Typer does. 
The code addresses additional concerns: @@ -1296,7 +1315,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { else if (tree.tpe.widen isRef numericCls) tree else { - report.warning(i"conversion from ${tree.tpe.widen} to ${numericCls.typeRef} will always fail at runtime.") + report.warning(em"conversion from ${tree.tpe.widen} to ${numericCls.typeRef} will always fail at runtime.") Throw(New(defn.ClassCastExceptionClass.typeRef, Nil)).withSpan(tree.span) } } @@ -1495,7 +1514,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { } } - /** Creates the tuple type tree repesentation of the type trees in `ts` */ + /** Creates the tuple type tree representation of the type trees in `ts` */ def tupleTypeTree(elems: List[Tree])(using Context): Tree = { val arity = elems.length if arity <= Definitions.MaxTupleArity then @@ -1506,10 +1525,14 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { else nestedPairsTypeTree(elems) } - /** Creates the nested pairs type tree repesentation of the type trees in `ts` */ + /** Creates the nested pairs type tree representation of the type trees in `ts` */ def nestedPairsTypeTree(ts: List[Tree])(using Context): Tree = ts.foldRight[Tree](TypeTree(defn.EmptyTupleModule.termRef))((x, acc) => AppliedTypeTree(TypeTree(defn.PairClass.typeRef), x :: acc :: Nil)) + /** Creates the nested higher-kinded pairs type tree representation of the type trees in `ts` */ + def hkNestedPairsTypeTree(ts: List[Tree])(using Context): Tree = + ts.foldRight[Tree](TypeTree(defn.QuoteMatching_KNil.typeRef))((x, acc) => AppliedTypeTree(TypeTree(defn.QuoteMatching_KCons.typeRef), x :: acc :: Nil)) + /** Replaces all positions in `tree` with zero-extent positions */ private def focusPositions(tree: Tree)(using Context): Tree = { val transformer = new tpd.TreeMap { @@ -1531,7 +1554,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { * * @param trees the elements the list represented by * the resulting tree should contain. 
- * @param tpe the type of the elements of the resulting list. + * @param tpt the type of the elements of the resulting list. * */ def mkList(trees: List[Tree], tpt: Tree)(using Context): Tree = diff --git a/compiler/src/dotty/tools/dotc/ast/untpd.scala b/compiler/src/dotty/tools/dotc/ast/untpd.scala index ec3eb4f05b79..e3488034fef8 100644 --- a/compiler/src/dotty/tools/dotc/ast/untpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/untpd.scala @@ -42,7 +42,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { /** mods object name impl */ case class ModuleDef(name: TermName, impl: Template)(implicit @constructorOnly src: SourceFile) extends MemberDef { - type ThisTree[-T >: Untyped] <: Trees.NameTree[T] with Trees.MemberDef[T] with ModuleDef + type ThisTree[+T <: Untyped] <: Trees.NameTree[T] with Trees.MemberDef[T] with ModuleDef def withName(name: Name)(using Context): ModuleDef = cpy.ModuleDef(this)(name.toTermName, impl) } @@ -54,7 +54,8 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { */ class DerivingTemplate(constr: DefDef, parentsOrDerived: List[Tree], self: ValDef, preBody: LazyTreeList, derivedCount: Int)(implicit @constructorOnly src: SourceFile) extends Template(constr, parentsOrDerived, self, preBody) { - override val parents = parentsOrDerived.dropRight(derivedCount) + private val myParents = parentsOrDerived.dropRight(derivedCount) + override def parents(using Context) = myParents override val derived = parentsOrDerived.takeRight(derivedCount) } @@ -75,9 +76,13 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { override def isType: Boolean = body.isType } - /** A function type or closure with `implicit`, `erased`, or `given` modifiers */ - class FunctionWithMods(args: List[Tree], body: Tree, val mods: Modifiers)(implicit @constructorOnly src: SourceFile) - extends Function(args, body) + /** A function type or closure with `implicit` or `given` modifiers and information on which parameters are 
`erased` */ + class FunctionWithMods(args: List[Tree], body: Tree, val mods: Modifiers, val erasedParams: List[Boolean])(implicit @constructorOnly src: SourceFile) + extends Function(args, body) { + assert(args.length == erasedParams.length) + + def hasErasedParams = erasedParams.contains(true) + } /** A polymorphic function type */ case class PolyFunction(targs: List[Tree], body: Tree)(implicit @constructorOnly src: SourceFile) extends Tree { @@ -106,10 +111,6 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { override def isType: Boolean = !isTerm } case class Throw(expr: Tree)(implicit @constructorOnly src: SourceFile) extends TermTree - case class Quote(quoted: Tree)(implicit @constructorOnly src: SourceFile) extends TermTree - case class Splice(expr: Tree)(implicit @constructorOnly src: SourceFile) extends TermTree { - def isInBraces: Boolean = span.end != expr.span.end - } case class ForYield(enums: List[Tree], expr: Tree)(implicit @constructorOnly src: SourceFile) extends TermTree case class ForDo(enums: List[Tree], body: Tree)(implicit @constructorOnly src: SourceFile) extends TermTree case class GenFrom(pat: Tree, expr: Tree, checkMode: GenCheckMode)(implicit @constructorOnly src: SourceFile) extends Tree @@ -117,6 +118,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { case class ContextBounds(bounds: TypeBoundsTree, cxBounds: List[Tree])(implicit @constructorOnly src: SourceFile) extends TypTree case class PatDef(mods: Modifiers, pats: List[Tree], tpt: Tree, rhs: Tree)(implicit @constructorOnly src: SourceFile) extends DefTree case class ExtMethods(paramss: List[ParamClause], methods: List[Tree])(implicit @constructorOnly src: SourceFile) extends Tree + case class Into(tpt: Tree)(implicit @constructorOnly src: SourceFile) extends Tree case class MacroTree(expr: Tree)(implicit @constructorOnly src: SourceFile) extends Tree case class ImportSelector(imported: Ident, renamed: Tree = EmptyTree, bound: Tree = 
EmptyTree)(implicit @constructorOnly src: SourceFile) extends Tree { @@ -146,7 +148,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { } /** {x1, ..., xN} T (only relevant under captureChecking) */ - case class CapturingTypeTree(refs: List[Tree], parent: Tree)(implicit @constructorOnly src: SourceFile) extends TypTree + case class CapturesAndResult(refs: List[Tree], parent: Tree)(implicit @constructorOnly src: SourceFile) extends TypTree /** Short-lived usage in typer, does not need copy/transform/fold infrastructure */ case class DependentTypeTree(tp: List[Symbol] => Type)(implicit @constructorOnly src: SourceFile) extends Tree @@ -395,6 +397,9 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { def SeqLiteral(elems: List[Tree], elemtpt: Tree)(implicit src: SourceFile): SeqLiteral = new SeqLiteral(elems, elemtpt) def JavaSeqLiteral(elems: List[Tree], elemtpt: Tree)(implicit src: SourceFile): JavaSeqLiteral = new JavaSeqLiteral(elems, elemtpt) def Inlined(call: tpd.Tree, bindings: List[MemberDef], expansion: Tree)(implicit src: SourceFile): Inlined = new Inlined(call, bindings, expansion) + def Quote(body: Tree, tags: List[Tree])(implicit src: SourceFile): Quote = new Quote(body, tags) + def Splice(expr: Tree)(implicit src: SourceFile): Splice = new Splice(expr) + def SplicePattern(body: Tree, args: List[Tree])(implicit src: SourceFile): SplicePattern = new SplicePattern(body, args) def TypeTree()(implicit src: SourceFile): TypeTree = new TypeTree() def InferredTypeTree()(implicit src: SourceFile): TypeTree = new InferredTypeTree() def SingletonTypeTree(ref: Tree)(implicit src: SourceFile): SingletonTypeTree = new SingletonTypeTree(ref) @@ -414,11 +419,13 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { def Template(constr: DefDef, parents: List[Tree], derived: List[Tree], self: ValDef, body: LazyTreeList)(implicit src: SourceFile): Template = if (derived.isEmpty) new Template(constr, parents, self, 
body) else new DerivingTemplate(constr, parents ++ derived, self, body, derived.length) + def Template(constr: DefDef, parents: LazyTreeList, self: ValDef, body: LazyTreeList)(implicit src: SourceFile): Template = + new Template(constr, parents, self, body) def Import(expr: Tree, selectors: List[ImportSelector])(implicit src: SourceFile): Import = new Import(expr, selectors) def Export(expr: Tree, selectors: List[ImportSelector])(implicit src: SourceFile): Export = new Export(expr, selectors) def PackageDef(pid: RefTree, stats: List[Tree])(implicit src: SourceFile): PackageDef = new PackageDef(pid, stats) def Annotated(arg: Tree, annot: Tree)(implicit src: SourceFile): Annotated = new Annotated(arg, annot) - def Hole(isTermHole: Boolean, idx: Int, args: List[Tree], content: Tree, tpt: Tree)(implicit src: SourceFile): Hole = new Hole(isTermHole, idx, args, content, tpt) + def Hole(isTerm: Boolean, idx: Int, args: List[Tree], content: Tree)(implicit src: SourceFile): Hole = new Hole(isTerm, idx, args, content) // ------ Additional creation methods for untyped only ----------------- @@ -495,6 +502,9 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { def captureRoot(using Context): Select = Select(scalaDot(nme.caps), nme.CAPTURE_ROOT) + def makeRetaining(parent: Tree, refs: List[Tree], annotName: TypeName)(using Context): Annotated = + Annotated(parent, New(scalaAnnotationDot(annotName), List(refs))) + def makeConstructor(tparams: List[TypeDef], vparamss: List[List[ValDef]], rhs: Tree = EmptyTree)(using Context): DefDef = DefDef(nme.CONSTRUCTOR, joinParams(tparams, vparamss), TypeTree(), rhs) @@ -614,14 +624,6 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { case tree: Throw if expr eq tree.expr => tree case _ => finalize(tree, untpd.Throw(expr)(tree.source)) } - def Quote(tree: Tree)(quoted: Tree)(using Context): Tree = tree match { - case tree: Quote if quoted eq tree.quoted => tree - case _ => finalize(tree, 
untpd.Quote(quoted)(tree.source)) - } - def Splice(tree: Tree)(expr: Tree)(using Context): Tree = tree match { - case tree: Splice if expr eq tree.expr => tree - case _ => finalize(tree, untpd.Splice(expr)(tree.source)) - } def ForYield(tree: Tree)(enums: List[Tree], expr: Tree)(using Context): TermTree = tree match { case tree: ForYield if (enums eq tree.enums) && (expr eq tree.expr) => tree case _ => finalize(tree, untpd.ForYield(enums, expr)(tree.source)) @@ -649,6 +651,9 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { def ExtMethods(tree: Tree)(paramss: List[ParamClause], methods: List[Tree])(using Context): Tree = tree match case tree: ExtMethods if (paramss eq tree.paramss) && (methods == tree.methods) => tree case _ => finalize(tree, untpd.ExtMethods(paramss, methods)(tree.source)) + def Into(tree: Tree)(tpt: Tree)(using Context): Tree = tree match + case tree: Into if tpt eq tree.tpt => tree + case _ => finalize(tree, untpd.Into(tpt)(tree.source)) def ImportSelector(tree: Tree)(imported: Ident, renamed: Tree, bound: Tree)(using Context): Tree = tree match { case tree: ImportSelector if (imported eq tree.imported) && (renamed eq tree.renamed) && (bound eq tree.bound) => tree case _ => finalize(tree, untpd.ImportSelector(imported, renamed, bound)(tree.source)) @@ -657,9 +662,9 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { case tree: Number if (digits == tree.digits) && (kind == tree.kind) => tree case _ => finalize(tree, untpd.Number(digits, kind)) } - def CapturingTypeTree(tree: Tree)(refs: List[Tree], parent: Tree)(using Context): Tree = tree match - case tree: CapturingTypeTree if (refs eq tree.refs) && (parent eq tree.parent) => tree - case _ => finalize(tree, untpd.CapturingTypeTree(refs, parent)) + def CapturesAndResult(tree: Tree)(refs: List[Tree], parent: Tree)(using Context): Tree = tree match + case tree: CapturesAndResult if (refs eq tree.refs) && (parent eq tree.parent) => tree + case _ => 
finalize(tree, untpd.CapturesAndResult(refs, parent)) def TypedSplice(tree: Tree)(splice: tpd.Tree)(using Context): ProxyTree = tree match { case tree: TypedSplice if splice `eq` tree.splice => tree @@ -700,10 +705,6 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { cpy.Tuple(tree)(transform(trees)) case Throw(expr) => cpy.Throw(tree)(transform(expr)) - case Quote(t) => - cpy.Quote(tree)(transform(t)) - case Splice(expr) => - cpy.Splice(tree)(transform(expr)) case ForYield(enums, expr) => cpy.ForYield(tree)(transform(enums), transform(expr)) case ForDo(enums, body) => @@ -718,14 +719,16 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { cpy.PatDef(tree)(mods, transform(pats), transform(tpt), transform(rhs)) case ExtMethods(paramss, methods) => cpy.ExtMethods(tree)(transformParamss(paramss), transformSub(methods)) + case Into(tpt) => + cpy.Into(tree)(transform(tpt)) case ImportSelector(imported, renamed, bound) => cpy.ImportSelector(tree)(transformSub(imported), transform(renamed), transform(bound)) case Number(_, _) | TypedSplice(_) => tree case MacroTree(expr) => cpy.MacroTree(tree)(transform(expr)) - case CapturingTypeTree(refs, parent) => - cpy.CapturingTypeTree(tree)(transform(refs), transform(parent)) + case CapturesAndResult(refs, parent) => + cpy.CapturesAndResult(tree)(transform(refs), transform(parent)) case _ => super.transformMoreCases(tree) } @@ -759,10 +762,6 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { this(x, trees) case Throw(expr) => this(x, expr) - case Quote(t) => - this(x, t) - case Splice(expr) => - this(x, expr) case ForYield(enums, expr) => this(this(x, enums), expr) case ForDo(enums, body) => @@ -777,6 +776,8 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { this(this(this(x, pats), tpt), rhs) case ExtMethods(paramss, methods) => this(paramss.foldLeft(x)(apply), methods) + case Into(tpt) => + this(x, tpt) case ImportSelector(imported, renamed, bound) => 
this(this(this(x, imported), renamed), bound) case Number(_, _) => @@ -785,7 +786,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { this(x, splice) case MacroTree(expr) => this(x, expr) - case CapturingTypeTree(refs, parent) => + case CapturesAndResult(refs, parent) => this(this(x, refs), parent) case _ => super.foldMoreCases(x, tree) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index 3261cb1d90f8..3ba26c92cab5 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -6,6 +6,7 @@ import core.* import Types.*, Symbols.*, Contexts.*, Annotations.*, Flags.* import ast.{tpd, untpd} import Decorators.*, NameOps.* +import config.SourceVersion import config.Printers.capt import util.Property.Key import tpd.* @@ -19,6 +20,9 @@ private[cc] def retainedElems(tree: Tree)(using Context): List[Tree] = tree matc case Apply(_, Typed(SeqLiteral(elems, _), _) :: Nil) => elems case _ => Nil +def allowUniversalInBoxed(using Context) = + Feature.sourceVersion.isAtLeast(SourceVersion.`3.3`) + /** An exception thrown if a @retains argument is not syntactically a CaptureRef */ class IllegalCaptureRef(tpe: Type) extends Exception @@ -146,7 +150,6 @@ extension (tp: Type) defn.FunctionType( fname.functionArity, isContextual = fname.isContextFunction, - isErased = fname.isErasedFunction, isImpure = true).appliedTo(args) case _ => tp @@ -166,8 +169,54 @@ extension (tp: Type) case CapturingType(_, _) => true case _ => false + def isEventuallyCapturingType(using Context): Boolean = + tp match + case EventuallyCapturingType(_, _) => true + case _ => false + + /** Is type known to be always pure by its class structure, + * so that adding a capture set to it would not make sense? 
+ */ + def isAlwaysPure(using Context): Boolean = tp.dealias match + case tp: (TypeRef | AppliedType) => + val sym = tp.typeSymbol + if sym.isClass then sym.isPureClass + else tp.superType.isAlwaysPure + case CapturingType(parent, refs) => + parent.isAlwaysPure || refs.isAlwaysEmpty + case tp: TypeProxy => + tp.superType.isAlwaysPure + case tp: AndType => + tp.tp1.isAlwaysPure || tp.tp2.isAlwaysPure + case tp: OrType => + tp.tp1.isAlwaysPure && tp.tp2.isAlwaysPure + case _ => + false + +extension (cls: ClassSymbol) + + def pureBaseClass(using Context): Option[Symbol] = + cls.baseClasses.find(bc => + defn.pureBaseClasses.contains(bc) + || { + val selfType = bc.givenSelfType + selfType.exists && selfType.captureSet.isAlwaysEmpty + }) + extension (sym: Symbol) + /** A class is pure if: + * - one its base types has an explicitly declared self type with an empty capture set + * - or it is a value class + * - or it is an exception + * - or it is one of Nothing, Null, or String + */ + def isPureClass(using Context): Boolean = sym match + case cls: ClassSymbol => + cls.pureBaseClass.isDefined || defn.pureSimpleClasses.contains(cls) + case _ => + false + /** Does this symbol allow results carrying the universal capability? * Currently this is true only for function type applies (since their * results are unboxed) and `erasedValue` since this function is magic in diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala index d3e32ac538a4..fdc4f66beafa 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala @@ -70,7 +70,7 @@ sealed abstract class CaptureSet extends Showable: assert(!isConst) asInstanceOf[Var] - /** Does this capture set contain the root reference `*` as element? */ + /** Does this capture set contain the root reference `cap` as element? 
*/ final def isUniversal(using Context) = elems.exists { case ref: TermRef => ref.symbol == defn.captureRoot @@ -133,7 +133,7 @@ sealed abstract class CaptureSet extends Showable: * for `x` in a state where we assume all supersets of `x` have just the elements * known at this point. On the other hand if x's capture set has no known elements, * a set `cs` might account for `x` only if it subsumes `x` or it contains the - * root capability `*`. + * root capability `cap`. */ def mightAccountFor(x: CaptureRef)(using Context): Boolean = reporting.trace(i"$this mightAccountFor $x, ${x.captureSetOfInfo}?", show = true) { @@ -222,7 +222,7 @@ sealed abstract class CaptureSet extends Showable: /** The largest subset (via <:<) of this capture set that only contains elements * for which `p` is true. */ - def filter(p: CaptureRef => Boolean)(using Context): CaptureSet = + def filter(p: Context ?=> CaptureRef => Boolean)(using Context): CaptureSet = if this.isConst then val elems1 = elems.filter(p) if elems1 == elems then this @@ -270,11 +270,16 @@ sealed abstract class CaptureSet extends Showable: def substParams(tl: BindingType, to: List[Type])(using Context) = map(Substituters.SubstParamsMap(tl, to)) - /** Invoke handler if this set has (or later aquires) the root capability `*` */ - def disallowRootCapability(handler: () => Unit)(using Context): this.type = + /** Invoke handler if this set has (or later aquires) the root capability `cap` */ + def disallowRootCapability(handler: () => Context ?=> Unit)(using Context): this.type = if isUniversal then handler() this + /** Invoke handler on the elements to check wellformedness of the capture set */ + def ensureWellformed(handler: List[CaptureRef] => Context ?=> Unit)(using Context): this.type = + handler(elems.toList) + this + /** An upper approximation of this capture set, i.e. a constant set that is * subcaptured by this set. 
If the current set is a variable * it is the intersection of all upper approximations of known supersets @@ -319,7 +324,7 @@ object CaptureSet: /** The empty capture set `{}` */ val empty: CaptureSet.Const = Const(emptySet) - /** The universal capture set `{*}` */ + /** The universal capture set `{cap}` */ def universal(using Context): CaptureSet = defn.captureRoot.termRef.singletonCaptureSet @@ -372,8 +377,11 @@ object CaptureSet: def isConst = isSolved def isAlwaysEmpty = false - /** A handler to be invoked if the root reference `*` is added to this set */ - var addRootHandler: () => Unit = () => () + /** A handler to be invoked if the root reference `cap` is added to this set */ + var rootAddedHandler: () => Context ?=> Unit = () => () + + /** A handler to be invoked when new elems are added to this set */ + var newElemAddedHandler: List[CaptureRef] => Context ?=> Unit = _ => () var description: String = "" @@ -404,8 +412,9 @@ object CaptureSet: def addNewElems(newElems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = if !isConst && recordElemsState() then elems ++= newElems - if isUniversal then addRootHandler() - // assert(id != 2 || elems.size != 2, this) + if isUniversal then rootAddedHandler() + newElemAddedHandler(newElems.toList) + // assert(id != 5 || elems.size != 3, this) (CompareResult.OK /: deps) { (r, dep) => r.andAlso(dep.tryInclude(newElems, this)) } @@ -421,15 +430,19 @@ object CaptureSet: else CompareResult.fail(this) - override def disallowRootCapability(handler: () => Unit)(using Context): this.type = - addRootHandler = handler + override def disallowRootCapability(handler: () => Context ?=> Unit)(using Context): this.type = + rootAddedHandler = handler super.disallowRootCapability(handler) + override def ensureWellformed(handler: List[CaptureRef] => (Context) ?=> Unit)(using Context): this.type = + newElemAddedHandler = handler + super.ensureWellformed(handler) + private var computingApprox = false /** Roughly: the 
intersection of all constant known supersets of this set. * The aim is to find an as-good-as-possible constant set that is a superset - * of this set. The universal set {*} is a sound fallback. + * of this set. The universal set {cap} is a sound fallback. */ final def upperApprox(origin: CaptureSet)(using Context): CaptureSet = if computingApprox then universal @@ -546,7 +559,7 @@ object CaptureSet: else CompareResult.fail(this) } .andAlso { - if (origin ne source) && mapIsIdempotent then + if (origin ne source) && (origin ne initial) && mapIsIdempotent then // `tm` is idempotent, propagate back elems from image set. // This is sound, since we know that for `r in newElems: tm(r) = r`, hence // `r` is _one_ possible solution in `source` that would make an `r` appear in this set. @@ -559,7 +572,7 @@ object CaptureSet: // elements from variable sources in contra- and non-variant positions. In essence, // we approximate types resulting from such maps by returning a possible super type // from the actual type. But this is neither sound nor complete. 
- report.warning(i"trying to add elems ${CaptureSet(newElems)} from unrecognized source $origin of mapped set $this$whereCreated") + report.warning(em"trying to add elems ${CaptureSet(newElems)} from unrecognized source $origin of mapped set $this$whereCreated") CompareResult.fail(this) else CompareResult.OK @@ -613,7 +626,7 @@ object CaptureSet: /** A variable with elements given at any time as { x <- source.elems | p(x) } */ class Filtered private[CaptureSet] - (val source: Var, p: CaptureRef => Boolean)(using @constructorOnly ctx: Context) + (val source: Var, p: Context ?=> CaptureRef => Boolean)(using @constructorOnly ctx: Context) extends DerivedVar(source.elems.filter(p)): override def addNewElems(newElems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = diff --git a/compiler/src/dotty/tools/dotc/cc/CapturingType.scala b/compiler/src/dotty/tools/dotc/cc/CapturingType.scala index e9862f1f20b8..a7c283f4cc3b 100644 --- a/compiler/src/dotty/tools/dotc/cc/CapturingType.scala +++ b/compiler/src/dotty/tools/dotc/cc/CapturingType.scala @@ -48,6 +48,16 @@ object CapturingType: EventuallyCapturingType.unapply(tp) else None + /** Check whether a type is uncachable when computing `baseType`. + * - Avoid caching all the types during the setup phase, since at that point + * the capture set variables are not fully installed yet. + * - Avoid caching capturing types when IgnoreCaptures mode is set, since the + * capture sets may be thrown away in the computed base type. + */ + def isUncachable(tp: Type)(using Context): Boolean = + ctx.phase == Phases.checkCapturesPhase && + (Setup.isDuringSetup || ctx.mode.is(Mode.IgnoreCaptures) && tp.isEventuallyCapturingType) + end CapturingType /** An extractor for types that will be capturing types at phase CheckCaptures. 
Also diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index cf1d4266e89b..380b6ce5fb81 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -10,7 +10,8 @@ import config.Printers.{capt, recheckr} import config.{Config, Feature} import ast.{tpd, untpd, Trees} import Trees.* -import typer.RefChecks.{checkAllOverrides, checkParents} +import typer.RefChecks.{checkAllOverrides, checkSelfAgainstParents, OverridingPairsChecker} +import typer.Checking.{checkBounds, checkAppliedTypesIn} import util.{SimpleIdentitySet, EqHashMap, SrcPos} import transform.SymUtils.* import transform.{Recheck, PreRecheck} @@ -18,6 +19,7 @@ import Recheck.* import scala.collection.mutable import CaptureSet.{withCaptureSetsExplained, IdempotentCaptRefMap} import StdNames.nme +import NameKinds.DefaultGetterName import reporting.trace /** The capture checker */ @@ -50,12 +52,12 @@ object CheckCaptures: * @param outer0 the next enclosing environment */ case class Env( - owner: Symbol, - nestedInOwner: Boolean, - captured: CaptureSet, - isBoxed: Boolean, - outer0: Env | Null - ): + owner: Symbol, + nestedInOwner: Boolean, + captured: CaptureSet, + isBoxed: Boolean, + outer0: Env | Null): + def outer = outer0.nn def isOutermost = outer0 == null @@ -70,16 +72,23 @@ object CheckCaptures: */ final class SubstParamsMap(from: BindingType, to: List[Type])(using Context) extends ApproximatingTypeMap, IdempotentCaptRefMap: - def apply(tp: Type): Type = tp match - case tp: ParamRef => - if tp.binder == from then to(tp.paramNum) else tp - case tp: NamedType => - if tp.prefix `eq` NoPrefix then tp - else tp.derivedSelect(apply(tp.prefix)) - case _: ThisType => - tp - case _ => - mapOver(tp) + /** This SubstParamsMap is exact if `to` only contains `CaptureRef`s. 
*/ + private val isExactSubstitution: Boolean = to.forall(_.isInstanceOf[CaptureRef]) + + /** As long as this substitution is exact, there is no need to create `Range`s when mapping invariant positions. */ + override protected def needsRangeIfInvariant(refs: CaptureSet): Boolean = !isExactSubstitution + + def apply(tp: Type): Type = + tp match + case tp: ParamRef => + if tp.binder == from then to(tp.paramNum) else tp + case tp: NamedType => + if tp.prefix `eq` NoPrefix then tp + else tp.derivedSelect(apply(tp.prefix)) + case _: ThisType => + tp + case _ => + mapOver(tp) /** Check that a @retains annotation only mentions references that can be tracked. * This check is performed at Typer. @@ -126,6 +135,20 @@ object CheckCaptures: if remaining.accountsFor(firstRef) then report.warning(em"redundant capture: $remaining already accounts for $firstRef", ann.srcPos) + def disallowRootCapabilitiesIn(tp: Type, what: String, have: String, addendum: String, pos: SrcPos)(using Context) = + val check = new TypeTraverser: + def traverse(t: Type) = + if variance >= 0 then + t.captureSet.disallowRootCapability: () => + def part = if t eq tp then "" else i"the part $t of " + report.error( + em"""$what cannot $have $tp since + |${part}that type captures the root capability `cap`. + |$addendum""", + pos) + traverseChildren(t) + check.traverse(tp) + class CheckCaptures extends Recheck, SymTransformer: thisPhase => @@ -139,25 +162,12 @@ class CheckCaptures extends Recheck, SymTransformer: override def run(using Context): Unit = if Feature.ccEnabled then - checkOverrides.traverse(ctx.compilationUnit.tpdTree) super.run override def transformSym(sym: SymDenotation)(using Context): SymDenotation = if Synthetics.needsTransform(sym) then Synthetics.transformFromCC(sym) else super.transformSym(sym) - /** Check overrides again, taking capture sets into account. - * TODO: Can we avoid doing overrides checks twice? 
- * We need to do them here since only at this phase CaptureTypes are relevant - * But maybe we can then elide the check during the RefChecks phase under captureChecking? - */ - def checkOverrides = new TreeTraverser: - def traverse(t: Tree)(using Context) = - t match - case t: Template => checkAllOverrides(ctx.owner.asClass) - case _ => - traverseChildren(t) - class CaptureChecker(ictx: Context) extends Rechecker(ictx): import ast.tpd.* @@ -201,7 +211,7 @@ class CheckCaptures extends Recheck, SymTransformer: def checkElem(elem: CaptureRef, cs: CaptureSet, pos: SrcPos)(using Context) = val res = elem.singletonCaptureSet.subCaptures(cs, frozen = false) if !res.isOK then - report.error(i"$elem cannot be referenced here; it is not included in the allowed capture set ${res.blocking}", pos) + report.error(em"$elem cannot be referenced here; it is not included in the allowed capture set ${res.blocking}", pos) /** Check subcapturing `cs1 <: cs2`, report error on failure */ def checkSubset(cs1: CaptureSet, cs2: CaptureSet, pos: SrcPos)(using Context) = @@ -210,7 +220,7 @@ class CheckCaptures extends Recheck, SymTransformer: def header = if cs1.elems.size == 1 then i"reference ${cs1.elems.toList}%, % is not" else i"references $cs1 are not all" - report.error(i"$header included in allowed capture set ${res.blocking}", pos) + report.error(em"$header included in allowed capture set ${res.blocking}", pos) /** The current environment */ private var curEnv: Env = Env(NoSymbol, nestedInOwner = false, CaptureSet.empty, isBoxed = false, null) @@ -335,12 +345,21 @@ class CheckCaptures extends Recheck, SymTransformer: override def recheckApply(tree: Apply, pt: Type)(using Context): Type = val meth = tree.fun.symbol includeCallCaptures(meth, tree.srcPos) - if meth == defn.Caps_unsafeBox || meth == defn.Caps_unsafeUnbox then + def mapArgUsing(f: Type => Type) = val arg :: Nil = tree.args: @unchecked - val argType0 = recheckStart(arg, pt) - .forceBoxStatus(boxed = meth == 
defn.Caps_unsafeBox) + val argType0 = f(recheckStart(arg, pt)) val argType = super.recheckFinish(argType0, arg, pt) super.recheckFinish(argType, tree, pt) + + if meth == defn.Caps_unsafeBox then + mapArgUsing(_.forceBoxStatus(true)) + else if meth == defn.Caps_unsafeUnbox then + mapArgUsing(_.forceBoxStatus(false)) + else if meth == defn.Caps_unsafeBoxFunArg then + mapArgUsing { + case defn.FunctionOf(paramtpe :: Nil, restpe, isContectual) => + defn.FunctionOf(paramtpe.forceBoxStatus(true) :: Nil, restpe, isContectual) + } else super.recheckApply(tree, pt) match case appType @ CapturingType(appType1, refs) => @@ -432,9 +451,10 @@ class CheckCaptures extends Recheck, SymTransformer: block match case closureDef(mdef) => pt.dealias match - case defn.FunctionOf(ptformals, _, _, _) if ptformals.forall(_.captureSet.isAlwaysEmpty) => + case defn.FunctionOf(ptformals, _, _) + if ptformals.nonEmpty && ptformals.forall(_.captureSet.isAlwaysEmpty) => // Redo setup of the anonymous function so that formal parameters don't - // get capture sets. This is important to avoid false widenings to `*` + // get capture sets. This is important to avoid false widenings to `cap` // when taking the base type of the actual closures's dependent function // type so that it conforms to the expected non-dependent function type. // See withLogFile.scala for a test case. @@ -442,9 +462,10 @@ class CheckCaptures extends Recheck, SymTransformer: // First, undo the previous setup which installed a completer for `meth`. 
atPhase(preRecheckPhase.prev)(meth.denot.copySymDenotation()) .installAfter(preRecheckPhase) + // Next, update all parameter symbols to match expected formals meth.paramSymss.head.lazyZip(ptformals).foreach { (psym, pformal) => - psym.copySymDenotation(info = pformal).installAfter(preRecheckPhase) + psym.updateInfoBetween(preRecheckPhase, thisPhase, pformal.mapExprType) } // Next, update types of parameter ValDefs mdef.paramss.head.lazyZip(ptformals).foreach { (param, pformal) => @@ -452,36 +473,21 @@ class CheckCaptures extends Recheck, SymTransformer: tpt.rememberTypeAlways(pformal) } // Next, install a new completer reflecting the new parameters for the anonymous method + val mt = meth.info.asInstanceOf[MethodType] val completer = new LazyType: def complete(denot: SymDenotation)(using Context) = - denot.info = MethodType(ptformals, mdef.tpt.knownType) + denot.info = mt.companion(ptformals, mdef.tpt.knownType) .showing(i"simplify info of $meth to $result", capt) recheckDef(mdef, meth) - meth.copySymDenotation(info = completer, initFlags = meth.flags &~ Touched) - .installAfter(preRecheckPhase) + meth.updateInfoBetween(preRecheckPhase, thisPhase, completer) case _ => case _ => super.recheckBlock(block, pt) - /** If `rhsProto` has `*` as its capture set, wrap `rhs` in a `unsafeBox`. - * Used to infer `unsafeBox` for expressions that get assigned to variables - * that have universal capture set. 
- */ - def maybeBox(rhs: Tree, rhsProto: Type)(using Context): Tree = - if rhsProto.captureSet.isUniversal then - ref(defn.Caps_unsafeBox).appliedToType(rhsProto).appliedTo(rhs) - else rhs - - override def recheckAssign(tree: Assign)(using Context): Type = - val rhsProto = recheck(tree.lhs).widen - recheck(maybeBox(tree.rhs, rhsProto), rhsProto) - defn.UnitType - override def recheckValDef(tree: ValDef, sym: Symbol)(using Context): Unit = try if !sym.is(Module) then // Modules are checked by checking the module class - if sym.is(Mutable) then recheck(maybeBox(tree.rhs, sym.info), sym.info) - else super.recheckValDef(tree, sym) + super.recheckValDef(tree, sym) finally if !sym.is(Param) then // Parameters with inferred types belong to anonymous methods. We need to wait @@ -503,7 +509,8 @@ class CheckCaptures extends Recheck, SymTransformer: /** Class-specific capture set relations: * 1. The capture set of a class includes the capture sets of its parents. * 2. The capture set of the self type of a class includes the capture set of the class. - * 3. The capture set of the self type of a class includes the capture set of every class parameter. + * 3. The capture set of the self type of a class includes the capture set of every class parameter, + * unless the parameter is marked @constructorOnly. 
*/ override def recheckClassDef(tree: TypeDef, impl: Template, cls: ClassSymbol)(using Context): Type = val saved = curEnv @@ -515,7 +522,12 @@ class CheckCaptures extends Recheck, SymTransformer: val thisSet = cls.classInfo.selfType.captureSet.withDescription(i"of the self type of $cls") checkSubset(localSet, thisSet, tree.srcPos) // (2) for param <- cls.paramGetters do - checkSubset(param.termRef.captureSet, thisSet, param.srcPos) // (3) + if !param.hasAnnotation(defn.ConstructorOnlyAnnot) then + checkSubset(param.termRef.captureSet, thisSet, param.srcPos) // (3) + for pureBase <- cls.pureBaseClass do + checkSubset(thisSet, + CaptureSet.empty.withDescription(i"of pure base class $pureBase"), + tree.srcPos) super.recheckClassDef(tree, impl, cls) finally curEnv = saved @@ -534,6 +546,15 @@ class CheckCaptures extends Recheck, SymTransformer: case _ => super.recheckTyped(tree) + override def recheckTry(tree: Try, pt: Type)(using Context): Type = + val tp = super.recheckTry(tree, pt) + if allowUniversalInBoxed && Feature.enabled(Feature.saferExceptions) then + disallowRootCapabilitiesIn(tp, + "Result of `try`", "have type", + "This is often caused by a locally generated exception capability leaking as part of its result.", + tree.srcPos) + tp + /* Currently not needed, since capture checking takes place after ElimByName. * Keep around in case we need to get back to it def recheckByNameArg(tree: Tree, pt: Type)(using Context): Type = @@ -591,22 +612,39 @@ class CheckCaptures extends Recheck, SymTransformer: refs.disallowRootCapability { () => val kind = if tree.isInstanceOf[ValDef] then "mutable variable" else "expression" report.error( - em"""The $kind's type $wtp is not allowed to capture the root capability `*`. + em"""The $kind's type $wtp is not allowed to capture the root capability `cap`. 
|This usually means that a capability persists longer than its allowed lifetime.""", tree.srcPos) } checkNotUniversal(parent) case _ => - checkNotUniversal(typeToCheck) + if !allowUniversalInBoxed then checkNotUniversal(typeToCheck) super.recheckFinish(tpe, tree, pt) /** Massage `actual` and `expected` types using the methods below before checking conformance */ override def checkConformsExpr(actual: Type, expected: Type, tree: Tree)(using Context): Unit = - val expected1 = addOuterRefs(expected, actual) + val expected1 = alignDependentFunction(addOuterRefs(expected, actual), actual.stripCapturing) val actual1 = adaptBoxed(actual, expected1, tree.srcPos) //println(i"check conforms $actual1 <<< $expected1") super.checkConformsExpr(actual1, expected1, tree) + private def toDepFun(args: List[Type], resultType: Type, isContextual: Boolean)(using Context): Type = + MethodType.companion(isContextual = isContextual)(args, resultType) + .toFunctionType(isJava = false, alwaysDependent = true) + + /** Turn `expected` into a dependent function when `actual` is dependent. 
*/ + private def alignDependentFunction(expected: Type, actual: Type)(using Context): Type = + def recur(expected: Type): Type = expected.dealias match + case expected @ CapturingType(eparent, refs) => + CapturingType(recur(eparent), refs, boxed = expected.isBoxed) + case expected @ defn.FunctionOf(args, resultType, isContextual) + if defn.isNonRefinedFunction(expected) && defn.isFunctionType(actual) && !defn.isNonRefinedFunction(actual) => + val expected1 = toDepFun(args, resultType, isContextual) + expected1 + case _ => + expected + recur(expected) + /** For the expected type, implement the rule outlined in #14390: * - when checking an expression `a: Ca Ta` against an expected type `Ce Te`, * - where the capture set `Ce` contains Cls.this, @@ -647,8 +685,11 @@ class CheckCaptures extends Recheck, SymTransformer: case _ => expected - /** Adapt `actual` type to `expected` type by inserting boxing and unboxing conversions */ - def adaptBoxed(actual: Type, expected: Type, pos: SrcPos)(using Context): Type = + /** Adapt `actual` type to `expected` type by inserting boxing and unboxing conversions + * + * @param alwaysConst always make capture set variables constant after adaptation + */ + def adaptBoxed(actual: Type, expected: Type, pos: SrcPos, alwaysConst: Boolean = false)(using Context): Type = /** Adapt function type `actual`, which is `aargs -> ares` (possibly with dependencies) * to `expected` type. 
@@ -664,7 +705,7 @@ class CheckCaptures extends Recheck, SymTransformer: try val (eargs, eres) = expected.dealias.stripCapturing match - case defn.FunctionOf(eargs, eres, _, _) => (eargs, eres) + case defn.FunctionOf(eargs, eres, _) => (eargs, eres) case expected: MethodType => (expected.paramInfos, expected.resType) case expected @ RefinedType(_, _, rinfo: MethodType) if defn.isFunctionType(expected) => (rinfo.paramInfos, rinfo.resType) case _ => (aargs.map(_ => WildcardType), WildcardType) @@ -710,83 +751,77 @@ class CheckCaptures extends Recheck, SymTransformer: val arrow = if covariant then "~~>" else "<~~" i"adapting $actual $arrow $expected" - /** Destruct a capturing type `tp` to a tuple (cs, tp0, boxed), - * where `tp0` is not a capturing type. - * - * If `tp` is a nested capturing type, the return tuple always represents - * the innermost capturing type. The outer capture annotations can be - * reconstructed with the returned function. - */ - def destructCapturingType(tp: Type, reconstruct: Type => Type = x => x): ((Type, CaptureSet, Boolean), Type => Type) = - tp.dealias match - case tp @ CapturingType(parent, cs) => - if parent.dealias.isCapturingType then - destructCapturingType(parent, res => reconstruct(tp.derivedCapturingType(res, cs))) - else - ((parent, cs, tp.isBoxed), reconstruct) - case actual => - ((actual, CaptureSet(), false), reconstruct) - def adapt(actual: Type, expected: Type, covariant: Boolean): Type = trace(adaptInfo(actual, expected, covariant), recheckr, show = true) { if expected.isInstanceOf[WildcardType] then actual else - val ((parent, cs, actualIsBoxed), recon) = destructCapturingType(actual) - - val needsAdaptation = actualIsBoxed != expected.isBoxedCapturing - val insertBox = needsAdaptation && covariant != actualIsBoxed - - val (parent1, cs1) = parent match { + // Decompose the actual type into the inner shape type, the capture set and the box status + val styp = if actual.isFromJavaObject then actual else 
actual.stripCapturing + val cs = actual.captureSet + val boxed = actual.isBoxedCapturing + + // A box/unbox should be inserted, if the actual box status mismatches with the expectation + val needsAdaptation = boxed != expected.isBoxedCapturing + // Whether to insert a box or an unbox? + val insertBox = needsAdaptation && covariant != boxed + + // Adapt the inner shape type: get the adapted shape type, and the capture set leaked during adaptation + val (styp1, leaked) = styp match { case actual @ AppliedType(tycon, args) if defn.isNonRefinedFunction(actual) => - val (parent1, leaked) = adaptFun(parent, args.init, args.last, expected, covariant, insertBox, + adaptFun(actual, args.init, args.last, expected, covariant, insertBox, (aargs1, ares1) => actual.derivedAppliedType(tycon, aargs1 :+ ares1)) - (parent1, leaked ++ cs) - case actual @ RefinedType(_, _, rinfo: MethodType) if defn.isFunctionType(actual) => + case actual @ RefinedType(_, _, rinfo: MethodType) if defn.isFunctionOrPolyType(actual) => // TODO Find a way to combine handling of generic and dependent function types (here and elsewhere) - val (parent1, leaked) = adaptFun(parent, rinfo.paramInfos, rinfo.resType, expected, covariant, insertBox, + adaptFun(actual, rinfo.paramInfos, rinfo.resType, expected, covariant, insertBox, (aargs1, ares1) => rinfo.derivedLambdaType(paramInfos = aargs1, resType = ares1) .toFunctionType(isJava = false, alwaysDependent = true)) - (parent1, leaked ++ cs) case actual: MethodType => - val (parent1, leaked) = adaptFun(parent, actual.paramInfos, actual.resType, expected, covariant, insertBox, + adaptFun(actual, actual.paramInfos, actual.resType, expected, covariant, insertBox, (aargs1, ares1) => actual.derivedLambdaType(paramInfos = aargs1, resType = ares1)) - (parent1, leaked ++ cs) case actual @ RefinedType(p, nme, rinfo: PolyType) if defn.isFunctionOrPolyType(actual) => - val (parent1, leaked) = adaptTypeFun(parent, rinfo.resType, expected, covariant, insertBox, + 
adaptTypeFun(actual, rinfo.resType, expected, covariant, insertBox, ares1 => val rinfo1 = rinfo.derivedLambdaType(rinfo.paramNames, rinfo.paramInfos, ares1) val actual1 = actual.derivedRefinedType(p, nme, rinfo1) actual1 ) - (parent1, leaked ++ cs) case _ => - (parent, cs) + (styp, CaptureSet()) } + // Capture set of the term after adaptation + val cs1 = cs ++ leaked + + // Compute the adapted type + def adaptedType(resultBoxed: Boolean) = + styp1.capturing(if alwaysConst then CaptureSet(cs1.elems) else cs1).forceBoxStatus(resultBoxed) + if needsAdaptation then - val criticalSet = // the set which is not allowed to have `*` - if covariant then cs1 // can't box with `*` - else expected.captureSet // can't unbox with `*` - if criticalSet.isUniversal && expected.isValueType then + val criticalSet = // the set which is not allowed to have `cap` + if covariant then cs1 // can't box with `cap` + else expected.captureSet // can't unbox with `cap` + if criticalSet.isUniversal && expected.isValueType && !allowUniversalInBoxed then // We can't box/unbox the universal capability. Leave `actual` as it is // so we get an error in checkConforms. This tends to give better error // messages than disallowing the root capability in `criticalSet`. - capt.println(i"cannot box/unbox $actual vs $expected") + if ctx.settings.YccDebug.value then + println(i"cannot box/unbox $actual vs $expected") actual else - // Disallow future addition of `*` to `criticalSet`. - criticalSet.disallowRootCapability { () => - report.error( - em"""$actual cannot be box-converted to $expected - |since one of their capture sets contains the root capability `*`""", - pos) - } + if !allowUniversalInBoxed then + // Disallow future addition of `cap` to `criticalSet`. 
+ criticalSet.disallowRootCapability { () => + report.error( + em"""$actual cannot be box-converted to $expected + |since one of their capture sets contains the root capability `cap`""", + pos) + } if !insertBox then // unboxing markFree(criticalSet, pos) - recon(CapturingType(parent1, cs1, !actualIsBoxed)) + adaptedType(!boxed) else - recon(CapturingType(parent1, cs1, actualIsBoxed)) + adaptedType(boxed) } var actualw = actual.widenDealias @@ -805,12 +840,49 @@ class CheckCaptures extends Recheck, SymTransformer: else actual end adaptBoxed + /** Check overrides again, taking capture sets into account. + * TODO: Can we avoid doing overrides checks twice? + * We need to do them here since only at this phase CaptureTypes are relevant + * But maybe we can then elide the check during the RefChecks phase under captureChecking? + */ + def checkOverrides = new TreeTraverser: + class OverridingPairsCheckerCC(clazz: ClassSymbol, self: Type, srcPos: SrcPos)(using Context) extends OverridingPairsChecker(clazz, self) { + /** Check subtype with box adaptation. + * This function is passed to RefChecks to check the compatibility of overriding pairs. + * @param sym symbol of the field definition that is being checked + */ + override def checkSubType(actual: Type, expected: Type)(using Context): Boolean = + val expected1 = alignDependentFunction(addOuterRefs(expected, actual), actual.stripCapturing) + val actual1 = + val saved = curEnv + try + curEnv = Env(clazz, nestedInOwner = true, capturedVars(clazz), isBoxed = false, outer0 = curEnv) + val adapted = adaptBoxed(actual, expected1, srcPos, alwaysConst = true) + actual match + case _: MethodType => + // We remove the capture set resulted from box adaptation for method types, + // since class methods are always treated as pure, and their captured variables + // are charged to the capture set of the class (which is already done during + // box adaptation). 
+ adapted.stripCapturing + case _ => adapted + finally curEnv = saved + actual1 frozen_<:< expected1 + } + + def traverse(t: Tree)(using Context) = + t match + case t: Template => + checkAllOverrides(ctx.owner.asClass, OverridingPairsCheckerCC(_, _, t)) + case _ => + traverseChildren(t) + override def checkUnit(unit: CompilationUnit)(using Context): Unit = - Setup(preRecheckPhase, thisPhase, recheckDef) - .traverse(ctx.compilationUnit.tpdTree) + Setup(preRecheckPhase, thisPhase, recheckDef)(ctx.compilationUnit.tpdTree) //println(i"SETUP:\n${Recheck.addRecheckedTypes.transform(ctx.compilationUnit.tpdTree)}") withCaptureSetsExplained { super.checkUnit(unit) + checkOverrides.traverse(unit.tpdTree) checkSelfTypes(unit.tpdTree) postCheck(unit.tpdTree) if ctx.settings.YccDebug.value then @@ -819,7 +891,7 @@ class CheckCaptures extends Recheck, SymTransformer: /** Check that self types of subclasses conform to self types of super classes. * (See comment below how this is achieved). The check assumes that classes - * without an explicit self type have the universal capture set `{*}` on the + * without an explicit self type have the universal capture set `{cap}` on the * self type. If a class without explicit self type is not `effectivelyFinal` * it is checked that the inferred self type is universal, in order to assure * that joint and separate compilation give the same result. 
@@ -845,65 +917,180 @@ class CheckCaptures extends Recheck, SymTransformer: cls => !parentTrees(cls).exists(ptree => parentTrees.contains(ptree.tpe.classSymbol)) } assert(roots.nonEmpty) - for root <- roots do - checkParents(root, parentTrees(root)) + for case root: ClassSymbol <- roots do + checkSelfAgainstParents(root, root.baseClasses) val selfType = root.asClass.classInfo.selfType interpolator(startingVariance = -1).traverse(selfType) if !root.isEffectivelySealed then + def matchesExplicitRefsInBaseClass(refs: CaptureSet, cls: ClassSymbol): Boolean = + cls.baseClasses.tail.exists { psym => + val selfType = psym.asClass.givenSelfType + selfType.exists && selfType.captureSet.elems == refs.elems + } selfType match - case CapturingType(_, refs: CaptureSet.Var) if !refs.isUniversal => + case CapturingType(_, refs: CaptureSet.Var) + if !refs.isUniversal && !matchesExplicitRefsInBaseClass(refs, root) => + // Forbid inferred self types unless they are already implied by an explicit + // self type in a parent. report.error( - i"""$root needs an explicitly declared self type since its - |inferred self type $selfType - |is not visible in other compilation units that define subclasses.""", + em"""$root needs an explicitly declared self type since its + |inferred self type $selfType + |is not visible in other compilation units that define subclasses.""", root.srcPos) case _ => parentTrees -= root capt.println(i"checked $root with $selfType") end checkSelfTypes + /** Heal ill-formed capture sets in the type parameter. + * + * We can push parameter refs into a capture set in type parameters + * that this type parameter can't see. + * For example, when capture checking the following expression: + * + * def usingLogFile[T](op: (f: {cap} File) => T): T = ... + * + * usingLogFile[box ?1 () -> Unit] { (f: {cap} File) => () => { f.write(0) } } + * + * We may propagate `f` into ?1, making ?1 ill-formed. 
+ * This also causes soundness issues, since `f` in ?1 should be widened to `cap`, + * giving rise to an error that `cap` cannot be included in a boxed capture set. + * + * To solve this, we still allow ?1 to capture parameter refs like `f`, but + * compensate this by pushing the widened capture set of `f` into ?1. + * This solves the soundness issue caused by the ill-formness of ?1. + */ + private def healTypeParam(tree: Tree)(using Context): Unit = + val checker = new TypeTraverser: + private def isAllowed(ref: CaptureRef): Boolean = ref match + case ref: TermParamRef => allowed.contains(ref) + case _ => true + + // Widen the given term parameter refs x₁ : C₁ S₁ , ⋯ , xₙ : Cₙ Sₙ to their capture sets C₁ , ⋯ , Cₙ. + // + // If in these capture sets there are any capture references that are term parameter references we should avoid, + // we will widen them recursively. + private def widenParamRefs(refs: List[TermParamRef]): List[CaptureSet] = + @scala.annotation.tailrec + def recur(todos: List[TermParamRef], acc: List[CaptureSet]): List[CaptureSet] = + todos match + case Nil => acc + case ref :: rem => + val cs = ref.captureSetOfInfo + val nextAcc = cs.filter(isAllowed(_)) :: acc + val nextRem: List[TermParamRef] = (cs.elems.toList.filter(!isAllowed(_)) ++ rem).asInstanceOf + recur(nextRem, nextAcc) + recur(refs, Nil) + + private def healCaptureSet(cs: CaptureSet): Unit = + def avoidance(elems: List[CaptureRef])(using Context): Unit = + val toInclude = widenParamRefs(elems.filter(!isAllowed(_)).asInstanceOf) + //println(i"HEAL $cs by widening to $toInclude") + toInclude.foreach(checkSubset(_, cs, tree.srcPos)) + cs.ensureWellformed(avoidance) + + private var allowed: SimpleIdentitySet[TermParamRef] = SimpleIdentitySet.empty + + def traverse(tp: Type) = + tp match + case CapturingType(parent, refs) => + healCaptureSet(refs) + traverse(parent) + case tp @ RefinedType(parent, rname, rinfo: MethodType) if defn.isFunctionOrPolyType(tp) => + traverse(rinfo) + case tp: 
TermLambda => + val saved = allowed + try + tp.paramRefs.foreach(allowed += _) + traverseChildren(tp) + finally allowed = saved + case _ => + traverseChildren(tp) + + if tree.isInstanceOf[InferredTypeTree] then + checker.traverse(tree.knownType) + end healTypeParam + /** Perform the following kinds of checks * - Check all explicitly written capturing types for well-formedness using `checkWellFormedPost`. * - Check that externally visible `val`s or `def`s have empty capture sets. If not, * suggest an explicit type. This is so that separate compilation (where external * symbols have empty capture sets) gives the same results as joint compilation. + * - Check that arguments of TypeApplys and AppliedTypes conform to their bounds. + * - Heal ill-formed capture sets of type parameters. See `healTypeParam`. */ def postCheck(unit: tpd.Tree)(using Context): Unit = - unit.foreachSubTree { - case _: InferredTypeTree => - case tree: TypeTree if !tree.span.isZeroExtent => - tree.knownType.foreachPart { tp => - checkWellformedPost(tp, tree.srcPos) - tp match - case AnnotatedType(_, annot) if annot.symbol == defn.RetainsAnnot => - warnIfRedundantCaptureSet(annot.tree) + val checker = new TreeTraverser: + def traverse(tree: Tree)(using Context): Unit = + traverseChildren(tree) + check(tree) + def check(tree: Tree) = tree match + case _: InferredTypeTree => + case tree: TypeTree if !tree.span.isZeroExtent => + tree.knownType.foreachPart { tp => + checkWellformedPost(tp, tree.srcPos) + tp match + case AnnotatedType(_, annot) if annot.symbol == defn.RetainsAnnot => + warnIfRedundantCaptureSet(annot.tree) + case _ => + } + case t: ValOrDefDef + if t.tpt.isInstanceOf[InferredTypeTree] && !Synthetics.isExcluded(t.symbol) => + val sym = t.symbol + val isLocal = + sym.owner.ownersIterator.exists(_.isTerm) + || sym.accessBoundary(defn.RootClass).isContainedIn(sym.topLevelClass) + def canUseInferred = // If canUseInferred is false, all capturing types in the type of `sym` need to be given 
explicitly + sym.is(Private) // private symbols can always have inferred types + || sym.name.is(DefaultGetterName) // default getters are exempted since otherwise it would be + // too annoying. This is a hole since a defualt getter's result type + // might leak into a type variable. + || // non-local symbols cannot have inferred types since external capture types are not inferred + isLocal // local symbols still need explicit types if + && !sym.owner.is(Trait) // they are defined in a trait, since we do OverridingPairs checking before capture inference + def isNotPureThis(ref: CaptureRef) = ref match { + case ref: ThisType => !ref.cls.isPureClass + case _ => true + } + if !canUseInferred then + val inferred = t.tpt.knownType + def checkPure(tp: Type) = tp match + case CapturingType(_, refs) + if !refs.elems.filter(isNotPureThis).isEmpty => + val resultStr = if t.isInstanceOf[DefDef] then " result" else "" + report.error( + em"""Non-local $sym cannot have an inferred$resultStr type + |$inferred + |with non-empty capture set $refs. + |The type needs to be declared explicitly.""".withoutDisambiguation(), + t.srcPos) + case _ => + inferred.foreachPart(checkPure, StopAt.Static) + case t @ TypeApply(fun, args) => + fun.knownType.widen match + case tl: PolyType => + val normArgs = args.lazyZip(tl.paramInfos).map { (arg, bounds) => + arg.withType(arg.knownType.forceBoxStatus( + bounds.hi.isBoxedCapturing | bounds.lo.isBoxedCapturing)) + } + checkBounds(normArgs, tl) case _ => - } - case t: ValOrDefDef - if t.tpt.isInstanceOf[InferredTypeTree] && !Synthetics.isExcluded(t.symbol) => - val sym = t.symbol - val isLocal = - sym.owner.ownersIterator.exists(_.isTerm) - || sym.accessBoundary(defn.RootClass).isContainedIn(sym.topLevelClass) - - // The following classes of definitions need explicit capture types ... - if !isLocal // ... since external capture types are not inferred - || sym.owner.is(Trait) // ... 
since we do OverridingPairs checking before capture inference - || sym.allOverriddenSymbols.nonEmpty // ... since we do override checking before capture inference - then - val inferred = t.tpt.knownType - def checkPure(tp: Type) = tp match - case CapturingType(_, refs) if !refs.elems.isEmpty => - val resultStr = if t.isInstanceOf[DefDef] then " result" else "" - report.error( - em"""Non-local $sym cannot have an inferred$resultStr type - |$inferred - |with non-empty capture set $refs. - |The type needs to be declared explicitly.""", t.srcPos) - case _ => - inferred.foreachPart(checkPure, StopAt.Static) - case _ => - } + args.foreach(healTypeParam(_)) + case _ => + end check + end checker + checker.traverse(unit) + if !ctx.reporter.errorsReported then + // We dont report errors here if previous errors were reported, because other + // errors often result in bad applied types, but flagging these bad types gives + // often worse error messages than the original errors. + val checkApplied = new TreeTraverser: + def traverse(t: Tree)(using Context) = t match + case tree: InferredTypeTree => + case tree: New => + case tree: TypeTree => checkAppliedTypesIn(tree.withKnownType) + case _ => traverseChildren(t) + checkApplied.traverse(unit) end CaptureChecker end CheckCaptures diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index 42c80e524a6e..bbe54f14b86c 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -11,6 +11,8 @@ import ast.tpd import transform.Recheck.* import CaptureSet.IdentityCaptRefMap import Synthetics.isExcluded +import util.Property +import dotty.tools.dotc.core.Annotations.Annotation /** A tree traverser that prepares a compilation unit to be capture checked. 
* It does the following: @@ -37,7 +39,6 @@ extends tpd.TreeTraverser: private def depFun(tycon: Type, argTypes: List[Type], resType: Type)(using Context): Type = MethodType.companion( isContextual = defn.isContextFunctionClass(tycon.classSymbol), - isErased = defn.isErasedFunctionClass(tycon.classSymbol) )(argTypes, resType) .toFunctionType(isJava = false, alwaysDependent = true) @@ -53,7 +54,7 @@ extends tpd.TreeTraverser: val boxedRes = recur(res) if boxedRes eq res then tp else tp1.derivedAppliedType(tycon, args.init :+ boxedRes) - case tp1 @ RefinedType(_, _, rinfo) if defn.isFunctionType(tp1) => + case tp1 @ RefinedType(_, _, rinfo: MethodType) if defn.isFunctionOrPolyType(tp1) => val boxedRinfo = recur(rinfo) if boxedRinfo eq rinfo then tp else boxedRinfo.toFunctionType(isJava = false, alwaysDependent = true) @@ -98,7 +99,10 @@ extends tpd.TreeTraverser: def addCaptureRefinements(tp: Type): Type = tp match case _: TypeRef | _: AppliedType if tp.typeParams.isEmpty => tp.typeSymbol match - case cls: ClassSymbol if !defn.isFunctionClass(cls) => + case cls: ClassSymbol + if !defn.isFunctionClass(cls) && !cls.is(JavaDefined) => + // We assume that Java classes can refer to capturing Scala types only indirectly, + // using type parameters. Hence, no need to refine them. cls.paramGetters.foldLeft(tp) { (core, getter) => if getter.termRef.isTracked then val getterType = tp.memberInfo(getter).strippedDealias @@ -117,14 +121,14 @@ extends tpd.TreeTraverser: case tp: (TypeRef | AppliedType) => val sym = tp.typeSymbol if sym.isClass then - tp.typeSymbol == defn.AnyClass - // we assume Any is a shorthand of {*} Any, so if Any is an upper + sym == defn.AnyClass + // we assume Any is a shorthand of {cap} Any, so if Any is an upper // bound, the type is taken to be impure. 
else superTypeIsImpure(tp.superType) case tp: (RefinedOrRecType | MatchType) => superTypeIsImpure(tp.underlying) case tp: AndType => - superTypeIsImpure(tp.tp1) || canHaveInferredCapture(tp.tp2) + superTypeIsImpure(tp.tp1) || needsVariable(tp.tp2) case tp: OrType => superTypeIsImpure(tp.tp1) && superTypeIsImpure(tp.tp2) case _ => @@ -132,23 +136,26 @@ extends tpd.TreeTraverser: }.showing(i"super type is impure $tp = $result", capt) /** Should a capture set variable be added on type `tp`? */ - def canHaveInferredCapture(tp: Type): Boolean = { + def needsVariable(tp: Type): Boolean = { tp.typeParams.isEmpty && tp.match case tp: (TypeRef | AppliedType) => val tp1 = tp.dealias - if tp1 ne tp then canHaveInferredCapture(tp1) + if tp1 ne tp then needsVariable(tp1) else val sym = tp1.typeSymbol - if sym.isClass then !sym.isValueClass && sym != defn.AnyClass + if sym.isClass then + !sym.isPureClass && sym != defn.AnyClass else superTypeIsImpure(tp1) case tp: (RefinedOrRecType | MatchType) => - canHaveInferredCapture(tp.underlying) + needsVariable(tp.underlying) case tp: AndType => - canHaveInferredCapture(tp.tp1) && canHaveInferredCapture(tp.tp2) + needsVariable(tp.tp1) && needsVariable(tp.tp2) case tp: OrType => - canHaveInferredCapture(tp.tp1) || canHaveInferredCapture(tp.tp2) - case CapturingType(_, refs) => - refs.isConst && !refs.isUniversal + needsVariable(tp.tp1) || needsVariable(tp.tp2) + case CapturingType(parent, refs) => + needsVariable(parent) + && refs.isConst // if refs is a variable, no need to add another + && !refs.isUniversal // if refs is {cap}, an added variable would not change anything case _ => false }.showing(i"can have inferred capture $tp = $result", capt) @@ -181,7 +188,7 @@ extends tpd.TreeTraverser: CapturingType(OrType(parent1, tp2, tp.isSoft), refs1, tp1.isBoxed) case tp @ OrType(tp1, tp2 @ CapturingType(parent2, refs2)) => CapturingType(OrType(tp1, parent2, tp.isSoft), refs2, tp2.isBoxed) - case _ if canHaveInferredCapture(tp) => + case _ if 
needsVariable(tp) => val cs = tp.dealias match case CapturingType(_, refs) => CaptureSet.Var(refs.elems) case _ => CaptureSet.Var() @@ -206,20 +213,25 @@ extends tpd.TreeTraverser: val tycon1 = this(tycon) if defn.isNonRefinedFunction(tp) then // Convert toplevel generic function types to dependent functions - val args0 = args.init - var res0 = args.last - val args1 = mapNested(args0) - val res1 = this(res0) - if isTopLevel then - depFun(tycon1, args1, res1) - .showing(i"add function refinement $tp --> $result", capt) - else if (tycon1 eq tycon) && (args1 eq args0) && (res1 eq res0) then - tp + if !defn.isFunctionSymbol(tp.typeSymbol) && (tp.dealias ne tp) then + // This type is a function after dealiasing, so we dealias and recurse. + // See #15925. + this(tp.dealias) else - tp.derivedAppliedType(tycon1, args1 :+ res1) + val args0 = args.init + var res0 = args.last + val args1 = mapNested(args0) + val res1 = this(res0) + if isTopLevel then + depFun(tycon1, args1, res1) + .showing(i"add function refinement $tp ($tycon1, $args1, $res1) (${tp.dealias}) --> $result", capt) + else if (tycon1 eq tycon) && (args1 eq args0) && (res1 eq res0) then + tp + else + tp.derivedAppliedType(tycon1, args1 :+ res1) else tp.derivedAppliedType(tycon1, args.mapConserve(arg => this(arg))) - case tp @ RefinedType(core, rname, rinfo) if defn.isFunctionType(tp) => + case tp @ RefinedType(core, rname, rinfo: MethodType) if defn.isFunctionOrPolyType(tp) => val rinfo1 = apply(rinfo) if rinfo1 ne rinfo then rinfo1.toFunctionType(isJava = false, alwaysDependent = true) else tp @@ -248,7 +260,13 @@ extends tpd.TreeTraverser: private def expandThrowsAlias(tp: Type)(using Context) = tp match case AppliedType(tycon, res :: exc :: Nil) if tycon.typeSymbol == defn.throwsAlias => // hard-coded expansion since $throws aliases in stdlib are defined with `?=>` rather than `?->` - defn.FunctionOf(defn.CanThrowClass.typeRef.appliedTo(exc) :: Nil, res, isContextual = true, isErased = true) + 
defn.FunctionOf( + AnnotatedType( + defn.CanThrowClass.typeRef.appliedTo(exc), + Annotation(defn.ErasedParamAnnot, defn.CanThrowClass.span)) :: Nil, + res, + isContextual = true + ) case _ => tp private def expandThrowsAliases(using Context) = new TypeMap: @@ -311,7 +329,7 @@ extends tpd.TreeTraverser: args.last, CaptureSet.empty, currentCs ++ outerCs) tp.derivedAppliedType(tycon1, args1 :+ resType1) tp1.capturing(outerCs) - case tp @ RefinedType(parent, nme.apply, rinfo: MethodType) if defn.isFunctionType(tp) => + case tp @ RefinedType(parent, nme.apply, rinfo: MethodType) if defn.isFunctionOrPolyType(tp) => propagateDepFunctionResult(mapOver(tp), currentCs ++ outerCs) .capturing(outerCs) case _ => @@ -382,24 +400,39 @@ extends tpd.TreeTraverser: return tree.tpt match case tpt: TypeTree if tree.symbol.allOverriddenSymbols.hasNext => + tree.paramss.foreach(traverse) transformTT(tpt, boxed = false, exact = true) + traverse(tree.rhs) //println(i"TYPE of ${tree.symbol.showLocated} = ${tpt.knownType}") case _ => - traverseChildren(tree) + traverseChildren(tree) case tree @ ValDef(_, tpt: TypeTree, _) => - val isVar = tree.symbol.is(Mutable) - val overrides = tree.symbol.allOverriddenSymbols.hasNext - //if overrides then println(i"transforming overriding ${tree.symbol}") - if isVar || overrides then - transformTT(tpt, - boxed = isVar, // types of mutable variables are boxed - exact = overrides // types of symbols that override a parent don't get a capture set - ) - traverseChildren(tree) + transformTT(tpt, + boxed = tree.symbol.is(Mutable), // types of mutable variables are boxed + exact = tree.symbol.allOverriddenSymbols.hasNext // types of symbols that override a parent don't get a capture set + ) + if allowUniversalInBoxed && tree.symbol.is(Mutable) + && !tree.symbol.hasAnnotation(defn.UncheckedCapturesAnnot) + then + CheckCaptures.disallowRootCapabilitiesIn(tpt.knownType, + i"Mutable variable ${tree.symbol.name}", "have type", + "This restriction serves to prevent 
local capabilities from escaping the scope where they are defined.", + tree.srcPos) + traverse(tree.rhs) case tree @ TypeApply(fn, args) => traverse(fn) for case arg: TypeTree <- args do transformTT(arg, boxed = true, exact = false) // type arguments in type applications are boxed + + if allowUniversalInBoxed then + val polyType = fn.tpe.widen.asInstanceOf[TypeLambda] + for case (arg: TypeTree, pinfo, pname) <- args.lazyZip(polyType.paramInfos).lazyZip((polyType.paramNames)) do + if pinfo.bounds.hi.hasAnnotation(defn.Caps_SealedAnnot) then + def where = if fn.symbol.exists then i" in the body of ${fn.symbol}" else "" + CheckCaptures.disallowRootCapabilitiesIn(arg.knownType, + i"Sealed type variable $pname", " be instantiated to", + i"This is often caused by a local capability$where\nleaking as part of its result.", + tree.srcPos) case _ => traverseChildren(tree) tree match @@ -475,4 +508,13 @@ extends tpd.TreeTraverser: capt.println(i"update info of ${tree.symbol} from $info to $newInfo") case _ => end traverse -end Setup + + def apply(tree: Tree)(using Context): Unit = + traverse(tree)(using ctx.withProperty(Setup.IsDuringSetupKey, Some(()))) + +object Setup: + val IsDuringSetupKey = new Property.Key[Unit] + + def isDuringSetup(using Context): Boolean = + ctx.property(IsDuringSetupKey).isDefined +end Setup \ No newline at end of file diff --git a/compiler/src/dotty/tools/dotc/cc/Synthetics.scala b/compiler/src/dotty/tools/dotc/cc/Synthetics.scala index e8f7fd502baa..5fe68dd6a7ac 100644 --- a/compiler/src/dotty/tools/dotc/cc/Synthetics.scala +++ b/compiler/src/dotty/tools/dotc/cc/Synthetics.scala @@ -31,10 +31,12 @@ object Synthetics: * The types of these symbols are transformed in a special way without * looking at the definitions's RHS */ - def needsTransform(sym: SymDenotation)(using Context): Boolean = - isSyntheticCopyMethod(sym) - || isSyntheticCompanionMethod(sym, nme.apply, nme.unapply) - || isSyntheticCopyDefaultGetterMethod(sym) + def needsTransform(symd: 
SymDenotation)(using Context): Boolean = + isSyntheticCopyMethod(symd) + || isSyntheticCompanionMethod(symd, nme.apply, nme.unapply) + || isSyntheticCopyDefaultGetterMethod(symd) + || (symd.symbol eq defn.Object_eq) + || (symd.symbol eq defn.Object_ne) /** Method is excluded from regular capture checking. * Excluded are synthetic class members @@ -52,9 +54,9 @@ object Synthetics: /** Add capture dependencies to the type of the `apply` or `copy` method of a case class. * An apply method in a case class like this: - * case class CC(a: {d} A, b: B, {*} c: C) + * case class CC(a: {d} A, b: B, {cap} c: C) * would get type - * def apply(a': {d} A, b: B, {*} c': C): {a', c'} CC { val a = {a'} A, val c = {c'} C } + * def apply(a': {d} A, b: B, {cap} c': C): {a', c'} CC { val a = {a'} A, val c = {c'} C } * where `'` is used to indicate the difference between parameter symbol and refinement name. * Analogous for the copy method. */ @@ -121,7 +123,7 @@ object Synthetics: case _ => info - /** Augment an unapply of type `(x: C): D` to `(x: {*} C): {x} D` */ + /** Augment an unapply of type `(x: C): D` to `(x: {cap} C): {x} D` */ private def addUnapplyCaptures(info: Type)(using Context): Type = info match case info: MethodType => val paramInfo :: Nil = info.paramInfos: @unchecked @@ -141,13 +143,16 @@ object Synthetics: /** Drop added capture information from the type of an `unapply` */ private def dropUnapplyCaptures(info: Type)(using Context): Type = info match case info: MethodType => - val CapturingType(oldParamInfo, _) :: Nil = info.paramInfos: @unchecked - def oldResult(tp: Type): Type = tp match - case tp: MethodOrPoly => - tp.derivedLambdaType(resType = oldResult(tp.resType)) - case CapturingType(tp, _) => - tp - info.derivedLambdaType(paramInfos = oldParamInfo :: Nil, resType = oldResult(info.resType)) + info.paramInfos match + case CapturingType(oldParamInfo, _) :: Nil => + def oldResult(tp: Type): Type = tp match + case tp: MethodOrPoly => + 
tp.derivedLambdaType(resType = oldResult(tp.resType)) + case CapturingType(tp, _) => + tp + info.derivedLambdaType(paramInfos = oldParamInfo :: Nil, resType = oldResult(info.resType)) + case _ => + info case info: PolyType => info.derivedLambdaType(resType = dropUnapplyCaptures(info.resType)) @@ -163,7 +168,9 @@ object Synthetics: sym.copySymDenotation(info = addUnapplyCaptures(sym.info)) case nme.apply | nme.copy => sym.copySymDenotation(info = addCaptureDeps(sym.info)) - + case n if n == nme.eq || n == nme.ne => + sym.copySymDenotation(info = + MethodType(defn.ObjectType.capturing(CaptureSet.universal) :: Nil, defn.BooleanType)) /** If `sym` refers to a synthetic apply, unapply, copy, or copy default getter method * of a case class, transform it back to what it was before the CC phase. @@ -176,5 +183,7 @@ object Synthetics: sym.copySymDenotation(info = dropUnapplyCaptures(sym.info)) case nme.apply | nme.copy => sym.copySymDenotation(info = dropCaptureDeps(sym.info)) + case n if n == nme.eq || n == nme.ne => + sym.copySymDenotation(info = defn.methOfAnyRef(defn.BooleanType)) end Synthetics \ No newline at end of file diff --git a/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala index 7f20d7c7d9ea..1411493bcbfd 100644 --- a/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala +++ b/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala @@ -6,7 +6,7 @@ package dotty.tools.dotc.classpath import scala.language.unsafeNulls import java.io.{File => JFile} -import java.net.URL +import java.net.{URI, URL} import java.nio.file.{FileSystems, Files} import dotty.tools.dotc.classpath.PackageNameUtils.{packageContains, separatePkgAndClassNames} @@ -194,7 +194,7 @@ final class JrtClassPath(fs: java.nio.file.FileSystem) extends ClassPath with No if (inPackage.isRoot) ClassPathEntries(packages(inPackage), Nil) else ClassPathEntries(packages(inPackage), classes(inPackage)) - def 
asURLs: Seq[URL] = Seq(new URL("jrt:/")) + def asURLs: Seq[URL] = Seq(new URI("jrt:/").toURL) // We don't yet have a scheme to represent the JDK modules in our `-classpath`. // java models them as entries in the new "module path", we'll probably need to follow this. def asClassPathStrings: Seq[String] = Nil diff --git a/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala index 0cb0ba59c52e..e750d9ccacc0 100644 --- a/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala +++ b/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala @@ -2,12 +2,10 @@ package dotty.tools.dotc.classpath import scala.language.unsafeNulls -import dotty.tools.io.ClassRepresentation +import dotty.tools.io.{ClassPath, ClassRepresentation} import dotty.tools.io.{AbstractFile, VirtualDirectory} import FileUtils._ -import java.net.URL - -import dotty.tools.io.ClassPath +import java.net.{URI, URL} case class VirtualDirectoryClassPath(dir: VirtualDirectory) extends ClassPath with DirectoryLookup[ClassFileEntryImpl] with NoSourcePaths { type F = AbstractFile @@ -37,7 +35,7 @@ case class VirtualDirectoryClassPath(dir: VirtualDirectory) extends ClassPath wi def isPackage(f: AbstractFile): Boolean = f.isPackage // mimic the behavior of the old nsc.util.DirectoryClassPath - def asURLs: Seq[URL] = Seq(new URL(dir.name)) + def asURLs: Seq[URL] = Seq(new URI(dir.name).toURL) def asClassPathStrings: Seq[String] = Seq(dir.path) override def findClass(className: String): Option[ClassRepresentation] = findClassFile(className) map ClassFileEntryImpl.apply diff --git a/compiler/src/dotty/tools/dotc/config/CliCommand.scala b/compiler/src/dotty/tools/dotc/config/CliCommand.scala index 68c900e405da..914df040fbf7 100644 --- a/compiler/src/dotty/tools/dotc/config/CliCommand.scala +++ b/compiler/src/dotty/tools/dotc/config/CliCommand.scala @@ -60,7 +60,7 @@ trait CliCommand: def 
defaultValue = s.default match case _: Int | _: String => s.default.toString case _ => "" - val info = List(shortHelp(s), if defaultValue.nonEmpty then s"Default $defaultValue" else "", if s.legalChoices.nonEmpty then s"Choices ${s.legalChoices}" else "") + val info = List(shortHelp(s), if defaultValue.nonEmpty then s"Default $defaultValue" else "", if s.legalChoices.nonEmpty then s"Choices : ${s.legalChoices}" else "") (s.name, info.filter(_.nonEmpty).mkString("\n")) end help diff --git a/compiler/src/dotty/tools/dotc/config/Config.scala b/compiler/src/dotty/tools/dotc/config/Config.scala index 17e3ec352e7c..247fa28efbda 100644 --- a/compiler/src/dotty/tools/dotc/config/Config.scala +++ b/compiler/src/dotty/tools/dotc/config/Config.scala @@ -22,6 +22,11 @@ object Config { */ inline val checkConstraintsNonCyclic = false + /** Check that reverse dependencies in constraints are correct and complete. + * Can also be enabled using -Ycheck-constraint-deps. + */ + inline val checkConstraintDeps = false + /** Check that each constraint resulting from a subtype test * is satisfiable. Also check that a type variable instantiation * satisfies its constraints. @@ -78,13 +83,6 @@ object Config { */ inline val failOnInstantiationToNothing = false - /** Enable noDoubleDef checking if option "-YnoDoubleDefs" is set. - * The reason to have an option as well as the present global switch is - * that the noDoubleDef checking is done in a hotspot, and we do not - * want to incur the overhead of checking an option each time. - */ - inline val checkNoDoubleBindings = true - /** Check positions for consistency after parsing */ inline val checkPositions = true @@ -184,6 +182,9 @@ object Config { /** If set, prints a trace of all symbol completions */ inline val showCompletions = false + /** If set, show variable/variable reverse dependencies when printing constraints. 
*/ + inline val showConstraintDeps = true + /** If set, method results that are context functions are flattened by adding * the parameters of the context function results to the methods themselves. * This is an optimization that reduces closure allocations. diff --git a/compiler/src/dotty/tools/dotc/config/Feature.scala b/compiler/src/dotty/tools/dotc/config/Feature.scala index 6d905f500c54..e5ab8f65f55b 100644 --- a/compiler/src/dotty/tools/dotc/config/Feature.scala +++ b/compiler/src/dotty/tools/dotc/config/Feature.scala @@ -28,8 +28,11 @@ object Feature: val symbolLiterals = deprecated("symbolLiterals") val fewerBraces = experimental("fewerBraces") val saferExceptions = experimental("saferExceptions") + val clauseInterleaving = experimental("clauseInterleaving") + val relaxedExtensionImports = experimental("relaxedExtensionImports") val pureFunctions = experimental("pureFunctions") val captureChecking = experimental("captureChecking") + val into = experimental("into") val globalOnlyImports: Set[TermName] = Set(pureFunctions, captureChecking) @@ -75,23 +78,33 @@ object Feature: def namedTypeArgsEnabled(using Context) = enabled(namedTypeArguments) + def clauseInterleavingEnabled(using Context) = enabled(clauseInterleaving) + def genericNumberLiteralsEnabled(using Context) = enabled(genericNumberLiterals) def scala2ExperimentalMacroEnabled(using Context) = enabled(scala2macros) + /** Is pureFunctions enabled for this compilation unit? */ def pureFunsEnabled(using Context) = enabledBySetting(pureFunctions) || ctx.compilationUnit.knowsPureFuns || ccEnabled + /** Is captureChecking enabled for this compilation unit? */ def ccEnabled(using Context) = enabledBySetting(captureChecking) || ctx.compilationUnit.needsCaptureChecking + /** Is pureFunctions enabled for any of the currently compiled compilation units? 
*/ def pureFunsEnabledSomewhere(using Context) = enabledBySetting(pureFunctions) - || enabledBySetting(captureChecking) || ctx.run != null && ctx.run.nn.pureFunsImportEncountered + || ccEnabledSomewhere + + /** Is captureChecking enabled for any of the currently compiled compilation units? */ + def ccEnabledSomewhere(using Context) = + enabledBySetting(captureChecking) + || ctx.run != null && ctx.run.nn.ccImportEncountered def sourceVersionSetting(using Context): SourceVersion = SourceVersion.valueOf(ctx.settings.source.value) @@ -101,7 +114,11 @@ object Feature: case Some(v) => v case none => sourceVersionSetting - def migrateTo3(using Context): Boolean = sourceVersion == `3.0-migration` + def migrateTo3(using Context): Boolean = + sourceVersion == `3.0-migration` + + def fewerBracesEnabled(using Context) = + sourceVersion.isAtLeast(`3.3`) || enabled(fewerBraces) /** If current source migrates to `version`, issue given warning message * and return `true`, otherwise return `false`. @@ -117,7 +134,7 @@ object Feature: def checkExperimentalFeature(which: String, srcPos: SrcPos, note: => String = "")(using Context) = if !isExperimentalEnabled then - report.error(i"Experimental $which may only be used with a nightly or snapshot version of the compiler$note", srcPos) + report.error(em"Experimental $which may only be used with a nightly or snapshot version of the compiler$note", srcPos) def checkExperimentalDef(sym: Symbol, srcPos: SrcPos)(using Context) = if !isExperimentalEnabled then @@ -128,7 +145,7 @@ object Feature: i"${sym.owner} is marked @experimental" else i"$sym inherits @experimental" - report.error(s"$symMsg and therefore may only be used in an experimental scope.", srcPos) + report.error(em"$symMsg and therefore may only be used in an experimental scope.", srcPos) /** Check that experimental compiler options are only set for snapshot or nightly compiler versions. 
*/ def checkExperimentalSettings(using Context): Unit = @@ -139,6 +156,11 @@ object Feature: def isExperimentalEnabled(using Context): Boolean = Properties.experimental && !ctx.settings.YnoExperimental.value + /** Handle language import `import language..` if it is one + * of the global imports `pureFunctions` or `captureChecking`. In this case + * make the compilation unit's and current run's fields accordingly. + * @return true iff import that was handled + */ def handleGlobalLanguageImport(prefix: TermName, imported: Name)(using Context): Boolean = val fullFeatureName = QualifiedName(prefix, imported.asTermName) if fullFeatureName == pureFunctions then @@ -147,7 +169,7 @@ object Feature: true else if fullFeatureName == captureChecking then ctx.compilationUnit.needsCaptureChecking = true - if ctx.run != null then ctx.run.nn.pureFunsImportEncountered = true + if ctx.run != null then ctx.run.nn.ccImportEncountered = true true else false diff --git a/compiler/src/dotty/tools/dotc/config/PathResolver.scala b/compiler/src/dotty/tools/dotc/config/PathResolver.scala index afa30e38dc2a..8b4eedb0e9d2 100644 --- a/compiler/src/dotty/tools/dotc/config/PathResolver.scala +++ b/compiler/src/dotty/tools/dotc/config/PathResolver.scala @@ -211,7 +211,7 @@ class PathResolver(using c: Context) { import classPathFactory._ // Assemble the elements! 
- def basis: List[Traversable[ClassPath]] = + def basis: List[Iterable[ClassPath]] = val release = Option(ctx.settings.javaOutputVersion.value).filter(_.nonEmpty) List( diff --git a/compiler/src/dotty/tools/dotc/config/Printers.scala b/compiler/src/dotty/tools/dotc/config/Printers.scala index ecb189de9bb3..63d616e1ce3d 100644 --- a/compiler/src/dotty/tools/dotc/config/Printers.scala +++ b/compiler/src/dotty/tools/dotc/config/Printers.scala @@ -32,6 +32,7 @@ object Printers { val init = noPrinter val inlining = noPrinter val interactiv = noPrinter + val macroAnnot = noPrinter val matchTypes = noPrinter val nullables = noPrinter val overload = noPrinter diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala index 09bedd3e8b35..c06aa304ef72 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala @@ -15,9 +15,9 @@ import scala.util.chaining._ class ScalaSettings extends SettingGroup with AllScalaSettings object ScalaSettings: - // Keep synchronized with `classfileVersion` in `BCodeIdiomatic` + // Keep synchronized with `classfileVersion` in `BackendUtils` private val minTargetVersion = 8 - private val maxTargetVersion = 19 + private val maxTargetVersion = 21 def supportedTargetVersions: List[String] = (minTargetVersion to maxTargetVersion).toList.map(_.toString) @@ -64,7 +64,6 @@ trait AllScalaSettings extends CommonScalaSettings, PluginSettings, VerboseSetti val oldSyntax: Setting[Boolean] = BooleanSetting("-old-syntax", "Require `(...)` around conditions.") val indent: Setting[Boolean] = BooleanSetting("-indent", "Together with -rewrite, remove {...} syntax when possible due to significant indentation.") val noindent: Setting[Boolean] = BooleanSetting("-no-indent", "Require classical {...} syntax, indentation is not significant.", aliases = List("-noindent")) - val YindentColons: Setting[Boolean] = 
BooleanSetting("-Yindent-colons", "(disabled: use -language:experimental.fewerBraces instead)") /* Decompiler settings */ val printTasty: Setting[Boolean] = BooleanSetting("-print-tasty", "Prints the raw tasty.", aliases = List("--print-tasty")) @@ -156,20 +155,72 @@ private sealed trait VerboseSettings: */ private sealed trait WarningSettings: self: SettingGroup => + import Setting.ChoiceWithHelp + val Whelp: Setting[Boolean] = BooleanSetting("-W", "Print a synopsis of warning options.") val XfatalWarnings: Setting[Boolean] = BooleanSetting("-Werror", "Fail the compilation if there are any warnings.", aliases = List("-Xfatal-warnings")) + val WvalueDiscard: Setting[Boolean] = BooleanSetting("-Wvalue-discard", "Warn when non-Unit expression results are unused.") + val WNonUnitStatement = BooleanSetting("-Wnonunit-statement", "Warn when block statements are non-Unit expressions.") - val Wunused: Setting[List[String]] = MultiChoiceSetting( + val Wunused: Setting[List[ChoiceWithHelp[String]]] = MultiChoiceHelpSetting( name = "-Wunused", helpArg = "warning", descr = "Enable or disable specific `unused` warnings", - choices = List("nowarn", "all"), + choices = List( + ChoiceWithHelp("nowarn", ""), + ChoiceWithHelp("all",""), + ChoiceWithHelp( + name = "imports", + description = "Warn if an import selector is not referenced.\n" + + "NOTE : overrided by -Wunused:strict-no-implicit-warn"), + ChoiceWithHelp("privates","Warn if a private member is unused"), + ChoiceWithHelp("locals","Warn if a local definition is unused"), + ChoiceWithHelp("explicits","Warn if an explicit parameter is unused"), + ChoiceWithHelp("implicits","Warn if an implicit parameter is unused"), + ChoiceWithHelp("params","Enable -Wunused:explicits,implicits"), + ChoiceWithHelp("linted","Enable -Wunused:imports,privates,locals,implicits"), + ChoiceWithHelp( + name = "strict-no-implicit-warn", + description = "Same as -Wunused:import, only for imports of explicit named members.\n" + + "NOTE : This 
overrides -Wunused:imports and NOT set by -Wunused:all" + ), + // ChoiceWithHelp("patvars","Warn if a variable bound in a pattern is unused"), + ChoiceWithHelp( + name = "unsafe-warn-patvars", + description = "(UNSAFE) Warn if a variable bound in a pattern is unused.\n" + + "This warning can generate false positive, as warning cannot be\n" + + "suppressed yet." + ) + ), default = Nil ) object WunusedHas: + def isChoiceSet(s: String)(using Context) = Wunused.value.pipe(us => us.contains(s)) def allOr(s: String)(using Context) = Wunused.value.pipe(us => us.contains("all") || us.contains(s)) def nowarn(using Context) = allOr("nowarn") + // overrided by strict-no-implicit-warn + def imports(using Context) = + (allOr("imports") || allOr("linted")) && !(strictNoImplicitWarn) + def locals(using Context) = + allOr("locals") || allOr("linted") + /** -Wunused:explicits OR -Wunused:params */ + def explicits(using Context) = + allOr("explicits") || allOr("params") + /** -Wunused:implicits OR -Wunused:params */ + def implicits(using Context) = + allOr("implicits") || allOr("params") || allOr("linted") + def params(using Context) = allOr("params") + def privates(using Context) = + allOr("privates") || allOr("linted") + def patvars(using Context) = + isChoiceSet("unsafe-warn-patvars") // not with "all" + // allOr("patvars") // todo : rename once fixed + def linted(using Context) = + allOr("linted") + def strictNoImplicitWarn(using Context) = + isChoiceSet("strict-no-implicit-warn") + val Wconf: Setting[List[String]] = MultiStringSetting( "-Wconf", "patterns", @@ -282,6 +333,7 @@ private sealed trait YSettings: val Yscala2Unpickler: Setting[String] = StringSetting("-Yscala2-unpickler", "", "Control where we may get Scala 2 symbols from. 
This is either \"always\", \"never\", or a classpath.", "always") val YnoImports: Setting[Boolean] = BooleanSetting("-Yno-imports", "Compile without importing scala.*, java.lang.*, or Predef.") + val Yimports: Setting[List[String]] = MultiStringSetting("-Yimports", helpArg="", "Custom root imports. If set, none of scala.*, java.lang.*, or Predef.* will be imported unless explicitly included.") val YnoGenericSig: Setting[Boolean] = BooleanSetting("-Yno-generic-signatures", "Suppress generation of generic signatures for Java.") val YnoPredef: Setting[Boolean] = BooleanSetting("-Yno-predef", "Compile without importing Predef.") val Yskip: Setting[List[String]] = PhasesSetting("-Yskip", "Skip") @@ -309,10 +361,12 @@ private sealed trait YSettings: val YforceSbtPhases: Setting[Boolean] = BooleanSetting("-Yforce-sbt-phases", "Run the phases used by sbt for incremental compilation (ExtractDependencies and ExtractAPI) even if the compiler is ran outside of sbt, for debugging.") val YdumpSbtInc: Setting[Boolean] = BooleanSetting("-Ydump-sbt-inc", "For every compiled foo.scala, output the API representation and dependencies used for sbt incremental compilation in foo.inc, implies -Yforce-sbt-phases.") val YcheckAllPatmat: Setting[Boolean] = BooleanSetting("-Ycheck-all-patmat", "Check exhaustivity and redundancy of all pattern matching (used for testing the algorithm).") + val YcheckConstraintDeps: Setting[Boolean] = BooleanSetting("-Ycheck-constraint-deps", "Check dependency tracking in constraints (used for testing the algorithm).") val YretainTrees: Setting[Boolean] = BooleanSetting("-Yretain-trees", "Retain trees for top-level classes, accessible from ClassSymbol#tree") val YshowTreeIds: Setting[Boolean] = BooleanSetting("-Yshow-tree-ids", "Uniquely tag all tree nodes in debugging output.") val YfromTastyIgnoreList: Setting[List[String]] = MultiStringSetting("-Yfrom-tasty-ignore-list", "file", "List of `tasty` files in jar files that will not be loaded when using 
-from-tasty") val YnoExperimental: Setting[Boolean] = BooleanSetting("-Yno-experimental", "Disable experimental language features") + val YlegacyLazyVals: Setting[Boolean] = BooleanSetting("-Ylegacy-lazy-vals", "Use legacy (pre 3.3.0) implementation of lazy vals") val YprofileEnabled: Setting[Boolean] = BooleanSetting("-Yprofile-enabled", "Enable profiling.") val YprofileDestination: Setting[String] = StringSetting("-Yprofile-destination", "file", "Where to send profiling output - specify a file, default is to the console.", "") @@ -330,7 +384,6 @@ private sealed trait YSettings: val YrecheckTest: Setting[Boolean] = BooleanSetting("-Yrecheck-test", "Run basic rechecking (internal test only)") val YccDebug: Setting[Boolean] = BooleanSetting("-Ycc-debug", "Used in conjunction with captureChecking language import, debug info for captured references") val YccNoAbbrev: Setting[Boolean] = BooleanSetting("-Ycc-no-abbrev", "Used in conjunction with captureChecking language import, suppress type abbreviations") - val YlightweightLazyVals: Setting[Boolean] = BooleanSetting("-Ylightweight-lazy-vals", "Use experimental lightweight implementation of lazy vals") /** Area-specific debug output */ val YexplainLowlevel: Setting[Boolean] = BooleanSetting("-Yexplain-lowlevel", "When explaining type errors, show types at a lower level.") diff --git a/compiler/src/dotty/tools/dotc/config/Settings.scala b/compiler/src/dotty/tools/dotc/config/Settings.scala index 277833afbd5d..34e5582e8a91 100644 --- a/compiler/src/dotty/tools/dotc/config/Settings.scala +++ b/compiler/src/dotty/tools/dotc/config/Settings.scala @@ -11,6 +11,7 @@ import annotation.tailrec import collection.mutable.ArrayBuffer import reflect.ClassTag import scala.util.{Success, Failure} +import dotty.tools.dotc.config.Settings.Setting.ChoiceWithHelp object Settings: @@ -69,11 +70,11 @@ object Settings: def updateIn(state: SettingsState, x: Any): SettingsState = x match case _: T => state.update(idx, x) - case _ => throw 
IllegalArgumentException(s"found: $x of type ${x.getClass.getName}, required: ${implicitly[ClassTag[T]]}") + case _ => throw IllegalArgumentException(s"found: $x of type ${x.getClass.getName}, required: ${summon[ClassTag[T]]}") def isDefaultIn(state: SettingsState): Boolean = valueIn(state) == default - def isMultivalue: Boolean = implicitly[ClassTag[T]] == ListTag + def isMultivalue: Boolean = summon[ClassTag[T]] == ListTag def legalChoices: String = choices match { @@ -106,6 +107,11 @@ object Settings: def missingArg = fail(s"missing argument for option $name", args) + def setBoolean(argValue: String, args: List[String]) = + if argValue.equalsIgnoreCase("true") || argValue.isEmpty then update(true, args) + else if argValue.equalsIgnoreCase("false") then update(false, args) + else fail(s"$argValue is not a valid choice for boolean setting $name", args) + def setString(argValue: String, args: List[String]) = choices match case Some(xs) if !xs.contains(argValue) => @@ -126,9 +132,9 @@ object Settings: catch case _: NumberFormatException => fail(s"$argValue is not an integer argument for $name", args) - def doSet(argRest: String) = ((implicitly[ClassTag[T]], args): @unchecked) match { + def doSet(argRest: String) = ((summon[ClassTag[T]], args): @unchecked) match { case (BooleanTag, _) => - update(true, args) + setBoolean(argRest, args) case (OptionTag, _) => update(Some(propertyClass.get.getConstructor().newInstance()), args) case (ListTag, _) => @@ -184,6 +190,19 @@ object Settings: def update(x: T)(using Context): SettingsState = setting.updateIn(ctx.settingsState, x) def isDefault(using Context): Boolean = setting.isDefaultIn(ctx.settingsState) + /** + * A choice with help description. 
+ * + * NOTE : `equals` and `toString` have special behaviors + */ + case class ChoiceWithHelp[T](name: T, description: String): + override def equals(x: Any): Boolean = x match + case s:String => s == name.toString() + case _ => false + override def toString(): String = + s"\n- $name${if description.isEmpty() then "" else s" :\n\t${description.replace("\n","\n\t")}"}" + end Setting + class SettingGroup { private val _allSettings = new ArrayBuffer[Setting[?]] @@ -265,6 +284,9 @@ object Settings: def MultiChoiceSetting(name: String, helpArg: String, descr: String, choices: List[String], default: List[String], aliases: List[String] = Nil): Setting[List[String]] = publish(Setting(name, descr, default, helpArg, Some(choices), aliases = aliases)) + def MultiChoiceHelpSetting(name: String, helpArg: String, descr: String, choices: List[ChoiceWithHelp[String]], default: List[ChoiceWithHelp[String]], aliases: List[String] = Nil): Setting[List[ChoiceWithHelp[String]]] = + publish(Setting(name, descr, default, helpArg, Some(choices), aliases = aliases)) + def IntSetting(name: String, descr: String, default: Int, aliases: List[String] = Nil): Setting[Int] = publish(Setting(name, descr, default, aliases = aliases)) @@ -290,6 +312,6 @@ object Settings: publish(Setting(name, descr, default)) def OptionSetting[T: ClassTag](name: String, descr: String, aliases: List[String] = Nil): Setting[Option[T]] = - publish(Setting(name, descr, None, propertyClass = Some(implicitly[ClassTag[T]].runtimeClass), aliases = aliases)) + publish(Setting(name, descr, None, propertyClass = Some(summon[ClassTag[T]].runtimeClass), aliases = aliases)) } end Settings diff --git a/compiler/src/dotty/tools/dotc/config/SourceVersion.scala b/compiler/src/dotty/tools/dotc/config/SourceVersion.scala index 545e2f2d9b42..b8fa7994ce0c 100644 --- a/compiler/src/dotty/tools/dotc/config/SourceVersion.scala +++ b/compiler/src/dotty/tools/dotc/config/SourceVersion.scala @@ -8,6 +8,7 @@ import util.Property enum 
SourceVersion: case `3.0-migration`, `3.0`, `3.1` // Note: do not add `3.1-migration` here, 3.1 is the same language as 3.0. case `3.2-migration`, `3.2` + case `3.3-migration`, `3.3` case `future-migration`, `future` val isMigrating: Boolean = toString.endsWith("-migration") @@ -17,8 +18,10 @@ enum SourceVersion: def isAtLeast(v: SourceVersion) = stable.ordinal >= v.ordinal + def isAtMost(v: SourceVersion) = stable.ordinal <= v.ordinal + object SourceVersion extends Property.Key[SourceVersion]: - def defaultSourceVersion = `3.2` + def defaultSourceVersion = `3.3` /** language versions that may appear in a language import, are deprecated, but not removed from the standard library. */ val illegalSourceVersionNames = List("3.1-migration").map(_.toTermName) diff --git a/compiler/src/dotty/tools/dotc/core/Annotations.scala b/compiler/src/dotty/tools/dotc/core/Annotations.scala index aa8ead280bbf..202f3eb26e41 100644 --- a/compiler/src/dotty/tools/dotc/core/Annotations.scala +++ b/compiler/src/dotty/tools/dotc/core/Annotations.scala @@ -2,12 +2,13 @@ package dotty.tools package dotc package core -import Symbols._, Types._, Contexts._, Constants._ -import dotty.tools.dotc.ast.tpd, tpd.* +import Symbols._, Types._, Contexts._, Constants._, Phases.* +import ast.tpd, tpd.* import util.Spans.Span import printing.{Showable, Printer} import printing.Texts.Text -import annotation.internal.sharable + +import scala.annotation.internal.sharable object Annotations { @@ -87,6 +88,22 @@ object Annotations { def sameAnnotation(that: Annotation)(using Context): Boolean = symbol == that.symbol && tree.sameTree(that.tree) + def hasOneOfMetaAnnotation(metaSyms: Set[Symbol], orNoneOf: Set[Symbol] = Set.empty)(using Context): Boolean = atPhaseNoLater(erasurePhase) { + def go(metaSyms: Set[Symbol]) = + def recTp(tp: Type): Boolean = tp.dealiasKeepAnnots match + case AnnotatedType(parent, metaAnnot) => metaSyms.exists(metaAnnot.matches) || recTp(parent) + case _ => false + def rec(tree: Tree): 
Boolean = methPart(tree) match + case New(tpt) => rec(tpt) + case Select(qual, _) => rec(qual) + case Annotated(arg, metaAnnot) => metaSyms.exists(metaAnnot.tpe.classSymbol.derivesFrom) || rec(arg) + case t @ Ident(_) => recTp(t.tpe) + case Typed(expr, _) => rec(expr) + case _ => false + metaSyms.exists(symbol.hasAnnotation) || rec(tree) + go(metaSyms) || orNoneOf.nonEmpty && !go(orNoneOf) + } + /** Operations for hash-consing, can be overridden */ def hash: Int = System.identityHashCode(this) def eql(that: Annotation) = this eq that @@ -178,26 +195,20 @@ object Annotations { def apply(tree: Tree): ConcreteAnnotation = ConcreteAnnotation(tree) - def apply(cls: ClassSymbol)(using Context): Annotation = - apply(cls, Nil) - - def apply(cls: ClassSymbol, arg: Tree)(using Context): Annotation = - apply(cls, arg :: Nil) - - def apply(cls: ClassSymbol, arg1: Tree, arg2: Tree)(using Context): Annotation = - apply(cls, arg1 :: arg2 :: Nil) + def apply(cls: ClassSymbol, span: Span)(using Context): Annotation = + apply(cls, Nil, span) - def apply(cls: ClassSymbol, args: List[Tree])(using Context): Annotation = - apply(cls.typeRef, args) + def apply(cls: ClassSymbol, arg: Tree, span: Span)(using Context): Annotation = + apply(cls, arg :: Nil, span) - def apply(atp: Type, arg: Tree)(using Context): Annotation = - apply(atp, arg :: Nil) + def apply(cls: ClassSymbol, args: List[Tree], span: Span)(using Context): Annotation = + apply(cls.typeRef, args, span) - def apply(atp: Type, arg1: Tree, arg2: Tree)(using Context): Annotation = - apply(atp, arg1 :: arg2 :: Nil) + def apply(atp: Type, arg: Tree, span: Span)(using Context): Annotation = + apply(atp, arg :: Nil, span) - def apply(atp: Type, args: List[Tree])(using Context): Annotation = - apply(New(atp, args)) + def apply(atp: Type, args: List[Tree], span: Span)(using Context): Annotation = + apply(New(atp, args).withSpan(span)) /** Create an annotation where the tree is computed lazily. 
*/ def deferred(sym: Symbol)(treeFn: Context ?=> Tree): Annotation = @@ -234,15 +245,15 @@ object Annotations { else None } - def makeSourceFile(path: String)(using Context): Annotation = - apply(defn.SourceFileAnnot, Literal(Constant(path))) + def makeSourceFile(path: String, span: Span)(using Context): Annotation = + apply(defn.SourceFileAnnot, Literal(Constant(path)), span) } @sharable val EmptyAnnotation = Annotation(EmptyTree) def ThrowsAnnotation(cls: ClassSymbol)(using Context): Annotation = { val tref = cls.typeRef - Annotation(defn.ThrowsAnnot.typeRef.appliedTo(tref), Ident(tref)) + Annotation(defn.ThrowsAnnot.typeRef.appliedTo(tref), Ident(tref), cls.span) } /** Extracts the type of the thrown exception from an annotation. diff --git a/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala b/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala index 4b441d512dec..a61701eee2d7 100644 --- a/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala +++ b/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala @@ -149,7 +149,7 @@ class CheckRealizable(using Context) { */ private def boundsRealizability(tp: Type) = { - val memberProblems = withMode(Mode.CheckBounds) { + val memberProblems = withMode(Mode.CheckBoundsOrSelfType) { for { mbr <- tp.nonClassTypeMembers if !(mbr.info.loBound <:< mbr.info.hiBound) @@ -157,7 +157,7 @@ class CheckRealizable(using Context) { yield new HasProblemBounds(mbr.name, mbr.info) } - val refinementProblems = withMode(Mode.CheckBounds) { + val refinementProblems = withMode(Mode.CheckBoundsOrSelfType) { for { name <- refinedNames(tp) if (name.isTypeName) diff --git a/compiler/src/dotty/tools/dotc/core/Constraint.scala b/compiler/src/dotty/tools/dotc/core/Constraint.scala index 07b6e71cdcc9..c634f847e510 100644 --- a/compiler/src/dotty/tools/dotc/core/Constraint.scala +++ b/compiler/src/dotty/tools/dotc/core/Constraint.scala @@ -4,6 +4,7 @@ package core import Types._, Contexts._ import printing.Showable +import 
util.{SimpleIdentitySet, SimpleIdentityMap} /** Constraint over undetermined type parameters. Constraints are built * over values of the following types: @@ -70,6 +71,9 @@ abstract class Constraint extends Showable { */ def nonParamBounds(param: TypeParamRef)(using Context): TypeBounds + /** The current bounds of type parameter `param` */ + def bounds(param: TypeParamRef)(using Context): TypeBounds + /** A new constraint which is derived from this constraint by adding * entries for all type parameters of `poly`. * @param tvars A list of type variables associated with the params, @@ -87,6 +91,8 @@ abstract class Constraint extends Showable { * - Another type, indicating a solution for the parameter * * @pre `this contains param`. + * @pre `tp` does not contain top-level references to `param` + * (see `validBoundsFor`) */ def updateEntry(param: TypeParamRef, tp: Type)(using Context): This @@ -128,7 +134,7 @@ abstract class Constraint extends Showable { /** Is `tv` marked as hard in the constraint? */ def isHard(tv: TypeVar): Boolean - + /** The same as this constraint, but with `tv` marked as hard. */ def withHard(tv: TypeVar)(using Context): This @@ -165,15 +171,49 @@ abstract class Constraint extends Showable { */ def hasConflictingTypeVarsFor(tl: TypeLambda, that: Constraint): Boolean - /** Check that no constrained parameter contains itself as a bound */ - def checkNonCyclic()(using Context): this.type - /** Does `param` occur at the toplevel in `tp` ? * Toplevel means: the type itself or a factor in some * combination of `&` or `|` types. */ def occursAtToplevel(param: TypeParamRef, tp: Type)(using Context): Boolean + /** Sanitize `bound` to make it either a valid upper or lower bound for + * `param` depending on `isUpper`. + * + * Toplevel references to `param`, are replaced by `Any` if `isUpper` is true + * and `Nothing` otherwise. 
+ * + * @see `occursAtTopLevel` for a definition of "toplevel" + * @see `validBoundsFor` to sanitize both the lower and upper bound at once. + */ + def validBoundFor(param: TypeParamRef, bound: Type, isUpper: Boolean)(using Context): Type + + /** Sanitize `bounds` to make them valid constraints for `param`. + * + * @see `validBoundFor` for details. + */ + def validBoundsFor(param: TypeParamRef, bounds: TypeBounds)(using Context): Type + + /** A string that shows the reverse dependencies maintained by this constraint + * (coDeps and contraDeps for OrderingConstraints). + */ + def depsToString(using Context): String + + /** Does the constraint restricted to variables outside `except` depend on `tv` + * in the given direction `co`? + * @param `co` If true, test whether the constraint would change if the variable is made larger + * otherwise, test whether the constraint would change if the variable is made smaller. + */ + def dependsOn(tv: TypeVar, except: TypeVars, co: Boolean)(using Context): Boolean + + /** Depending on Config settngs: + * - Under `checkConstraintsNonCyclic`, check that no constrained + * parameter contains itself as a bound. + * - Under `checkConstraintDeps`, check hat reverse dependencies in + * constraints are correct and complete. + */ + def checkWellFormed()(using Context): this.type + /** Check that constraint only refers to TypeParamRefs bound by itself */ def checkClosed()(using Context): Unit diff --git a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala index 1dfa04822766..9ffe2bda73cb 100644 --- a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala +++ b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala @@ -58,6 +58,12 @@ trait ConstraintHandling { */ protected var comparedTypeLambdas: Set[TypeLambda] = Set.empty + /** Used for match type reduction: If false, we don't recognize an abstract type + * to be a subtype type of any of its base classes. 
This is in place only at the + * toplevel; it is turned on again when we add parts of the scrutinee to the constraint. + */ + protected var canWidenAbstract: Boolean = true + protected var myNecessaryConstraintsOnly = false /** When collecting the constraints needed for a particular subtyping * judgment to be true, we sometimes need to approximate the constraint @@ -146,8 +152,8 @@ trait ConstraintHandling { return param LevelAvoidMap(0, maxLevel)(param) match case freshVar: TypeVar => freshVar.origin - case _ => throw new TypeError( - i"Could not decrease the nesting level of ${param} from ${nestingLevel(param)} to $maxLevel in $constraint") + case _ => throw TypeError( + em"Could not decrease the nesting level of ${param} from ${nestingLevel(param)} to $maxLevel in $constraint") def nonParamBounds(param: TypeParamRef)(using Context): TypeBounds = constraint.nonParamBounds(param) @@ -251,7 +257,7 @@ trait ConstraintHandling { end LevelAvoidMap /** Approximate `rawBound` if needed to make it a legal bound of `param` by - * avoiding wildcards and types with a level strictly greater than its + * avoiding cycles, wildcards and types with a level strictly greater than its * `nestingLevel`. * * Note that level-checking must be performed here and cannot be delayed @@ -277,7 +283,7 @@ trait ConstraintHandling { // This is necessary for i8900-unflip.scala to typecheck. 
val v = if necessaryConstraintsOnly then -this.variance else this.variance atVariance(v)(super.legalVar(tp)) - approx(rawBound) + constraint.validBoundFor(param, approx(rawBound), isUpper) end legalBound protected def addOneBound(param: TypeParamRef, rawBound: Type, isUpper: Boolean)(using Context): Boolean = @@ -407,8 +413,10 @@ trait ConstraintHandling { constraint = constraint.addLess(p2, p1, direction = if pKept eq p1 then KeepParam2 else KeepParam1) - val boundKept = constraint.nonParamBounds(pKept).substParam(pRemoved, pKept) - var boundRemoved = constraint.nonParamBounds(pRemoved).substParam(pRemoved, pKept) + val boundKept = constraint.validBoundsFor(pKept, + constraint.nonParamBounds( pKept).substParam(pRemoved, pKept).bounds) + var boundRemoved = constraint.validBoundsFor(pKept, + constraint.nonParamBounds(pRemoved).substParam(pRemoved, pKept).bounds) if level1 != level2 then boundRemoved = LevelAvoidMap(-1, math.min(level1, level2))(boundRemoved) @@ -550,6 +558,13 @@ trait ConstraintHandling { inst end approximation + private def isTransparent(tp: Type, traitOnly: Boolean)(using Context): Boolean = tp match + case AndType(tp1, tp2) => + isTransparent(tp1, traitOnly) && isTransparent(tp2, traitOnly) + case _ => + val cls = tp.underlyingClassRef(refinementOK = false).typeSymbol + cls.isTransparentClass && (!traitOnly || cls.is(Trait)) + /** If `tp` is an intersection such that some operands are transparent trait instances * and others are not, replace as many transparent trait instances as possible with Any * as long as the result is still a subtype of `bound`. 
But fall back to the @@ -562,18 +577,17 @@ trait ConstraintHandling { var dropped: List[Type] = List() // the types dropped so far, last one on top def dropOneTransparentTrait(tp: Type): Type = - val tpd = tp.dealias - if tpd.typeSymbol.isTransparentTrait && !tpd.isLambdaSub && !kept.contains(tpd) then - dropped = tpd :: dropped + if isTransparent(tp, traitOnly = true) && !kept.contains(tp) then + dropped = tp :: dropped defn.AnyType - else tpd match + else tp match case AndType(tp1, tp2) => val tp1w = dropOneTransparentTrait(tp1) if tp1w ne tp1 then tp1w & tp2 else val tp2w = dropOneTransparentTrait(tp2) if tp2w ne tp2 then tp1 & tp2w - else tpd + else tp case _ => tp @@ -612,8 +626,9 @@ trait ConstraintHandling { /** Widen inferred type `inst` with upper `bound`, according to the following rules: * 1. If `inst` is a singleton type, or a union containing some singleton types, - * widen (all) the singleton type(s), provided the result is a subtype of `bound`. - * (i.e. `inst.widenSingletons <:< bound` succeeds with satisfiable constraint) + * widen (all) the singleton type(s), provided the result is a subtype of `bound` + * (i.e. `inst.widenSingletons <:< bound` succeeds with satisfiable constraint) and + * is not transparent according to `isTransparent`. * 2a. If `inst` is a union type and `widenUnions` is true, approximate the union type * from above by an intersection of all common base types, provided the result * is a subtype of `bound`. 
@@ -635,7 +650,7 @@ trait ConstraintHandling { def widenOr(tp: Type) = if widenUnions then val tpw = tp.widenUnion - if (tpw ne tp) && (tpw <:< bound) then tpw else tp + if (tpw ne tp) && !isTransparent(tpw, traitOnly = false) && (tpw <:< bound) then tpw else tp else tp.hardenUnions def widenSingle(tp: Type) = @@ -648,7 +663,12 @@ trait ConstraintHandling { val wideInst = if isSingleton(bound) then inst - else dropTransparentTraits(widenIrreducible(widenOr(widenSingle(inst))), bound) + else + val widenedFromSingle = widenSingle(inst) + val widenedFromUnion = widenOr(widenedFromSingle) + val widened = dropTransparentTraits(widenedFromUnion, bound) + widenIrreducible(widened) + wideInst match case wideInst: TypeRef if wideInst.symbol.is(Module) => TermRef(wideInst.prefix, wideInst.symbol.sourceModule) @@ -729,16 +749,7 @@ trait ConstraintHandling { } /** The current bounds of type parameter `param` */ - def bounds(param: TypeParamRef)(using Context): TypeBounds = { - val e = constraint.entry(param) - if (e.exists) e.bounds - else { - // TODO: should we change the type of paramInfos to nullable? - val pinfos: List[param.binder.PInfo] | Null = param.binder.paramInfos - if (pinfos != null) pinfos(param.paramNum) // pinfos == null happens in pos/i536.scala - else TypeBounds.empty - } - } + def bounds(param: TypeParamRef)(using Context): TypeBounds = constraint.bounds(param) /** Add type lambda `tl`, possibly with type variables `tvars`, to current constraint * and propagate all bounds. 
@@ -839,13 +850,17 @@ trait ConstraintHandling { //checkPropagated(s"adding $description")(true) // DEBUG in case following fails checkPropagated(s"added $description") { addConstraintInvocations += 1 + val saved = canWidenAbstract + canWidenAbstract = true try bound match case bound: TypeParamRef if constraint contains bound => addParamBound(bound) case _ => val pbound = avoidLambdaParams(bound) kindCompatible(param, pbound) && addBoundTransitively(param, pbound, !fromBelow) - finally addConstraintInvocations -= 1 + finally + canWidenAbstract = saved + addConstraintInvocations -= 1 } end addConstraint diff --git a/compiler/src/dotty/tools/dotc/core/Contexts.scala b/compiler/src/dotty/tools/dotc/core/Contexts.scala index d2a88a422b2e..e0e43169820a 100644 --- a/compiler/src/dotty/tools/dotc/core/Contexts.scala +++ b/compiler/src/dotty/tools/dotc/core/Contexts.scala @@ -28,6 +28,7 @@ import printing._ import config.{JavaPlatform, SJSPlatform, Platform, ScalaSettings} import classfile.ReusableDataReader import StdNames.nme +import compiletime.uninitialized import scala.annotation.internal.sharable @@ -123,7 +124,9 @@ object Contexts { */ abstract class Context(val base: ContextBase) { thiscontext => - given Context = this + protected given Context = this + + def outer: Context /** All outer contexts, ending in `base.initialCtx` and then `NoContext` */ def outersIterator: Iterator[Context] = new Iterator[Context] { @@ -132,65 +135,21 @@ object Contexts { def next = { val c = current; current = current.outer; c } } - /** The outer context */ - private var _outer: Context = _ - protected def outer_=(outer: Context): Unit = _outer = outer - final def outer: Context = _outer - - /** The current context */ - private var _period: Period = _ - protected def period_=(period: Period): Unit = { - assert(period.firstPhaseId == period.lastPhaseId, period) - _period = period - } - final def period: Period = _period - - /** The scope nesting level */ - private var _mode: Mode = _ - 
protected def mode_=(mode: Mode): Unit = _mode = mode - final def mode: Mode = _mode - - /** The current owner symbol */ - private var _owner: Symbol = _ - protected def owner_=(owner: Symbol): Unit = _owner = owner - final def owner: Symbol = _owner - - /** The current tree */ - private var _tree: Tree[? >: Untyped]= _ - protected def tree_=(tree: Tree[? >: Untyped]): Unit = _tree = tree - final def tree: Tree[? >: Untyped] = _tree - - /** The current scope */ - private var _scope: Scope = _ - protected def scope_=(scope: Scope): Unit = _scope = scope - final def scope: Scope = _scope - - /** The current typerstate */ - private var _typerState: TyperState = _ - protected def typerState_=(typerState: TyperState): Unit = _typerState = typerState - final def typerState: TyperState = _typerState - - /** The current bounds in force for type parameters appearing in a GADT */ - private var _gadt: GadtConstraint = _ - protected def gadt_=(gadt: GadtConstraint): Unit = _gadt = gadt - final def gadt: GadtConstraint = _gadt - - /** The history of implicit searches that are currently active */ - private var _searchHistory: SearchHistory = _ - protected def searchHistory_= (searchHistory: SearchHistory): Unit = _searchHistory = searchHistory - final def searchHistory: SearchHistory = _searchHistory - - /** The current source file */ - private var _source: SourceFile = _ - protected def source_=(source: SourceFile): Unit = _source = source - final def source: SourceFile = _source + def period: Period + def mode: Mode + def owner: Symbol + def tree: Tree[?] + def scope: Scope + def typerState: TyperState + def gadt: GadtConstraint = gadtState.gadt + def gadtState: GadtState + def searchHistory: SearchHistory + def source: SourceFile /** A map in which more contextual properties can be stored * Typically used for attributes that are read and written only in special situations. 
*/ - private var _moreProperties: Map[Key[Any], Any] = _ - protected def moreProperties_=(moreProperties: Map[Key[Any], Any]): Unit = _moreProperties = moreProperties - final def moreProperties: Map[Key[Any], Any] = _moreProperties + def moreProperties: Map[Key[Any], Any] def property[T](key: Key[T]): Option[T] = moreProperties.get(key).asInstanceOf[Option[T]] @@ -200,9 +159,7 @@ object Contexts { * Access to store entries is much faster than access to properties, and only * slightly slower than a normal field access would be. */ - private var _store: Store = _ - protected def store_=(store: Store): Unit = _store = store - final def store: Store = _store + def store: Store /** The compiler callback implementation, or null if no callback will be called. */ def compilerCallback: CompilerCallback = store(compilerCallbackLoc) @@ -240,7 +197,7 @@ object Contexts { def typeAssigner: TypeAssigner = store(typeAssignerLoc) /** The new implicit references that are introduced by this scope */ - protected var implicitsCache: ContextualImplicits | Null = null + private var implicitsCache: ContextualImplicits | Null = null def implicits: ContextualImplicits = { if (implicitsCache == null) implicitsCache = { @@ -299,13 +256,12 @@ object Contexts { file catch case ex: InvalidPathException => - report.error(s"invalid file path: ${ex.getMessage}") + report.error(em"invalid file path: ${ex.getMessage}") NoAbstractFile /** AbstractFile with given path, memoized */ def getFile(name: String): AbstractFile = getFile(name.toTermName) - private var related: SimpleIdentityMap[Phase | SourceFile, Context] | Null = null private def lookup(key: Phase | SourceFile): Context | Null = @@ -356,7 +312,7 @@ object Contexts { /** If -Ydebug is on, the top of the stack trace where this context * was created, otherwise `null`. 
*/ - private var creationTrace: Array[StackTraceElement] = _ + private var creationTrace: Array[StackTraceElement] = uninitialized private def setCreationTrace() = creationTrace = (new Throwable).getStackTrace().take(20) @@ -455,7 +411,7 @@ object Contexts { val constrCtx = outersIterator.dropWhile(_.outer.owner == owner).next() superOrThisCallContext(owner, constrCtx.scope) .setTyperState(typerState) - .setGadt(gadt) + .setGadtState(gadtState) .fresh .setScope(this.scope) } @@ -469,7 +425,7 @@ object Contexts { } /** The context of expression `expr` seen as a member of a statement sequence */ - def exprContext(stat: Tree[? >: Untyped], exprOwner: Symbol): Context = + def exprContext(stat: Tree[?], exprOwner: Symbol): Context = if (exprOwner == this.owner) this else if (untpd.isSuperConstrCall(stat) && this.owner.isClass) superCallContext else fresh.setOwner(exprOwner) @@ -488,39 +444,20 @@ object Contexts { def useColors: Boolean = base.settings.color.value == "always" - /** Is the explicit nulls option set? */ - def explicitNulls: Boolean = base.settings.YexplicitNulls.value + def withColors: FreshContext = + fresh.setSetting(ctx.settings.color, "always") - /** Initialize all context fields, except typerState, which has to be set separately - * @param outer The outer context - * @param origin The context from which fields are copied - */ - private[Contexts] def init(outer: Context, origin: Context): this.type = { - _outer = outer - _period = origin.period - _mode = origin.mode - _owner = origin.owner - _tree = origin.tree - _scope = origin.scope - _gadt = origin.gadt - _searchHistory = origin.searchHistory - _source = origin.source - _moreProperties = origin.moreProperties - _store = origin.store - this - } + def withoutColors: FreshContext = + fresh.setSetting(ctx.settings.color, "never") - def reuseIn(outer: Context): this.type = - implicitsCache = null - related = null - init(outer, outer) + /** Is the explicit nulls option set? 
*/ + def explicitNulls: Boolean = base.settings.YexplicitNulls.value /** A fresh clone of this context embedded in this context. */ def fresh: FreshContext = freshOver(this) /** A fresh clone of this context embedded in the specified `outer` context. */ def freshOver(outer: Context): FreshContext = - util.Stats.record("Context.fresh") FreshContext(base).init(outer, this).setTyperState(this.typerState) final def withOwner(owner: Symbol): Context = @@ -565,6 +502,15 @@ object Contexts { def uniques: util.WeakHashSet[Type] = base.uniques def initialize()(using Context): Unit = base.initialize() + + protected def resetCaches(): Unit = + implicitsCache = null + related = null + + /** Reuse this context as a fresh context nested inside `outer` + * But keep the typerstate, this one has to be set explicitly if needed. + */ + def reuseIn(outer: Context): this.type } /** A condensed context provides only a small memory footprint over @@ -579,55 +525,138 @@ object Contexts { * of its attributes using the with... methods. */ class FreshContext(base: ContextBase) extends Context(base) { + util.Stats.record("Context.fresh") + + private var _outer: Context = uninitialized + def outer: Context = _outer + + private var _period: Period = uninitialized + final def period: Period = _period + + private var _mode: Mode = uninitialized + final def mode: Mode = _mode + + private var _owner: Symbol = uninitialized + final def owner: Symbol = _owner + + private var _tree: Tree[?]= _ + final def tree: Tree[?] 
= _tree + + private var _scope: Scope = uninitialized + final def scope: Scope = _scope + + private var _typerState: TyperState = uninitialized + final def typerState: TyperState = _typerState + + private var _gadtState: GadtState = uninitialized + final def gadtState: GadtState = _gadtState + + private var _searchHistory: SearchHistory = uninitialized + final def searchHistory: SearchHistory = _searchHistory + + private var _source: SourceFile = uninitialized + final def source: SourceFile = _source + + private var _moreProperties: Map[Key[Any], Any] = uninitialized + final def moreProperties: Map[Key[Any], Any] = _moreProperties + + private var _store: Store = uninitialized + final def store: Store = _store + + /** Initialize all context fields, except typerState, which has to be set separately + * @param outer The outer context + * @param origin The context from which fields are copied + */ + private[Contexts] def init(outer: Context, origin: Context): this.type = { + _outer = outer + _period = origin.period + _mode = origin.mode + _owner = origin.owner + _tree = origin.tree + _scope = origin.scope + _gadtState = origin.gadtState + _searchHistory = origin.searchHistory + _source = origin.source + _moreProperties = origin.moreProperties + _store = origin.store + this + } + + def reuseIn(outer: Context): this.type = + resetCaches() + init(outer, outer) + def setPeriod(period: Period): this.type = util.Stats.record("Context.setPeriod") - this.period = period + //assert(period.firstPhaseId == period.lastPhaseId, period) + this._period = period this + def setMode(mode: Mode): this.type = util.Stats.record("Context.setMode") - this.mode = mode + this._mode = mode this + def setOwner(owner: Symbol): this.type = util.Stats.record("Context.setOwner") assert(owner != NoSymbol) - this.owner = owner + this._owner = owner this - def setTree(tree: Tree[? 
>: Untyped]): this.type = + + def setTree(tree: Tree[?]): this.type = util.Stats.record("Context.setTree") - this.tree = tree + this._tree = tree + this + + def setScope(scope: Scope): this.type = + this._scope = scope this - def setScope(scope: Scope): this.type = { this.scope = scope; this } + def setNewScope: this.type = util.Stats.record("Context.setScope") - this.scope = newScope + this._scope = newScope this - def setTyperState(typerState: TyperState): this.type = { this.typerState = typerState; this } - def setNewTyperState(): this.type = setTyperState(typerState.fresh(committable = true)) - def setExploreTyperState(): this.type = setTyperState(typerState.fresh(committable = false)) - def setReporter(reporter: Reporter): this.type = setTyperState(typerState.fresh().setReporter(reporter)) - def setTyper(typer: Typer): this.type = { this.scope = typer.scope; setTypeAssigner(typer) } - def setGadt(gadt: GadtConstraint): this.type = - util.Stats.record("Context.setGadt") - this.gadt = gadt + + def setTyperState(typerState: TyperState): this.type = + this._typerState = typerState + this + def setNewTyperState(): this.type = + setTyperState(typerState.fresh(committable = true)) + def setExploreTyperState(): this.type = + setTyperState(typerState.fresh(committable = false)) + def setReporter(reporter: Reporter): this.type = + setTyperState(typerState.fresh().setReporter(reporter)) + + def setTyper(typer: Typer): this.type = + this._scope = typer.scope + setTypeAssigner(typer) + + def setGadtState(gadtState: GadtState): this.type = + util.Stats.record("Context.setGadtState") + this._gadtState = gadtState this - def setFreshGADTBounds: this.type = setGadt(gadt.fresh) + def setFreshGADTBounds: this.type = + setGadtState(gadtState.fresh) + def setSearchHistory(searchHistory: SearchHistory): this.type = util.Stats.record("Context.setSearchHistory") - this.searchHistory = searchHistory + this._searchHistory = searchHistory this + def setSource(source: SourceFile): 
this.type = util.Stats.record("Context.setSource") - this.source = source + this._source = source this + private def setMoreProperties(moreProperties: Map[Key[Any], Any]): this.type = util.Stats.record("Context.setMoreProperties") - this.moreProperties = moreProperties + this._moreProperties = moreProperties this + private def setStore(store: Store): this.type = util.Stats.record("Context.setStore") - this.store = store + this._store = store this - def setImplicits(implicits: ContextualImplicits): this.type = { this.implicitsCache = implicits; this } def setCompilationUnit(compilationUnit: CompilationUnit): this.type = { setSource(compilationUnit.source) @@ -681,6 +710,28 @@ object Contexts { def setDebug: this.type = setSetting(base.settings.Ydebug, true) } + object FreshContext: + /** Defines an initial context with given context base and possible settings. */ + def initial(base: ContextBase, settingsGroup: SettingGroup): Context = + val c = new FreshContext(base) + c._outer = NoContext + c._period = InitialPeriod + c._mode = Mode.None + c._typerState = TyperState.initialState() + c._owner = NoSymbol + c._tree = untpd.EmptyTree + c._moreProperties = Map(MessageLimiter -> DefaultMessageLimiter()) + c._scope = EmptyScope + c._source = NoSource + c._store = initialStore + .updated(settingsStateLoc, settingsGroup.defaultState) + .updated(notNullInfosLoc, Nil) + .updated(compilationUnitLoc, NoCompilationUnit) + c._searchHistory = new SearchRoot + c._gadtState = GadtState(GadtConstraint.empty) + c + end FreshContext + given ops: AnyRef with extension (c: Context) def addNotNullInfo(info: NotNullInfo) = @@ -710,56 +761,40 @@ object Contexts { final def retractMode(mode: Mode): c.type = c.setMode(c.mode &~ mode) } - private def exploreCtx(using Context): FreshContext = - util.Stats.record("explore") - val base = ctx.base - import base._ - val nestedCtx = - if exploresInUse < exploreContexts.size then - exploreContexts(exploresInUse).reuseIn(ctx) - else - val ts = 
TyperState() - .setReporter(ExploringReporter()) - .setCommittable(false) - val c = FreshContext(ctx.base).init(ctx, ctx).setTyperState(ts) - exploreContexts += c - c - exploresInUse += 1 - val nestedTS = nestedCtx.typerState - nestedTS.init(ctx.typerState, ctx.typerState.constraint) - nestedCtx - - private def wrapUpExplore(ectx: Context) = - ectx.reporter.asInstanceOf[ExploringReporter].reset() - ectx.base.exploresInUse -= 1 - + /** Run `op` with a pool-allocated context that has an ExporeTyperState. */ inline def explore[T](inline op: Context ?=> T)(using Context): T = - val ectx = exploreCtx - try op(using ectx) finally wrapUpExplore(ectx) + exploreInFreshCtx(op) + /** Run `op` with a pool-allocated FreshContext that has an ExporeTyperState. */ inline def exploreInFreshCtx[T](inline op: FreshContext ?=> T)(using Context): T = - val ectx = exploreCtx - try op(using ectx) finally wrapUpExplore(ectx) - - private def changeOwnerCtx(owner: Symbol)(using Context): Context = - val base = ctx.base - import base._ - val nestedCtx = - if changeOwnersInUse < changeOwnerContexts.size then - changeOwnerContexts(changeOwnersInUse).reuseIn(ctx) - else - val c = FreshContext(ctx.base).init(ctx, ctx) - changeOwnerContexts += c - c - changeOwnersInUse += 1 - nestedCtx.setOwner(owner).setTyperState(ctx.typerState) - - /** Run `op` in current context, with a mode is temporarily set as specified. + val pool = ctx.base.exploreContextPool + val nestedCtx = pool.next() + try op(using nestedCtx) + finally + nestedCtx.typerState.reporter.asInstanceOf[ExploringReporter].reset() + pool.free() + + /** Run `op` with a pool-allocated context that has a fresh typer state. + * Commit the typer state if `commit` applied to `op`'s result returns true. 
*/ + inline def withFreshTyperState[T](inline op: Context ?=> T, inline commit: T => Context ?=> Boolean)(using Context): T = + val pool = ctx.base.freshTSContextPool + val nestedCtx = pool.next() + try + val result = op(using nestedCtx) + if commit(result)(using nestedCtx) then + nestedCtx.typerState.commit() + nestedCtx.typerState.setCommittable(true) + result + finally + pool.free() + + /** Run `op` with a pool-allocated context that has the given `owner`. */ inline def runWithOwner[T](owner: Symbol)(inline op: Context ?=> T)(using Context): T = if Config.reuseOwnerContexts then - try op(using changeOwnerCtx(owner)) - finally ctx.base.changeOwnersInUse -= 1 + val pool = ctx.base.generalContextPool + try op(using pool.next().setOwner(owner).setTyperState(ctx.typerState)) + finally pool.free() else op(using ctx.fresh.setOwner(owner)) @@ -796,30 +831,9 @@ object Contexts { finally ctx.base.comparersInUse = saved end comparing - /** A class defining the initial context with given context base - * and set of possible settings. 
- */ - private class InitialContext(base: ContextBase, settingsGroup: SettingGroup) extends FreshContext(base) { - outer = NoContext - period = InitialPeriod - mode = Mode.None - typerState = TyperState.initialState() - owner = NoSymbol - tree = untpd.EmptyTree - moreProperties = Map(MessageLimiter -> DefaultMessageLimiter()) - scope = EmptyScope - source = NoSource - store = initialStore - .updated(settingsStateLoc, settingsGroup.defaultState) - .updated(notNullInfosLoc, Nil) - .updated(compilationUnitLoc, NoCompilationUnit) - searchHistory = new SearchRoot - gadt = GadtConstraint.empty - } - - @sharable object NoContext extends Context((null: ContextBase | Null).uncheckedNN) { - source = NoSource + @sharable val NoContext: Context = new FreshContext((null: ContextBase | Null).uncheckedNN) { override val implicits: ContextualImplicits = new ContextualImplicits(Nil, null, false)(this: @unchecked) + setSource(NoSource) } /** A context base defines state and associated methods that exist once per @@ -833,10 +847,10 @@ object Contexts { val settings: ScalaSettings = new ScalaSettings /** The initial context */ - val initialCtx: Context = new InitialContext(this, settings) + val initialCtx: Context = FreshContext.initial(this: @unchecked, settings) /** The platform, initialized by `initPlatform()`. 
*/ - private var _platform: Platform | Null = _ + private var _platform: Platform | Null = uninitialized /** The platform */ def platform: Platform = { @@ -872,6 +886,47 @@ object Contexts { allPhases.find(_.period.containsPhaseId(p.id)).getOrElse(NoPhase) } + class ContextPool: + protected def fresh()(using Context): FreshContext = + FreshContext(ctx.base).init(ctx, ctx) + + private var inUse: Int = 0 + private var pool = new mutable.ArrayBuffer[FreshContext] + + def next()(using Context): FreshContext = + val base = ctx.base + import base._ + val nestedCtx = + if inUse < pool.size then + pool(inUse).reuseIn(ctx) + else + val c = fresh() + pool += c + c + inUse += 1 + nestedCtx + + final def free(): Unit = + inUse -= 1 + end ContextPool + + class TSContextPool extends ContextPool: + override def next()(using Context) = + val nextCtx = super.next() + nextCtx.typerState.init(ctx.typerState, ctx.typerState.constraint) + nextCtx + + class FreshTSContextPool extends TSContextPool: + override protected def fresh()(using Context) = + super.fresh().setTyperState(ctx.typerState.fresh(committable = true)) + + class ExploreContextPool extends TSContextPool: + override protected def fresh()(using Context) = + val ts = TyperState() + .setReporter(ExploringReporter()) + .setCommittable(false) + super.fresh().setTyperState(ts) + /** The essential mutable state of a context base, collected into a common class */ class ContextState { // Symbols state @@ -922,22 +977,27 @@ object Contexts { // Phases state - private[core] var phasesPlan: List[List[Phase]] = _ + private[core] var phasesPlan: List[List[Phase]] = uninitialized /** Phases by id */ - private[dotc] var phases: Array[Phase] = _ + private[dotc] var phases: Array[Phase] = uninitialized /** Phases with consecutive Transforms grouped into a single phase, Empty array if fusion is disabled */ private[core] var fusedPhases: Array[Phase] = Array.empty[Phase] /** Next denotation transformer id */ - private[core] var 
nextDenotTransformerId: Array[Int] = _ + private[core] var nextDenotTransformerId: Array[Int] = uninitialized - private[core] var denotTransformers: Array[DenotTransformer] = _ + private[core] var denotTransformers: Array[DenotTransformer] = uninitialized /** Flag to suppress inlining, set after overflow */ private[dotc] var stopInlining: Boolean = false + /** Cached -Yno-double-bindings setting. This is accessed from `setDenot`, which + * is fairly hot, so we don't want to lookup the setting each time it is called. + */ + private[dotc] var checkNoDoubleBindings = false + /** A variable that records that some error was reported in a globally committable context. * The error will not necessarlily be emitted, since it could still be that * the enclosing context will be aborted. The variable is used as a smoke test @@ -954,11 +1014,9 @@ object Contexts { protected[dotc] val indentTab: String = " " - private[Contexts] val exploreContexts = new mutable.ArrayBuffer[FreshContext] - private[Contexts] var exploresInUse: Int = 0 - - private[Contexts] val changeOwnerContexts = new mutable.ArrayBuffer[FreshContext] - private[Contexts] var changeOwnersInUse: Int = 0 + val exploreContextPool = ExploreContextPool() + val freshTSContextPool = FreshTSContextPool() + val generalContextPool = ContextPool() private[Contexts] val comparers = new mutable.ArrayBuffer[TypeComparer] private[Contexts] var comparersInUse: Int = 0 @@ -967,7 +1025,7 @@ object Contexts { private[core] val reusableDataReader = ReusableInstance(new ReusableDataReader()) - private[dotc] var wConfCache: (List[String], WConf) = _ + private[dotc] var wConfCache: (List[String], WConf) = uninitialized def sharedCharArray(len: Int): Array[Char] = while len > charArray.length do diff --git a/compiler/src/dotty/tools/dotc/core/Decorators.scala b/compiler/src/dotty/tools/dotc/core/Decorators.scala index d392a4e3079a..4ef0dbc9a43b 100644 --- a/compiler/src/dotty/tools/dotc/core/Decorators.scala +++ 
b/compiler/src/dotty/tools/dotc/core/Decorators.scala @@ -11,7 +11,7 @@ import printing.{ Printer, Showable }, printing.Formatting._, printing.Texts._ import transform.MegaPhase import reporting.{Message, NoExplanation} -/** This object provides useful implicit decorators for types defined elsewhere */ +/** This object provides useful extension methods for types defined elsewhere */ object Decorators { /** Extension methods for toType/TermName methods on PreNames. @@ -58,8 +58,11 @@ object Decorators { padding + s.replace("\n", "\n" + padding) end extension + /** Convert lazy string to message. To be with caution, since no message-defined + * formatting will be done on the string. + */ extension (str: => String) - def toMessage: Message = reporting.NoExplanation(str) + def toMessage: Message = NoExplanation(str)(using NoContext) /** Implements a findSymbol method on iterators of Symbols that * works like find but avoids Option, replacing None with NoSymbol. @@ -78,7 +81,7 @@ object Decorators { /** Implements filterConserve, zipWithConserve methods * on lists that avoid duplication of list nodes where feasible. */ - implicit class ListDecorator[T](val xs: List[T]) extends AnyVal { + extension [T](xs: List[T]) final def mapconserve[U](f: T => U): List[U] = { @tailrec @@ -207,11 +210,18 @@ object Decorators { } /** Union on lists seen as sets */ - def | (ys: List[T]): List[T] = xs ::: (ys filterNot (xs contains _)) + def setUnion (ys: List[T]): List[T] = xs ::: ys.filterNot(xs contains _) - /** Intersection on lists seen as sets */ - def & (ys: List[T]): List[T] = xs filter (ys contains _) - } + /** Reduce left with `op` as long as list `xs` is not longer than `seqLimit`. + * Otherwise, split list in two half, reduce each, and combine with `op`. 
+ */ + def reduceBalanced(op: (T, T) => T, seqLimit: Int = 100): T = + val len = xs.length + if len > seqLimit then + val (leading, trailing) = xs.splitAt(len / 2) + op(leading.reduceBalanced(op, seqLimit), trailing.reduceBalanced(op, seqLimit)) + else + xs.reduceLeft(op) extension [T, U](xss: List[List[T]]) def nestedMap(f: T => U): List[List[U]] = xss match @@ -269,17 +279,19 @@ object Decorators { catch case ex: CyclicReference => "... (caught cyclic reference) ..." case NonFatal(ex) - if !ctx.mode.is(Mode.PrintShowExceptions) && !ctx.settings.YshowPrintErrors.value => - val msg = ex match { case te: TypeError => te.toMessage case _ => ex.getMessage } - s"[cannot display due to $msg, raw string = $x]" + if !ctx.mode.is(Mode.PrintShowExceptions) && !ctx.settings.YshowPrintErrors.value => + s"... (cannot display due to ${ex.className} ${ex.getMessage}) ..." case _ => String.valueOf(x).nn + /** Returns the simple class name of `x`. */ + def className: String = x.getClass.getSimpleName.nn + extension [T](x: T) def assertingErrorsReported(using Context): T = { assert(ctx.reporter.errorsReported) x } - def assertingErrorsReported(msg: => String)(using Context): T = { + def assertingErrorsReported(msg: Message)(using Context): T = { assert(ctx.reporter.errorsReported, msg) x } @@ -289,21 +301,16 @@ object Decorators { if (xs.head eq x1) && (xs.tail eq xs1) then xs else x1 :: xs1 extension (sc: StringContext) + /** General purpose string formatting */ def i(args: Shown*)(using Context): String = new StringFormatter(sc).assemble(args) - /** Formatting for error messages: Like `i` but suppress follow-on - * error messages after the first one if some of their arguments are "non-sensical". - */ - def em(args: Shown*)(using Context): String = - forErrorMessages(new StringFormatter(sc).assemble(args)) - - /** Formatting with added explanations: Like `em`, but add explanations to - * give more info about type variables and to disambiguate where needed. 
+ /** Interpolator yielding an error message, which undergoes + * the formatting defined in Message. */ - def ex(args: Shown*)(using Context): String = - explained(new StringFormatter(sc).assemble(args)) + def em(args: Shown*)(using Context): NoExplanation = + NoExplanation(i(args*)) extension [T <: AnyRef](arr: Array[T]) def binarySearch(x: T | Null): Int = java.util.Arrays.binarySearch(arr.asInstanceOf[Array[Object | Null]], x) diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 174244b4a456..027aec16e9a3 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -86,7 +86,7 @@ class Definitions { newPermanentClassSymbol(ScalaPackageClass, name, Artifact, completer).entered } - /** The trait FunctionN, ContextFunctionN, ErasedFunctionN or ErasedContextFunction, for some N + /** The trait FunctionN and ContextFunctionN for some N * @param name The name of the trait to be created * * FunctionN traits follow this template: @@ -104,24 +104,9 @@ class Definitions { * trait ContextFunctionN[-T0,...,-T{N-1}, +R] extends Object { * def apply(using $x0: T0, ..., $x{N_1}: T{N-1}): R * } - * - * ErasedFunctionN traits follow this template: - * - * trait ErasedFunctionN[-T0,...,-T{N-1}, +R] extends Object { - * def apply(erased $x0: T0, ..., $x{N_1}: T{N-1}): R - * } - * - * ErasedContextFunctionN traits follow this template: - * - * trait ErasedContextFunctionN[-T0,...,-T{N-1}, +R] extends Object { - * def apply(using erased $x0: T0, ..., $x{N_1}: T{N-1}): R - * } - * - * ErasedFunctionN and ErasedContextFunctionN erase to Function0. 
- * * ImpureXYZFunctionN follow this template: * - * type ImpureXYZFunctionN[-T0,...,-T{N-1}, +R] = {*} XYZFunctionN[T0,...,T{N-1}, R] + * type ImpureXYZFunctionN[-T0,...,-T{N-1}, +R] = {cap} XYZFunctionN[T0,...,T{N-1}, R] */ private def newFunctionNType(name: TypeName): Symbol = { val impure = name.startsWith("Impure") @@ -149,8 +134,7 @@ class Definitions { val resParamRef = enterTypeParam(cls, paramNamePrefix ++ "R", Covariant, decls).typeRef val methodType = MethodType.companion( isContextual = name.isContextFunction, - isImplicit = false, - isErased = name.isErasedFunction) + isImplicit = false) decls.enter(newMethod(cls, nme.apply, methodType(argParamRefs, resParamRef), Deferred)) denot.info = ClassInfo(ScalaPackageClass.thisType, cls, ObjectType :: Nil, decls) @@ -518,8 +502,8 @@ class Definitions { def staticsMethod(name: PreName): TermSymbol = ScalaStaticsModule.requiredMethod(name) @tu lazy val DottyArraysModule: Symbol = requiredModule("scala.runtime.Arrays") - def newGenericArrayMethod(using Context): TermSymbol = DottyArraysModule.requiredMethod("newGenericArray") - def newArrayMethod(using Context): TermSymbol = DottyArraysModule.requiredMethod("newArray") + @tu lazy val newGenericArrayMethod: TermSymbol = DottyArraysModule.requiredMethod("newGenericArray") + @tu lazy val newArrayMethod: TermSymbol = DottyArraysModule.requiredMethod("newArray") def getWrapVarargsArrayModule: Symbol = ScalaRuntimeModule @@ -530,9 +514,12 @@ class Definitions { }) @tu lazy val ListClass: Symbol = requiredClass("scala.collection.immutable.List") + def ListType: TypeRef = ListClass.typeRef @tu lazy val ListModule: Symbol = requiredModule("scala.collection.immutable.List") @tu lazy val NilModule: Symbol = requiredModule("scala.collection.immutable.Nil") + def NilType: TermRef = NilModule.termRef @tu lazy val ConsClass: Symbol = requiredClass("scala.collection.immutable.::") + def ConsType: TypeRef = ConsClass.typeRef @tu lazy val SeqFactoryClass: Symbol = 
requiredClass("scala.collection.SeqFactory") @tu lazy val SingletonClass: ClassSymbol = @@ -644,6 +631,8 @@ class Definitions { @tu lazy val RepeatedParamClass: ClassSymbol = enterSpecialPolyClass(tpnme.REPEATED_PARAM_CLASS, Covariant, Seq(ObjectType, SeqType)) + @tu lazy val IntoType: TypeSymbol = enterAliasType(tpnme.INTO, HKTypeLambda(TypeBounds.empty :: Nil)(_.paramRefs(0))) + // fundamental classes @tu lazy val StringClass: ClassSymbol = requiredClass("java.lang.String") def StringType: Type = StringClass.typeRef @@ -699,6 +688,7 @@ class Definitions { @tu lazy val JavaCalendarClass: ClassSymbol = requiredClass("java.util.Calendar") @tu lazy val JavaDateClass: ClassSymbol = requiredClass("java.util.Date") @tu lazy val JavaFormattableClass: ClassSymbol = requiredClass("java.util.Formattable") + @tu lazy val JavaRecordClass: Symbol = getClassIfDefined("java.lang.Record") @tu lazy val JavaEnumClass: ClassSymbol = { val cls = requiredClass("java.lang.Enum") @@ -732,6 +722,10 @@ class Definitions { } def JavaEnumType = JavaEnumClass.typeRef + @tu lazy val MethodHandleClass: ClassSymbol = requiredClass("java.lang.invoke.MethodHandle") + @tu lazy val MethodHandlesLookupClass: ClassSymbol = requiredClass("java.lang.invoke.MethodHandles.Lookup") + @tu lazy val VarHandleClass: ClassSymbol = requiredClass("java.lang.invoke.VarHandle") + @tu lazy val StringBuilderClass: ClassSymbol = requiredClass("scala.collection.mutable.StringBuilder") @tu lazy val MatchErrorClass : ClassSymbol = requiredClass("scala.MatchError") @tu lazy val ConversionClass : ClassSymbol = requiredClass("scala.Conversion").typeRef.symbol.asClass @@ -805,9 +799,12 @@ class Definitions { @tu lazy val ReflectPackageClass: Symbol = requiredPackage("scala.reflect.package").moduleClass @tu lazy val ClassTagClass: ClassSymbol = requiredClass("scala.reflect.ClassTag") + @tu lazy val ClassTagClass_unapply: Symbol = ClassTagClass.requiredMethod("unapply") @tu lazy val ClassTagModule: Symbol = 
ClassTagClass.companionModule @tu lazy val ClassTagModule_apply: Symbol = ClassTagModule.requiredMethod(nme.apply) + @tu lazy val ReflectSelectableTypeRef: TypeRef = requiredClassRef("scala.reflect.Selectable") + @tu lazy val TypeTestClass: ClassSymbol = requiredClass("scala.reflect.TypeTest") @tu lazy val TypeTest_unapply: Symbol = TypeTestClass.requiredMethod(nme.unapply) @tu lazy val TypeTestModule_identity: Symbol = TypeTestClass.companionModule.requiredMethod(nme.identity) @@ -856,7 +853,12 @@ class Definitions { @tu lazy val QuoteMatchingClass: ClassSymbol = requiredClass("scala.quoted.runtime.QuoteMatching") @tu lazy val QuoteMatching_ExprMatch: Symbol = QuoteMatchingClass.requiredMethod("ExprMatch") + @tu lazy val QuoteMatching_ExprMatchModule: Symbol = QuoteMatchingClass.requiredClass("ExprMatchModule") @tu lazy val QuoteMatching_TypeMatch: Symbol = QuoteMatchingClass.requiredMethod("TypeMatch") + @tu lazy val QuoteMatching_TypeMatchModule: Symbol = QuoteMatchingClass.requiredClass("TypeMatchModule") + @tu lazy val QuoteMatchingModule: Symbol = requiredModule("scala.quoted.runtime.QuoteMatching") + @tu lazy val QuoteMatching_KNil: Symbol = QuoteMatchingModule.requiredType("KNil") + @tu lazy val QuoteMatching_KCons: Symbol = QuoteMatchingModule.requiredType("KCons") @tu lazy val ToExprModule: Symbol = requiredModule("scala.quoted.ToExpr") @tu lazy val ToExprModule_BooleanToExpr: Symbol = ToExprModule.requiredMethod("BooleanToExpr") @@ -889,6 +891,8 @@ class Definitions { @tu lazy val QuotedTypeModule: Symbol = QuotedTypeClass.companionModule @tu lazy val QuotedTypeModule_of: Symbol = QuotedTypeModule.requiredMethod("of") + @tu lazy val MacroAnnotationClass: ClassSymbol = requiredClass("scala.annotation.MacroAnnotation") + @tu lazy val CanEqualClass: ClassSymbol = getClassIfDefined("scala.Eql").orElse(requiredClass("scala.CanEqual")).asClass def CanEqual_canEqualAny(using Context): TermSymbol = val methodName = if CanEqualClass.name == tpnme.Eql then 
nme.eqlAny else nme.canEqualAny @@ -960,18 +964,25 @@ class Definitions { def TupledFunctionClass(using Context): ClassSymbol = TupledFunctionTypeRef.symbol.asClass def RuntimeTupleFunctionsModule(using Context): Symbol = requiredModule("scala.runtime.TupledFunctions") + @tu lazy val boundaryModule: Symbol = requiredModule("scala.util.boundary") + @tu lazy val LabelClass: Symbol = requiredClass("scala.util.boundary.Label") + @tu lazy val BreakClass: Symbol = requiredClass("scala.util.boundary.Break") + @tu lazy val CapsModule: Symbol = requiredModule("scala.caps") - @tu lazy val Caps_unsafeBox: Symbol = CapsModule.requiredMethod("unsafeBox") - @tu lazy val Caps_unsafeUnbox: Symbol = CapsModule.requiredMethod("unsafeUnbox") - @tu lazy val captureRoot: TermSymbol = CapsModule.requiredValue("*") + @tu lazy val captureRoot: TermSymbol = CapsModule.requiredValue("cap") + @tu lazy val CapsUnsafeModule: Symbol = requiredModule("scala.caps.unsafe") + @tu lazy val Caps_unsafeBox: Symbol = CapsUnsafeModule.requiredMethod("unsafeBox") + @tu lazy val Caps_unsafeUnbox: Symbol = CapsUnsafeModule.requiredMethod("unsafeUnbox") + @tu lazy val Caps_unsafeBoxFunArg: Symbol = CapsUnsafeModule.requiredMethod("unsafeBoxFunArg") + @tu lazy val Caps_SealedAnnot: ClassSymbol = requiredClass("scala.caps.Sealed") // Annotation base classes @tu lazy val AnnotationClass: ClassSymbol = requiredClass("scala.annotation.Annotation") - @tu lazy val ClassfileAnnotationClass: ClassSymbol = requiredClass("scala.annotation.ClassfileAnnotation") @tu lazy val StaticAnnotationClass: ClassSymbol = requiredClass("scala.annotation.StaticAnnotation") @tu lazy val RefiningAnnotationClass: ClassSymbol = requiredClass("scala.annotation.RefiningAnnotation") // Annotation classes + @tu lazy val AllowConversionsAnnot: ClassSymbol = requiredClass("scala.annotation.allowConversions") @tu lazy val AnnotationDefaultAnnot: ClassSymbol = requiredClass("scala.annotation.internal.AnnotationDefault") @tu lazy val 
BeanPropertyAnnot: ClassSymbol = requiredClass("scala.beans.BeanProperty") @tu lazy val BooleanBeanPropertyAnnot: ClassSymbol = requiredClass("scala.beans.BooleanBeanProperty") @@ -991,6 +1002,7 @@ class Definitions { @tu lazy val MappedAlternativeAnnot: ClassSymbol = requiredClass("scala.annotation.internal.MappedAlternative") @tu lazy val MigrationAnnot: ClassSymbol = requiredClass("scala.annotation.migration") @tu lazy val NowarnAnnot: ClassSymbol = requiredClass("scala.annotation.nowarn") + @tu lazy val UnusedAnnot: ClassSymbol = requiredClass("scala.annotation.unused") @tu lazy val TransparentTraitAnnot: ClassSymbol = requiredClass("scala.annotation.transparentTrait") @tu lazy val NativeAnnot: ClassSymbol = requiredClass("scala.native") @tu lazy val RepeatedAnnot: ClassSymbol = requiredClass("scala.annotation.internal.Repeated") @@ -1011,12 +1023,17 @@ class Definitions { @tu lazy val UncheckedAnnot: ClassSymbol = requiredClass("scala.unchecked") @tu lazy val UncheckedStableAnnot: ClassSymbol = requiredClass("scala.annotation.unchecked.uncheckedStable") @tu lazy val UncheckedVarianceAnnot: ClassSymbol = requiredClass("scala.annotation.unchecked.uncheckedVariance") + @tu lazy val UncheckedCapturesAnnot: ClassSymbol = requiredClass("scala.annotation.unchecked.uncheckedCaptures") @tu lazy val VolatileAnnot: ClassSymbol = requiredClass("scala.volatile") @tu lazy val WithPureFunsAnnot: ClassSymbol = requiredClass("scala.annotation.internal.WithPureFuns") + @tu lazy val BeanGetterMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.beanGetter") + @tu lazy val BeanSetterMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.beanSetter") @tu lazy val FieldMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.field") @tu lazy val GetterMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.getter") @tu lazy val ParamMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.param") @tu lazy val SetterMetaAnnot: ClassSymbol = 
requiredClass("scala.annotation.meta.setter") + @tu lazy val CompanionClassMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.companionClass") + @tu lazy val CompanionMethodMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.companionMethod") @tu lazy val ShowAsInfixAnnot: ClassSymbol = requiredClass("scala.annotation.showAsInfix") @tu lazy val FunctionalInterfaceAnnot: ClassSymbol = requiredClass("java.lang.FunctionalInterface") @tu lazy val TargetNameAnnot: ClassSymbol = requiredClass("scala.annotation.targetName") @@ -1029,8 +1046,10 @@ class Definitions { @tu lazy val JavaRepeatableAnnot: ClassSymbol = requiredClass("java.lang.annotation.Repeatable") // A list of meta-annotations that are relevant for fields and accessors - @tu lazy val FieldAccessorMetaAnnots: Set[Symbol] = - Set(FieldMetaAnnot, GetterMetaAnnot, ParamMetaAnnot, SetterMetaAnnot) + @tu lazy val NonBeanMetaAnnots: Set[Symbol] = + Set(FieldMetaAnnot, GetterMetaAnnot, ParamMetaAnnot, SetterMetaAnnot, CompanionClassMetaAnnot, CompanionMethodMetaAnnot) + @tu lazy val MetaAnnots: Set[Symbol] = + NonBeanMetaAnnots + BeanGetterMetaAnnot + BeanSetterMetaAnnot // A list of annotations that are commonly used to indicate that a field/method argument or return // type is not null. 
These annotations are used by the nullification logic in JavaNullInterop to @@ -1080,15 +1099,23 @@ class Definitions { sym.owner.linkedClass.typeRef object FunctionOf { - def apply(args: List[Type], resultType: Type, isContextual: Boolean = false, isErased: Boolean = false)(using Context): Type = - FunctionType(args.length, isContextual, isErased).appliedTo(args ::: resultType :: Nil) - def unapply(ft: Type)(using Context): Option[(List[Type], Type, Boolean, Boolean)] = { - val tsym = ft.typeSymbol - if isFunctionClass(tsym) && ft.isRef(tsym) then - val targs = ft.dealias.argInfos - if (targs.isEmpty) None - else Some(targs.init, targs.last, tsym.name.isContextFunction, tsym.name.isErasedFunction) - else None + def apply(args: List[Type], resultType: Type, isContextual: Boolean = false)(using Context): Type = + val mt = MethodType.companion(isContextual, false)(args, resultType) + if mt.hasErasedParams then + RefinedType(ErasedFunctionClass.typeRef, nme.apply, mt) + else + FunctionType(args.length, isContextual).appliedTo(args ::: resultType :: Nil) + def unapply(ft: Type)(using Context): Option[(List[Type], Type, Boolean)] = { + ft.dealias match + case RefinedType(parent, nme.apply, mt: MethodType) if isErasedFunctionType(parent) => + Some(mt.paramInfos, mt.resType, mt.isContextualMethod) + case _ => + val tsym = ft.dealias.typeSymbol + if isFunctionSymbol(tsym) && ft.isRef(tsym) then + val targs = ft.dealias.argInfos + if (targs.isEmpty) None + else Some(targs.init, targs.last, tsym.name.isContextFunction) + else None } } @@ -1347,6 +1374,15 @@ class Definitions { @tu lazy val untestableClasses: Set[Symbol] = Set(NothingClass, NullClass, SingletonClass) + /** Base classes that are assumed to be pure for the purposes of capture checking. + * Every class inheriting from a pure baseclass is pure. 
+ */ + @tu lazy val pureBaseClasses = Set(defn.AnyValClass, defn.ThrowableClass) + + /** Non-inheritable lasses that are assumed to be pure for the purposes of capture checking, + */ + @tu lazy val pureSimpleClasses = Set(StringClass, NothingClass, NullClass) + @tu lazy val AbstractFunctionType: Array[TypeRef] = mkArityArray("scala.runtime.AbstractFunction", MaxImplementedFunctionArity, 0).asInstanceOf[Array[TypeRef]] val AbstractFunctionClassPerRun: PerRun[Array[Symbol]] = new PerRun(AbstractFunctionType.map(_.symbol.asClass)) def AbstractFunctionClass(n: Int)(using Context): Symbol = AbstractFunctionClassPerRun()(using ctx)(n) @@ -1389,8 +1425,8 @@ class Definitions { val classRefs1 = new Array[TypeRef | Null](classRefs.length * 2) Array.copy(classRefs, 0, classRefs1, 0, classRefs.length) classRefs = classRefs1 - val funName = s"scala.$prefix$n" if classRefs(n) == null then + val funName = s"scala.$prefix$n" classRefs(n) = if prefix.startsWith("Impure") then staticRef(funName.toTypeName).symbol.typeRef @@ -1398,24 +1434,22 @@ class Definitions { classRefs(n).nn end FunType - private def funTypeIdx(isContextual: Boolean, isErased: Boolean, isImpure: Boolean): Int = + private def funTypeIdx(isContextual: Boolean, isImpure: Boolean): Int = (if isContextual then 1 else 0) - + (if isErased then 2 else 0) - + (if isImpure then 4 else 0) + + (if isImpure then 2 else 0) private val funTypeArray: IArray[FunType] = val arr = Array.ofDim[FunType](8) val choices = List(false, true) - for contxt <- choices; erasd <- choices; impure <- choices do + for contxt <- choices; impure <- choices do var str = "Function" if contxt then str = "Context" + str - if erasd then str = "Erased" + str if impure then str = "Impure" + str - arr(funTypeIdx(contxt, erasd, impure)) = FunType(str) + arr(funTypeIdx(contxt, impure)) = FunType(str) IArray.unsafeFromArray(arr) - def FunctionSymbol(n: Int, isContextual: Boolean = false, isErased: Boolean = false, isImpure: Boolean = false)(using 
Context): Symbol = - funTypeArray(funTypeIdx(isContextual, isErased, isImpure))(n).symbol + def FunctionSymbol(n: Int, isContextual: Boolean = false, isImpure: Boolean = false)(using Context): Symbol = + funTypeArray(funTypeIdx(isContextual, isImpure))(n).symbol @tu lazy val Function0_apply: Symbol = Function0.requiredMethod(nme.apply) @tu lazy val ContextFunction0_apply: Symbol = ContextFunction0.requiredMethod(nme.apply) @@ -1425,12 +1459,14 @@ class Definitions { @tu lazy val Function2: Symbol = FunctionSymbol(2) @tu lazy val ContextFunction0: Symbol = FunctionSymbol(0, isContextual = true) - def FunctionType(n: Int, isContextual: Boolean = false, isErased: Boolean = false, isImpure: Boolean = false)(using Context): TypeRef = - FunctionSymbol(n, isContextual && !ctx.erasedTypes, isErased, isImpure).typeRef + def FunctionType(n: Int, isContextual: Boolean = false, isImpure: Boolean = false)(using Context): TypeRef = + FunctionSymbol(n, isContextual && !ctx.erasedTypes, isImpure).typeRef lazy val PolyFunctionClass = requiredClass("scala.PolyFunction") def PolyFunctionType = PolyFunctionClass.typeRef + lazy val ErasedFunctionClass = requiredClass("scala.runtime.ErasedFunction") + /** If `cls` is a class in the scala package, its name, otherwise EmptyTypeName */ def scalaClassName(cls: Symbol)(using Context): TypeName = cls.denot match case clsd: ClassDenotation if clsd.owner eq ScalaPackageClass => @@ -1463,8 +1499,6 @@ class Definitions { * - FunctionXXL * - FunctionN for N >= 0 * - ContextFunctionN for N >= 0 - * - ErasedFunctionN for N > 0 - * - ErasedContextFunctionN for N > 0 */ def isFunctionClass(cls: Symbol): Boolean = scalaClassName(cls).isFunction @@ -1479,16 +1513,9 @@ class Definitions { /** Is an context function class. * - ContextFunctionN for N >= 0 - * - ErasedContextFunctionN for N > 0 */ def isContextFunctionClass(cls: Symbol): Boolean = scalaClassName(cls).isContextFunction - /** Is an erased function class. 
- * - ErasedFunctionN for N > 0 - * - ErasedContextFunctionN for N > 0 - */ - def isErasedFunctionClass(cls: Symbol): Boolean = scalaClassName(cls).isErasedFunction - /** Is either FunctionXXL or a class that will be erased to FunctionXXL * - FunctionXXL * - FunctionN for N >= 22 @@ -1525,8 +1552,7 @@ class Definitions { */ def functionTypeErasure(cls: Symbol): Type = val arity = scalaClassName(cls).functionArity - if cls.name.isErasedFunction then FunctionType(0) - else if arity > 22 then FunctionXXLClass.typeRef + if arity > 22 then FunctionXXLClass.typeRef else if arity >= 0 then FunctionType(arity) else NoType @@ -1541,12 +1567,21 @@ class Definitions { private val PredefImportFns: RootRef = RootRef(() => ScalaPredefModule.termRef, isPredef=true) - @tu private lazy val JavaRootImportFns: List[RootRef] = - if ctx.settings.YnoImports.value then Nil - else JavaImportFns + @tu private lazy val YimportsImportFns: List[RootRef] = ctx.settings.Yimports.value.map { name => + val denot = + getModuleIfDefined(name).suchThat(_.is(Module)) `orElse` + getPackageClassIfDefined(name).suchThat(_.is(Package)) + if !denot.exists then + report.error(s"error: bad preamble import $name") + val termRef = denot.symbol.termRef + RootRef(() => termRef) + } + + @tu private lazy val JavaRootImportFns: List[RootRef] = JavaImportFns @tu private lazy val ScalaRootImportFns: List[RootRef] = - if ctx.settings.YnoImports.value then Nil + if !ctx.settings.Yimports.isDefault then YimportsImportFns + else if ctx.settings.YnoImports.value then Nil else if ctx.settings.YnoPredef.value then ScalaImportFns else ScalaImportFns :+ PredefImportFns @@ -1636,6 +1671,15 @@ class Definitions { rec(tp.stripTypeVar, Nil, bound) } + def isSmallGenericTuple(tp: Type)(using Context): Boolean = + if tp.derivesFrom(defn.PairClass) && !defn.isTupleNType(tp.widenDealias) then + // If this is a generic tuple we need to cast it to make the TupleN/ members accessible. 
+ // This works only for generic tuples of known size up to 22. + defn.tupleTypes(tp.widenTermRefExpr) match + case Some(elems) if elems.length <= Definitions.MaxTupleArity => true + case _ => false + else false + def isProductSubType(tp: Type)(using Context): Boolean = tp.derivesFrom(ProductClass) /** Is `tp` (an alias) of either a scala.FunctionN or a scala.ContextFunctionN @@ -1648,16 +1692,29 @@ class Definitions { arity >= 0 && isFunctionClass(sym) && tp.isRef( - FunctionType(arity, sym.name.isContextFunction, sym.name.isErasedFunction).typeSymbol, + FunctionType(arity, sym.name.isContextFunction).typeSymbol, skipRefined = false) end isNonRefinedFunction - /** Is `tp` a representation of a (possibly dependent) function type or an alias of such? */ + /** Returns whether `tp` is an instance or a refined instance of: + * - scala.FunctionN + * - scala.ContextFunctionN + */ def isFunctionType(tp: Type)(using Context): Boolean = isNonRefinedFunction(tp.dropDependentRefinement) + /** Is `tp` a specialized, refined function type? Either an `ErasedFunction` or a `PolyFunction`. 
*/ + def isRefinedFunctionType(tp: Type)(using Context): Boolean = + tp.derivesFrom(defn.PolyFunctionClass) || isErasedFunctionType(tp) + + /** Returns whether `tp` is an instance or a refined instance of: + * - scala.FunctionN + * - scala.ContextFunctionN + * - ErasedFunction + * - PolyFunction + */ def isFunctionOrPolyType(tp: Type)(using Context): Boolean = - isFunctionType(tp) || (tp.typeSymbol eq defn.PolyFunctionClass) + isFunctionType(tp) || isRefinedFunctionType(tp) private def withSpecMethods(cls: ClassSymbol, bases: List[Name], paramTypes: Set[TypeRef]) = for base <- bases; tp <- paramTypes do @@ -1746,7 +1803,7 @@ class Definitions { @tu lazy val FunctionSpecializedApplyNames: collection.Set[Name] = Function0SpecializedApplyNames ++ Function1SpecializedApplyNames ++ Function2SpecializedApplyNames - def functionArity(tp: Type)(using Context): Int = tp.dropDependentRefinement.dealias.argInfos.length - 1 + def functionArity(tp: Type)(using Context): Int = tp.functionArgInfos.length - 1 /** Return underlying context function type (i.e. instance of an ContextFunctionN class) * or NoType if none exists. The following types are considered as underlying types: @@ -1758,6 +1815,8 @@ class Definitions { tp.stripTypeVar.dealias match case tp1: TypeParamRef if ctx.typerState.constraint.contains(tp1) => asContextFunctionType(TypeComparer.bounds(tp1).hiBound) + case tp1 @ RefinedType(parent, nme.apply, mt: MethodType) if isErasedFunctionType(parent) && mt.isContextualMethod => + tp1 case tp1 => if tp1.typeSymbol.name.isContextFunction && isFunctionType(tp1) then tp1 else NoType @@ -1771,18 +1830,28 @@ class Definitions { * types `As`, the result type `B` and a whether the type is an erased context function. 
*/ object ContextFunctionType: - def unapply(tp: Type)(using Context): Option[(List[Type], Type, Boolean)] = + def unapply(tp: Type)(using Context): Option[(List[Type], Type, List[Boolean])] = if ctx.erasedTypes then atPhase(erasurePhase)(unapply(tp)) else - val tp1 = asContextFunctionType(tp) - if tp1.exists then - val args = tp1.dropDependentRefinement.argInfos - Some((args.init, args.last, tp1.typeSymbol.name.isErasedFunction)) - else None + asContextFunctionType(tp) match + case RefinedType(parent, nme.apply, mt: MethodType) if isErasedFunctionType(parent) => + Some((mt.paramInfos, mt.resType, mt.erasedParams)) + case tp1 if tp1.exists => + val args = tp1.functionArgInfos + val erasedParams = erasedFunctionParameters(tp1) + Some((args.init, args.last, erasedParams)) + case _ => None + + /* Returns a list of erased booleans marking whether parameters are erased, for a function type. */ + def erasedFunctionParameters(tp: Type)(using Context): List[Boolean] = tp.dealias match { + case RefinedType(parent, nme.apply, mt: MethodType) => mt.erasedParams + case tp if isFunctionType(tp) => List.fill(functionArity(tp)) { false } + case _ => Nil + } def isErasedFunctionType(tp: Type)(using Context): Boolean = - tp.dealias.typeSymbol.name.isErasedFunction && isFunctionType(tp) + tp.derivesFrom(defn.ErasedFunctionClass) /** A whitelist of Scala-2 classes that are known to be pure */ def isAssuredNoInits(sym: Symbol): Boolean = @@ -1827,20 +1896,53 @@ class Definitions { def isInfix(sym: Symbol)(using Context): Boolean = (sym eq Object_eq) || (sym eq Object_ne) - @tu lazy val assumedTransparentTraits = - Set[Symbol](ComparableClass, ProductClass, SerializableClass, - // add these for now, until we had a chance to retrofit 2.13 stdlib - // we should do a more through sweep through it then. 
- requiredClass("scala.collection.SortedOps"), - requiredClass("scala.collection.StrictOptimizedSortedSetOps"), - requiredClass("scala.collection.generic.DefaultSerializable"), - requiredClass("scala.collection.generic.IsIterable"), - requiredClass("scala.collection.generic.IsIterableOnce"), - requiredClass("scala.collection.generic.IsMap"), - requiredClass("scala.collection.generic.IsSeq"), - requiredClass("scala.collection.generic.Subtractable"), - requiredClass("scala.collection.immutable.StrictOptimizedSeqOps") - ) + @tu lazy val assumedTransparentNames: Map[Name, Set[Symbol]] = + // add these for now, until we had a chance to retrofit 2.13 stdlib + // we should do a more through sweep through it then. + val strs = Map( + "Any" -> Set("scala"), + "AnyVal" -> Set("scala"), + "Matchable" -> Set("scala"), + "Product" -> Set("scala"), + "Object" -> Set("java.lang"), + "Comparable" -> Set("java.lang"), + "Serializable" -> Set("java.io"), + "BitSetOps" -> Set("scala.collection"), + "IndexedSeqOps" -> Set("scala.collection", "scala.collection.mutable", "scala.collection.immutable"), + "IterableOnceOps" -> Set("scala.collection"), + "IterableOps" -> Set("scala.collection"), + "LinearSeqOps" -> Set("scala.collection", "scala.collection.immutable"), + "MapOps" -> Set("scala.collection", "scala.collection.mutable", "scala.collection.immutable"), + "SeqOps" -> Set("scala.collection", "scala.collection.mutable", "scala.collection.immutable"), + "SetOps" -> Set("scala.collection", "scala.collection.mutable", "scala.collection.immutable"), + "SortedMapOps" -> Set("scala.collection", "scala.collection.mutable", "scala.collection.immutable"), + "SortedOps" -> Set("scala.collection"), + "SortedSetOps" -> Set("scala.collection", "scala.collection.mutable", "scala.collection.immutable"), + "StrictOptimizedIterableOps" -> Set("scala.collection"), + "StrictOptimizedLinearSeqOps" -> Set("scala.collection"), + "StrictOptimizedMapOps" -> Set("scala.collection", 
"scala.collection.immutable"), + "StrictOptimizedSeqOps" -> Set("scala.collection", "scala.collection.immutable"), + "StrictOptimizedSetOps" -> Set("scala.collection", "scala.collection.immutable"), + "StrictOptimizedSortedMapOps" -> Set("scala.collection", "scala.collection.immutable"), + "StrictOptimizedSortedSetOps" -> Set("scala.collection", "scala.collection.immutable"), + "ArrayDequeOps" -> Set("scala.collection.mutable"), + "DefaultSerializable" -> Set("scala.collection.generic"), + "IsIterable" -> Set("scala.collection.generic"), + "IsIterableLowPriority" -> Set("scala.collection.generic"), + "IsIterableOnce" -> Set("scala.collection.generic"), + "IsIterableOnceLowPriority" -> Set("scala.collection.generic"), + "IsMap" -> Set("scala.collection.generic"), + "IsSeq" -> Set("scala.collection.generic")) + strs.map { case (simple, pkgs) => ( + simple.toTypeName, + pkgs.map(pkg => staticRef(pkg.toTermName, isPackage = true).symbol.moduleClass) + ) + } + + def isAssumedTransparent(sym: Symbol): Boolean = + assumedTransparentNames.get(sym.name) match + case Some(pkgs) => pkgs.contains(sym.owner) + case none => false // ----- primitive value class machinery ------------------------------------------ @@ -1962,6 +2064,7 @@ class Definitions { orType, RepeatedParamClass, ByNameParamClass2x, + IntoType, AnyValClass, NullClass, NothingClass, diff --git a/compiler/src/dotty/tools/dotc/core/Denotations.scala b/compiler/src/dotty/tools/dotc/core/Denotations.scala index f267e6c85e03..e56cc453d34d 100644 --- a/compiler/src/dotty/tools/dotc/core/Denotations.scala +++ b/compiler/src/dotty/tools/dotc/core/Denotations.scala @@ -175,7 +175,7 @@ object Denotations { * * @param symbol The referencing symbol, or NoSymbol is none exists */ - abstract class Denotation(val symbol: Symbol, protected var myInfo: Type) extends PreDenotation with printing.Showable { + abstract class Denotation(val symbol: Symbol, protected var myInfo: Type, val isType: Boolean) extends PreDenotation with 
printing.Showable { type AsSeenFromResult <: Denotation /** The type info. @@ -194,12 +194,6 @@ object Denotations { */ def infoOrCompleter: Type - /** The period during which this denotation is valid. */ - def validFor: Period - - /** Is this a reference to a type symbol? */ - def isType: Boolean - /** Is this a reference to a term symbol? */ def isTerm: Boolean = !isType @@ -229,6 +223,15 @@ object Denotations { */ def current(using Context): Denotation + /** The period during which this denotation is valid. */ + private var myValidFor: Period = Nowhere + + final def validFor: Period = myValidFor + final def validFor_=(p: Period): Unit = { + myValidFor = p + symbol.invalidateDenotCache() + } + /** Is this denotation different from NoDenotation or an ErrorDenotation? */ def exists: Boolean = true @@ -300,9 +303,9 @@ object Denotations { case NoDenotation | _: NoQualifyingRef | _: MissingRef => def argStr = if (args.isEmpty) "" else i" matching ($args%, %)" val msg = - if (site.exists) i"$site does not have a member $kind $name$argStr" - else i"missing: $kind $name$argStr" - throw new TypeError(msg) + if site.exists then em"$site does not have a member $kind $name$argStr" + else em"missing: $kind $name$argStr" + throw TypeError(msg) case denot => denot.symbol } @@ -542,8 +545,7 @@ object Denotations { tp2 match case tp2: MethodType if TypeComparer.matchingMethodParams(tp1, tp2) - && tp1.isImplicitMethod == tp2.isImplicitMethod - && tp1.isErasedMethod == tp2.isErasedMethod => + && tp1.isImplicitMethod == tp2.isImplicitMethod => val resType = infoMeet(tp1.resType, tp2.resType.subst(tp2, tp1), safeIntersection) if resType.exists then tp1.derivedLambdaType(mergeParamNames(tp1, tp2), tp1.paramInfos, resType) @@ -571,7 +573,7 @@ object Denotations { end infoMeet /** A non-overloaded denotation */ - abstract class SingleDenotation(symbol: Symbol, initInfo: Type) extends Denotation(symbol, initInfo) { + abstract class SingleDenotation(symbol: Symbol, initInfo: Type, 
isType: Boolean) extends Denotation(symbol, initInfo, isType) { protected def newLikeThis(symbol: Symbol, info: Type, pre: Type, isRefinedMethod: Boolean): SingleDenotation final def name(using Context): Name = symbol.name @@ -610,16 +612,13 @@ object Denotations { */ def signature(sourceLanguage: SourceLanguage)(using Context): Signature = if (isType) Signature.NotAMethod // don't force info if this is a type denotation - else info match { + else info match case info: MethodOrPoly => try info.signature(sourceLanguage) - catch { // !!! DEBUG - case scala.util.control.NonFatal(ex) => - report.echo(s"cannot take signature of $info") - throw ex - } + catch case ex: Exception => + if ctx.debug then report.echo(s"cannot take signature of $info") + throw ex case _ => Signature.NotAMethod - } def derivedSingleDenotation(symbol: Symbol, info: Type, pre: Type = this.prefix, isRefinedMethod: Boolean = this.isRefinedMethod)(using Context): SingleDenotation = if ((symbol eq this.symbol) && (info eq this.info) && (pre eq this.prefix) && (isRefinedMethod == this.isRefinedMethod)) this @@ -644,15 +643,19 @@ object Denotations { def atSignature(sig: Signature, targetName: Name, site: Type, relaxed: Boolean)(using Context): SingleDenotation = val situated = if site == NoPrefix then this else asSeenFrom(site) - val sigMatches = sig.matchDegree(situated.signature) match - case FullMatch => - true - case MethodNotAMethodMatch => - // See comment in `matches` - relaxed && !symbol.is(JavaDefined) - case ParamMatch => - relaxed - case noMatch => + val sigMatches = + try + sig.matchDegree(situated.signature) match + case FullMatch => + true + case MethodNotAMethodMatch => + // See comment in `matches` + relaxed && !symbol.is(JavaDefined) + case ParamMatch => + relaxed + case noMatch => + false + catch case ex: MissingType => false if sigMatches && symbol.hasTargetName(targetName) then this else NoDenotation @@ -663,14 +666,6 @@ object Denotations { // ------ Transformations 
----------------------------------------- - private var myValidFor: Period = Nowhere - - def validFor: Period = myValidFor - def validFor_=(p: Period): Unit = { - myValidFor = p - symbol.invalidateDenotCache() - } - /** The next SingleDenotation in this run, with wrap-around from last to first. * * There may be several `SingleDenotation`s with different validity @@ -694,7 +689,7 @@ object Denotations { if (validFor.firstPhaseId <= 1) this else { var current = nextInRun - while (current.validFor.code > this.myValidFor.code) current = current.nextInRun + while (current.validFor.code > this.validFor.code) current = current.nextInRun current } @@ -775,7 +770,7 @@ object Denotations { * are otherwise undefined. */ def skipRemoved(using Context): SingleDenotation = - if (myValidFor.code <= 0) nextDefined else this + if (validFor.code <= 0) nextDefined else this /** Produce a denotation that is valid for the given context. * Usually called when !(validFor contains ctx.period) @@ -792,15 +787,13 @@ object Denotations { def current(using Context): SingleDenotation = util.Stats.record("current") val currentPeriod = ctx.period - val valid = myValidFor + val valid = validFor def assertNotPackage(d: SingleDenotation, transformer: DenotTransformer) = d match case d: ClassDenotation => assert(!d.is(Package), s"illegal transformation of package denotation by transformer $transformer") case _ => - def escapeToNext = nextDefined.ensuring(_.validFor != Nowhere) - def toNewRun = util.Stats.record("current.bringForward") if exists then initial.bringForward().current else this @@ -836,9 +829,6 @@ object Denotations { // creations that way, and also avoid phase caches in contexts to get large. // To work correctly, we need to demand that the context with the new phase // is not retained in the result. 
- catch case ex: CyclicReference => - // println(s"error while transforming $this") - throw ex finally mutCtx.setPeriod(savedPeriod) if next eq cur then @@ -875,7 +865,7 @@ object Denotations { // can happen if we sit on a stale denotation which has been replaced // wholesale by an installAfter; in this case, proceed to the next // denotation and try again. - escapeToNext + nextDefined else if valid.runId != currentPeriod.runId then toNewRun else if currentPeriod.code > valid.code then @@ -962,7 +952,7 @@ object Denotations { case denot: SymDenotation => s"in ${denot.owner}" case _ => "" } - s"stale symbol; $this#${symbol.id} $ownerMsg, defined in ${myValidFor}, is referred to in run ${ctx.period}" + s"stale symbol; $this#${symbol.id} $ownerMsg, defined in ${validFor}, is referred to in run ${ctx.period}" } /** The period (interval of phases) for which there exists @@ -1148,9 +1138,9 @@ object Denotations { acc(false, symbol.info) } - abstract class NonSymSingleDenotation(symbol: Symbol, initInfo: Type, override val prefix: Type) extends SingleDenotation(symbol, initInfo) { + abstract class NonSymSingleDenotation(symbol: Symbol, initInfo: Type, override val prefix: Type) + extends SingleDenotation(symbol, initInfo, initInfo.isInstanceOf[TypeType]) { def infoOrCompleter: Type = initInfo - def isType: Boolean = infoOrCompleter.isInstanceOf[TypeType] } class UniqueRefDenotation( @@ -1246,10 +1236,10 @@ object Denotations { /** An overloaded denotation consisting of the alternatives of both given denotations. 
*/ - case class MultiDenotation(denot1: Denotation, denot2: Denotation) extends Denotation(NoSymbol, NoType) with MultiPreDenotation { + case class MultiDenotation(denot1: Denotation, denot2: Denotation) extends Denotation(NoSymbol, NoType, isType = false) with MultiPreDenotation { + validFor = denot1.validFor & denot2.validFor + final def infoOrCompleter: Type = multiHasNot("info") - final def validFor: Period = denot1.validFor & denot2.validFor - final def isType: Boolean = false final def hasUniqueSym: Boolean = false final def name(using Context): Name = denot1.name final def signature(using Context): Signature = Signature.OverloadedSignature @@ -1279,8 +1269,8 @@ object Denotations { def hasAltWith(p: SingleDenotation => Boolean): Boolean = denot1.hasAltWith(p) || denot2.hasAltWith(p) def accessibleFrom(pre: Type, superAccess: Boolean)(using Context): Denotation = { - val d1 = denot1 accessibleFrom (pre, superAccess) - val d2 = denot2 accessibleFrom (pre, superAccess) + val d1 = denot1.accessibleFrom(pre, superAccess) + val d2 = denot2.accessibleFrom(pre, superAccess) if (!d1.exists) d2 else if (!d2.exists) d1 else derivedUnionDenotation(d1, d2) diff --git a/compiler/src/dotty/tools/dotc/core/Flags.scala b/compiler/src/dotty/tools/dotc/core/Flags.scala index 8bf65ed8288f..8100bea374eb 100644 --- a/compiler/src/dotty/tools/dotc/core/Flags.scala +++ b/compiler/src/dotty/tools/dotc/core/Flags.scala @@ -315,7 +315,7 @@ object Flags { val (SuperParamAliasOrScala2x @ _, SuperParamAlias @ _, Scala2x @ _) = newFlags(26, "", "") /** A parameter with a default value / an impure untpd.FunctionWithMods type */ - val (_, HasDefault @ _, Impure @ _) = newFlags(27, "", "<{*}>") + val (_, HasDefault @ _, Impure @ _) = newFlags(27, "", "") /** An extension method, or a collective extension instance */ val (Extension @ _, ExtensionMethod @ _, _) = newFlags(28, "") @@ -350,14 +350,14 @@ object Flags { /** Symbol is a method which should be marked ACC_SYNCHRONIZED */ val (_, 
Synchronized @ _, _) = newFlags(36, "") - /** Symbol is a Java-style varargs method */ - val (_, JavaVarargs @ _, _) = newFlags(37, "") + /** Symbol is a Java-style varargs method / a Java annotation */ + val (_, JavaVarargs @ _, JavaAnnotation @ _) = newFlags(37, "", "") /** Symbol is a Java default method */ val (_, DefaultMethod @ _, _) = newFlags(38, "") /** Symbol is a transparent inline method or trait */ - val (Transparent @ _, _, _) = newFlags(39, "transparent") + val (Transparent @ _, _, TransparentType @ _) = newFlags(39, "transparent") /** Symbol is an enum class or enum case (if used with case) */ val (Enum @ _, EnumVal @ _, _) = newFlags(40, "enum") @@ -477,7 +477,7 @@ object Flags { */ val AfterLoadFlags: FlagSet = commonFlags( FromStartFlags, AccessFlags, Final, AccessorOrSealed, - Abstract, LazyOrTrait, SelfName, JavaDefined, Transparent) + Abstract, LazyOrTrait, SelfName, JavaDefined, JavaAnnotation, Transparent) /** A value that's unstable unless complemented with a Stable flag */ val UnstableValueFlags: FlagSet = Mutable | Method @@ -604,10 +604,10 @@ object Flags { val Scala2Trait: FlagSet = Scala2x | Trait val SyntheticArtifact: FlagSet = Synthetic | Artifact val SyntheticCase: FlagSet = Synthetic | Case + val SyntheticMethod: FlagSet = Synthetic | Method val SyntheticModule: FlagSet = Synthetic | Module val SyntheticOpaque: FlagSet = Synthetic | Opaque val SyntheticParam: FlagSet = Synthetic | Param val SyntheticTermParam: FlagSet = Synthetic | TermParam val SyntheticTypeParam: FlagSet = Synthetic | TypeParam - val TransparentTrait: FlagSet = Trait | Transparent } diff --git a/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala b/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala index 53fc58595472..bb65cce84042 100644 --- a/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala +++ b/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala @@ -2,36 +2,160 @@ package dotty.tools package dotc package core -import Decorators._ -import 
Contexts._ -import Types._ -import Symbols._ +import Contexts.*, Decorators.*, Symbols.*, Types.* +import NameKinds.UniqueName +import config.Printers.{gadts, gadtsConstr} import util.{SimpleIdentitySet, SimpleIdentityMap} -import collection.mutable import printing._ +import scala.annotation.tailrec +import scala.annotation.internal.sharable +import scala.collection.mutable + object GadtConstraint: - def apply(): GadtConstraint = empty - def empty: GadtConstraint = - new ProperGadtConstraint(OrderingConstraint.empty, SimpleIdentityMap.empty, SimpleIdentityMap.empty, false) + @sharable val empty: GadtConstraint = + GadtConstraint(OrderingConstraint.empty, SimpleIdentityMap.empty, SimpleIdentityMap.empty, false) /** Represents GADT constraints currently in scope */ -sealed trait GadtConstraint ( - private var myConstraint: Constraint, - private var mapping: SimpleIdentityMap[Symbol, TypeVar], - private var reverseMapping: SimpleIdentityMap[TypeParamRef, Symbol], - private var wasConstrained: Boolean -) extends Showable { - this: ConstraintHandling => - - import dotty.tools.dotc.config.Printers.{gadts, gadtsConstr} - - /** Exposes ConstraintHandling.subsumes */ - def subsumes(left: GadtConstraint, right: GadtConstraint, pre: GadtConstraint)(using Context): Boolean = { - def extractConstraint(g: GadtConstraint) = g.constraint - subsumes(extractConstraint(left), extractConstraint(right), extractConstraint(pre)) +class GadtConstraint private ( + private val myConstraint: Constraint, + private val mapping: SimpleIdentityMap[Symbol, TypeVar], + private val reverseMapping: SimpleIdentityMap[TypeParamRef, Symbol], + private val wasConstrained: Boolean, +) extends Showable: + def constraint: Constraint = myConstraint + def symbols: List[Symbol] = mapping.keys + def withConstraint(c: Constraint) = copy(myConstraint = c) + def withWasConstrained = copy(wasConstrained = true) + + def add(sym: Symbol, tv: TypeVar): GadtConstraint = copy( + mapping = mapping.updated(sym, tv), + 
reverseMapping = reverseMapping.updated(tv.origin, sym), + ) + + /** Is `sym1` ordered to be less than `sym2`? */ + def isLess(sym1: Symbol, sym2: Symbol)(using Context): Boolean = + constraint.isLess(tvarOrError(sym1).origin, tvarOrError(sym2).origin) + + /** Full bounds of `sym`, including TypeRefs to other lower/upper symbols. + * + * @note this performs subtype checks between ordered symbols. + * Using this in isSubType can lead to infinite recursion. Consider `bounds` instead. + */ + def fullBounds(sym: Symbol)(using Context): TypeBounds | Null = mapping(sym) match + case null => null + case tv: TypeVar => fullBounds(tv.origin) // .ensuring(containsNoInternalTypes(_)) + + /** Immediate bounds of `sym`. Does not contain lower/upper symbols (see [[fullBounds]]). */ + def bounds(sym: Symbol)(using Context): TypeBounds | Null = + mapping(sym) match + case null => null + case tv: TypeVar => + def retrieveBounds: TypeBounds = externalize(constraint.bounds(tv.origin)).bounds + retrieveBounds + //.showing(i"gadt bounds $sym: $result", gadts) + //.ensuring(containsNoInternalTypes(_)) + + /** Is the symbol registered in the constraint? + * + * @note this is true even if the symbol is constrained to be equal to another type, unlike [[Constraint.contains]]. 
+ */ + def contains(sym: Symbol)(using Context): Boolean = mapping(sym) != null + + /** GADT constraint narrows bounds of at least one variable */ + def isNarrowing: Boolean = wasConstrained + + def fullBounds(param: TypeParamRef)(using Context): TypeBounds = + nonParamBounds(param).derivedTypeBounds(fullLowerBound(param), fullUpperBound(param)) + + def nonParamBounds(param: TypeParamRef)(using Context): TypeBounds = + externalize(constraint.nonParamBounds(param)).bounds + + def fullLowerBound(param: TypeParamRef)(using Context): Type = + val self = externalize(param) + constraint.minLower(param).foldLeft(nonParamBounds(param).lo) { (acc, p) => + externalize(p) match + // drop any lower param that is a GADT symbol + // and is upper-bounded by a non-Any super-type of the original parameter + // e.g. in pos/i14287.min + // B$1 had info <: X and fullBounds >: B$2 <: X, and + // B$2 had info <: B$1 and fullBounds <: B$1 + // We can use the info of B$2 to drop the lower-bound of B$1 + // and return non-bidirectional bounds B$1 <: X and B$2 <: B$1. 
+ case tp: TypeRef if tp.symbol.isPatternBound && self =:= tp.info.hiBound => acc + case tp => acc | tp + } + + def fullUpperBound(param: TypeParamRef)(using Context): Type = + val self = externalize(param) + constraint.minUpper(param).foldLeft(nonParamBounds(param).hi) { (acc, u) => + externalize(u) match + case tp: TypeRef if tp.symbol.isPatternBound && self =:= tp.info.loBound => acc // like fullLowerBound + case tp => + // Any as the upper bound means "no bound", but if F is higher-kinded, + // Any & F = F[_]; this is wrong for us so we need to short-circuit + if acc.isAny then tp else acc & tp + } + + def externalize(tp: Type, theMap: TypeMap | Null = null)(using Context): Type = tp match + case param: TypeParamRef => reverseMapping(param) match + case sym: Symbol => sym.typeRef + case null => param + case tp: TypeAlias => tp.derivedAlias(externalize(tp.alias, theMap)) + case tp => (if theMap == null then ExternalizeMap() else theMap).mapOver(tp) + + private class ExternalizeMap(using Context) extends TypeMap: + def apply(tp: Type): Type = externalize(tp, this)(using mapCtx) + + def tvarOrError(sym: Symbol)(using Context): TypeVar = + mapping(sym).ensuring(_ != null, i"not a constrainable symbol: $sym").uncheckedNN + + @tailrec final def stripInternalTypeVar(tp: Type): Type = tp match + case tv: TypeVar => + val inst = constraint.instType(tv) + if inst.exists then stripInternalTypeVar(inst) else tv + case _ => tp + + def internalize(tp: Type)(using Context): Type = tp match + case nt: NamedType => + val ntTvar = mapping(nt.symbol) + if ntTvar == null then tp + else ntTvar + case _ => tp + + private def containsNoInternalTypes(tp: Type, theAcc: TypeAccumulator[Boolean] | Null = null)(using Context): Boolean = tp match { + case tpr: TypeParamRef => !reverseMapping.contains(tpr) + case tv: TypeVar => !reverseMapping.contains(tv.origin) + case tp => + (if (theAcc != null) theAcc else new ContainsNoInternalTypesAccumulator()).foldOver(true, tp) } + private class 
ContainsNoInternalTypesAccumulator(using Context) extends TypeAccumulator[Boolean] { + override def apply(x: Boolean, tp: Type): Boolean = x && containsNoInternalTypes(tp, this) + } + + override def toText(printer: Printer): Texts.Text = printer.toText(this) + + /** Provides more information than toText, by showing the underlying Constraint details. */ + def debugBoundsDescription(using Context): String = i"$this\n$constraint" + + private def copy( + myConstraint: Constraint = myConstraint, + mapping: SimpleIdentityMap[Symbol, TypeVar] = mapping, + reverseMapping: SimpleIdentityMap[TypeParamRef, Symbol] = reverseMapping, + wasConstrained: Boolean = wasConstrained, + ): GadtConstraint = GadtConstraint(myConstraint, mapping, reverseMapping, wasConstrained) +end GadtConstraint + +object GadtState: + def apply(gadt: GadtConstraint): GadtState = ProperGadtState(gadt) + +sealed trait GadtState { + this: ConstraintHandling => // Hide ConstraintHandling within GadtConstraintHandling + + def gadt: GadtConstraint + def gadt_=(g: GadtConstraint): Unit + override protected def legalBound(param: TypeParamRef, rawBound: Type, isUpper: Boolean)(using Context): Type = // GADT constraints never involve wildcards and are not propagated outside // the case where they're valid, so no approximating is needed. @@ -52,22 +176,19 @@ sealed trait GadtConstraint ( // and used as orderings. 
def substDependentSyms(tp: Type, isUpper: Boolean)(using Context): Type = { def loop(tp: Type) = substDependentSyms(tp, isUpper) - tp match { + tp match case tp @ AndType(tp1, tp2) if !isUpper => tp.derivedAndType(loop(tp1), loop(tp2)) case tp @ OrType(tp1, tp2) if isUpper => tp.derivedOrType(loop(tp1), loop(tp2)) case tp: NamedType => - params.indexOf(tp.symbol) match { + params.indexOf(tp.symbol) match case -1 => - mapping(tp.symbol) match { + gadt.internalize(tp) match case tv: TypeVar => tv.origin - case null => tp - } + case _ => tp case i => pt.paramRefs(i) - } case tp => tp - } } val tb = param.info.bounds @@ -81,186 +202,87 @@ sealed trait GadtConstraint ( val tvars = params.lazyZip(poly1.paramRefs).map { (sym, paramRef) => val tv = TypeVar(paramRef, creatorState = null) - mapping = mapping.updated(sym, tv) - reverseMapping = reverseMapping.updated(tv.origin, sym) + gadt = gadt.add(sym, tv) tv } // The replaced symbols are picked up here. addToConstraint(poly1, tvars) - .showing(i"added to constraint: [$poly1] $params%, % gadt = $this", gadts) + .showing(i"added to constraint: [$poly1] $params%, % gadt = $gadt", gadts) } /** Further constrain a symbol already present in the constraint. 
*/ def addBound(sym: Symbol, bound: Type, isUpper: Boolean)(using Context): Boolean = { - @annotation.tailrec def stripInternalTypeVar(tp: Type): Type = tp match { - case tv: TypeVar => - val inst = constraint.instType(tv) - if (inst.exists) stripInternalTypeVar(inst) else tv - case _ => tp - } - - val symTvar: TypeVar = stripInternalTypeVar(tvarOrError(sym)) match { + val symTvar: TypeVar = gadt.stripInternalTypeVar(gadt.tvarOrError(sym)) match case tv: TypeVar => tv case inst => gadts.println(i"instantiated: $sym -> $inst") - return if (isUpper) isSub(inst, bound) else isSub(bound, inst) - } + return if isUpper then isSub(inst, bound) else isSub(bound, inst) - val internalizedBound = bound match { - case nt: NamedType => - val ntTvar = mapping(nt.symbol) - if (ntTvar != null) stripInternalTypeVar(ntTvar) else bound - case _ => bound - } + val internalizedBound = gadt.stripInternalTypeVar(gadt.internalize(bound)) val saved = constraint val result = internalizedBound match case boundTvar: TypeVar => - if (boundTvar eq symTvar) true - else if (isUpper) addLess(symTvar.origin, boundTvar.origin) + if boundTvar eq symTvar then true + else if isUpper + then addLess(symTvar.origin, boundTvar.origin) else addLess(boundTvar.origin, symTvar.origin) case bound => addBoundTransitively(symTvar.origin, bound, isUpper) gadts.println { - val descr = if (isUpper) "upper" else "lower" - val op = if (isUpper) "<:" else ">:" + val descr = if isUpper then "upper" else "lower" + val op = if isUpper then "<:" else ">:" i"adding $descr bound $sym $op $bound = $result" } - if constraint ne saved then wasConstrained = true + if constraint ne saved then gadt = gadt.withWasConstrained result } - /** Is `sym1` ordered to be less than `sym2`? */ - def isLess(sym1: Symbol, sym2: Symbol)(using Context): Boolean = - constraint.isLess(tvarOrError(sym1).origin, tvarOrError(sym2).origin) - - /** Full bounds of `sym`, including TypeRefs to other lower/upper symbols. 
- * - * @note this performs subtype checks between ordered symbols. - * Using this in isSubType can lead to infinite recursion. Consider `bounds` instead. - */ - def fullBounds(sym: Symbol)(using Context): TypeBounds | Null = - mapping(sym) match { - case null => null - // TODO: Improve flow typing so that ascription becomes redundant - case tv: TypeVar => - fullBounds(tv.origin) - // .ensuring(containsNoInternalTypes(_)) - } - - /** Immediate bounds of `sym`. Does not contain lower/upper symbols (see [[fullBounds]]). */ - def bounds(sym: Symbol)(using Context): TypeBounds | Null = - mapping(sym) match { - case null => null - // TODO: Improve flow typing so that ascription becomes redundant - case tv: TypeVar => - def retrieveBounds: TypeBounds = externalize(bounds(tv.origin)).bounds - retrieveBounds - //.showing(i"gadt bounds $sym: $result", gadts) - //.ensuring(containsNoInternalTypes(_)) - } - - /** Is the symbol registered in the constraint? - * - * @note this is true even if the symbol is constrained to be equal to another type, unlike [[Constraint.contains]]. - */ - def contains(sym: Symbol)(using Context): Boolean = mapping(sym) != null - - /** GADT constraint narrows bounds of at least one variable */ - def isNarrowing: Boolean = wasConstrained - /** See [[ConstraintHandling.approximation]] */ def approximation(sym: Symbol, fromBelow: Boolean, maxLevel: Int = Int.MaxValue)(using Context): Type = { - val res = - approximation(tvarOrError(sym).origin, fromBelow, maxLevel) match - case tpr: TypeParamRef => - // Here we do externalization when the returned type is a TypeParamRef, - // b/c ConstraintHandling.approximation may return internal types when - // the type variable is instantiated. See #15531. 
- externalize(tpr) - case tp => tp - - gadts.println(i"approximating $sym ~> $res") - res + approximation(gadt.tvarOrError(sym).origin, fromBelow, maxLevel).match + case tpr: TypeParamRef => + // Here we do externalization when the returned type is a TypeParamRef, + // b/c ConstraintHandling.approximation may return internal types when + // the type variable is instantiated. See #15531. + gadt.externalize(tpr) + case tp => tp + .showing(i"approximating $sym ~> $result", gadts) } - def symbols: List[Symbol] = mapping.keys + def fresh: GadtState = GadtState(gadt) - def fresh: GadtConstraint = new ProperGadtConstraint(myConstraint, mapping, reverseMapping, wasConstrained) + /** Restore the GadtConstraint state. */ + def restore(gadt: GadtConstraint): Unit = this.gadt = gadt - /** Restore the state from other [[GadtConstraint]], probably copied using [[fresh]] */ - def restore(other: GadtConstraint): Unit = - this.myConstraint = other.myConstraint - this.mapping = other.mapping - this.reverseMapping = other.reverseMapping - this.wasConstrained = other.wasConstrained + inline def rollbackGadtUnless(inline op: Boolean): Boolean = + val saved = gadt + var result = false + try result = op + finally if !result then restore(saved) + result // ---- Protected/internal ----------------------------------------------- - override protected def constraint = myConstraint - override protected def constraint_=(c: Constraint) = myConstraint = c + override protected def constraint = gadt.constraint + override protected def constraint_=(c: Constraint) = gadt = gadt.withConstraint(c) override protected def isSub(tp1: Type, tp2: Type)(using Context): Boolean = TypeComparer.isSubType(tp1, tp2) override protected def isSame(tp1: Type, tp2: Type)(using Context): Boolean = TypeComparer.isSameType(tp1, tp2) - override def nonParamBounds(param: TypeParamRef)(using Context): TypeBounds = - externalize(constraint.nonParamBounds(param)).bounds - - override def fullLowerBound(param: 
TypeParamRef)(using Context): Type = - constraint.minLower(param).foldLeft(nonParamBounds(param).lo) { - (t, u) => t | externalize(u) - } - - override def fullUpperBound(param: TypeParamRef)(using Context): Type = - constraint.minUpper(param).foldLeft(nonParamBounds(param).hi) { (t, u) => - val eu = externalize(u) - // Any as the upper bound means "no bound", but if F is higher-kinded, - // Any & F = F[_]; this is wrong for us so we need to short-circuit - if t.isAny then eu else t & eu - } - - // ---- Private ---------------------------------------------------------- - - private def externalize(tp: Type, theMap: TypeMap | Null = null)(using Context): Type = tp match - case param: TypeParamRef => reverseMapping(param) match - case sym: Symbol => sym.typeRef - case null => param - case tp: TypeAlias => tp.derivedAlias(externalize(tp.alias, theMap)) - case tp => (if theMap == null then ExternalizeMap() else theMap).mapOver(tp) - - private class ExternalizeMap(using Context) extends TypeMap: - def apply(tp: Type): Type = externalize(tp, this)(using mapCtx) - - private def tvarOrError(sym: Symbol)(using Context): TypeVar = - mapping(sym).ensuring(_ != null, i"not a constrainable symbol: $sym").uncheckedNN - - private def containsNoInternalTypes(tp: Type, theAcc: TypeAccumulator[Boolean] | Null = null)(using Context): Boolean = tp match { - case tpr: TypeParamRef => !reverseMapping.contains(tpr) - case tv: TypeVar => !reverseMapping.contains(tv.origin) - case tp => - (if (theAcc != null) theAcc else new ContainsNoInternalTypesAccumulator()).foldOver(true, tp) - } - - private class ContainsNoInternalTypesAccumulator(using Context) extends TypeAccumulator[Boolean] { - override def apply(x: Boolean, tp: Type): Boolean = x && containsNoInternalTypes(tp, this) - } + override def nonParamBounds(param: TypeParamRef)(using Context): TypeBounds = gadt.nonParamBounds(param) + override def fullLowerBound(param: TypeParamRef)(using Context): Type = gadt.fullLowerBound(param) + 
override def fullUpperBound(param: TypeParamRef)(using Context): Type = gadt.fullUpperBound(param) // ---- Debug ------------------------------------------------------------ override def constr = gadtsConstr - - override def toText(printer: Printer): Texts.Text = printer.toText(this) - - /** Provides more information than toText, by showing the underlying Constraint details. */ - def debugBoundsDescription(using Context): String = i"$this\n$constraint" } -private class ProperGadtConstraint ( - myConstraint: Constraint, - mapping: SimpleIdentityMap[Symbol, TypeVar], - reverseMapping: SimpleIdentityMap[TypeParamRef, Symbol], - wasConstrained: Boolean, -) extends ConstraintHandling with GadtConstraint(myConstraint, mapping, reverseMapping, wasConstrained) +// Hide ConstraintHandling within GadtState +private class ProperGadtState(private var myGadt: GadtConstraint) extends ConstraintHandling with GadtState: + def gadt: GadtConstraint = myGadt + def gadt_=(gadt: GadtConstraint): Unit = myGadt = gadt diff --git a/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala b/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala index 062ddd5e846c..60ebc95e7bed 100644 --- a/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala +++ b/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala @@ -100,16 +100,16 @@ object MatchTypeTrace: case TryReduce(scrut: Type) => i" trying to reduce $scrut" case NoMatches(scrut, cases) => - i""" failed since selector $scrut + i""" failed since selector $scrut | matches none of the cases | | ${casesText(cases)}""" case EmptyScrutinee(scrut) => - i""" failed since selector $scrut + i""" failed since selector $scrut | is uninhabited (there are no values of that type).""" case Stuck(scrut, stuckCase, otherCases) => val msg = - i""" failed since selector $scrut + i""" failed since selector $scrut | does not match ${caseText(stuckCase)} | and cannot be shown to be disjoint from it either.""" if otherCases.length == 0 then msg @@ -121,14 +121,14 
@@ object MatchTypeTrace: | ${casesText(otherCases)}""" case NoInstance(scrut, stuckCase, fails) => def params = if fails.length == 1 then "parameter" else "parameters" - i""" failed since selector $scrut + i""" failed since selector $scrut | does not uniquely determine $params ${fails.map(_._1)}%, % in | ${caseText(stuckCase)} | The computed bounds for the $params are: | ${fails.map((name, bounds) => i"$name$bounds")}%\n %""" def noMatchesText(scrut: Type, cases: List[Type])(using Context): String = - i"""failed since selector $scrut + i"""failed since selector $scrut |matches none of the cases | | ${casesText(cases)}""" diff --git a/compiler/src/dotty/tools/dotc/core/Mode.scala b/compiler/src/dotty/tools/dotc/core/Mode.scala index 33ac3de70767..ea63eb6a419b 100644 --- a/compiler/src/dotty/tools/dotc/core/Mode.scala +++ b/compiler/src/dotty/tools/dotc/core/Mode.scala @@ -70,10 +70,14 @@ object Mode { /** We are currently unpickling Scala2 info */ val Scala2Unpickling: Mode = newMode(13, "Scala2Unpickling") - /** We are currently checking bounds to be non-empty, so we should not - * do any widening when computing members of refined types. + /** Signifies one of two possible situations: + * 1. We are currently checking bounds to be non-empty, so we should not + * do any widening when computing members of refined types. + * 2. We are currently checking self type conformance, so we should not + * ignore capture sets added to otherwise pure classes (only needed + * for capture checking). */ - val CheckBounds: Mode = newMode(14, "CheckBounds") + val CheckBoundsOrSelfType: Mode = newMode(14, "CheckBoundsOrSelfType") /** Use Scala2 scheme for overloading and implicit resolution */ val OldOverloadingResolution: Mode = newMode(15, "OldOverloadingResolution") @@ -137,4 +141,7 @@ object Mode { * Type `Null` becomes a subtype of non-primitive value types in TypeComparer. 
*/ val RelaxedOverriding: Mode = newMode(30, "RelaxedOverriding") + + /** We are checking the original call of an Inlined node */ + val InlinedCall: Mode = newMode(31, "InlinedCall") } diff --git a/compiler/src/dotty/tools/dotc/core/NameKinds.scala b/compiler/src/dotty/tools/dotc/core/NameKinds.scala index f71c16e82b70..2c968ab9446c 100644 --- a/compiler/src/dotty/tools/dotc/core/NameKinds.scala +++ b/compiler/src/dotty/tools/dotc/core/NameKinds.scala @@ -300,6 +300,7 @@ object NameKinds { val UniqueInlineName: UniqueNameKind = new UniqueNameKind("$i") val InlineScrutineeName: UniqueNameKind = new UniqueNameKind("$scrutinee") val InlineBinderName: UniqueNameKind = new UniqueNameKind("$proxy") + val MacroNames: UniqueNameKind = new UniqueNameKind("$macro$") /** A kind of unique extension methods; Unlike other unique names, these can be * unmangled. @@ -324,6 +325,8 @@ object NameKinds { val LocalOptInlineLocalObj: UniqueNameKind = new UniqueNameKind("ilo") + val BoundaryName: UniqueNameKind = new UniqueNameKind("boundary") + /** The kind of names of default argument getters */ val DefaultGetterName: NumberedNameKind = new NumberedNameKind(DEFAULTGETTER, "DefaultGetter") { def mkString(underlying: TermName, info: ThisInfo) = { diff --git a/compiler/src/dotty/tools/dotc/core/NameOps.scala b/compiler/src/dotty/tools/dotc/core/NameOps.scala index 7c1073852681..04440c9e9b39 100644 --- a/compiler/src/dotty/tools/dotc/core/NameOps.scala +++ b/compiler/src/dotty/tools/dotc/core/NameOps.scala @@ -86,11 +86,17 @@ object NameOps { def isVarPattern: Boolean = testSimple { n => n.length > 0 && { + def isLowerLetterSupplementary: Boolean = + import Character.{isHighSurrogate, isLowSurrogate, isLetter, isLowerCase, isValidCodePoint, toCodePoint} + isHighSurrogate(n(0)) && n.length > 1 && isLowSurrogate(n(1)) && { + val codepoint = toCodePoint(n(0), n(1)) + isValidCodePoint(codepoint) && isLetter(codepoint) && isLowerCase(codepoint) + } val first = n.head - (((first.isLower && 
first.isLetter) || first == '_') - && (n != false_) - && (n != true_) - && (n != null_)) + ((first.isLower && first.isLetter || first == '_' || isLowerLetterSupplementary) + && n != false_ + && n != true_ + && n != null_) } } || name.is(PatMatGivenVarName) @@ -98,7 +104,7 @@ object NameOps { case raw.NE | raw.LE | raw.GE | EMPTY => false case name: SimpleName => - name.length > 0 && name.last == '=' && name.head != '=' && isOperatorPart(name.head) + name.length > 0 && name.last == '=' && name.head != '=' && isOperatorPart(name.firstCodePoint) case _ => false } @@ -208,7 +214,7 @@ object NameOps { if str == mustHave then found = true idx + str.length else idx - skip(skip(skip(0, "Impure"), "Erased"), "Context") == suffixStart + skip(skip(0, "Impure"), "Context") == suffixStart && found } @@ -219,10 +225,11 @@ object NameOps { private def checkedFunArity(suffixStart: Int)(using Context): Int = if isFunctionPrefix(suffixStart) then funArity(suffixStart) else -1 - /** Is a function name, i.e one of FunctionXXL, FunctionN, ContextFunctionN, ErasedFunctionN, ErasedContextFunctionN for N >= 0 + /** Is a function name, i.e one of FunctionXXL, FunctionN, ContextFunctionN, ImpureFunctionN, ImpureContextFunctionN for N >= 0 */ def isFunction(using Context): Boolean = - (name eq tpnme.FunctionXXL) || checkedFunArity(functionSuffixStart) >= 0 + (name eq tpnme.FunctionXXL) + || checkedFunArity(functionSuffixStart) >= 0 /** Is a function name * - FunctionN for N >= 0 @@ -235,14 +242,11 @@ object NameOps { isFunctionPrefix(suffixStart, mustHave) && funArity(suffixStart) >= 0 def isContextFunction(using Context): Boolean = isSpecificFunction("Context") - def isErasedFunction(using Context): Boolean = isSpecificFunction("Erased") def isImpureFunction(using Context): Boolean = isSpecificFunction("Impure") /** Is a synthetic function name, i.e. 
one of * - FunctionN for N > 22 * - ContextFunctionN for N >= 0 - * - ErasedFunctionN for N >= 0 - * - ErasedContextFunctionN for N >= 0 */ def isSyntheticFunction(using Context): Boolean = val suffixStart = functionSuffixStart @@ -352,6 +356,14 @@ object NameOps { val unmangled = kinds.foldLeft(name)(_.unmangle(_)) if (unmangled eq name) name else unmangled.unmangle(kinds) } + + def firstCodePoint: Int = + val first = name.firstPart + import Character.{isHighSurrogate, isLowSurrogate, isValidCodePoint, toCodePoint} + if isHighSurrogate(first(0)) && first.length > 1 && isLowSurrogate(first(1)) then + val codepoint = toCodePoint(first(0), first(1)) + if isValidCodePoint(codepoint) then codepoint else first(0) + else first(0) } extension (name: TermName) { diff --git a/compiler/src/dotty/tools/dotc/core/NamerOps.scala b/compiler/src/dotty/tools/dotc/core/NamerOps.scala index fa0a89349b5e..dc09edd79781 100644 --- a/compiler/src/dotty/tools/dotc/core/NamerOps.scala +++ b/compiler/src/dotty/tools/dotc/core/NamerOps.scala @@ -42,10 +42,10 @@ object NamerOps: case Nil => resultType case TermSymbols(params) :: paramss1 => - val (isContextual, isImplicit, isErased) = - if params.isEmpty then (false, false, false) - else (params.head.is(Given), params.head.is(Implicit), params.head.is(Erased)) - val make = MethodType.companion(isContextual = isContextual, isImplicit = isImplicit, isErased = isErased) + val (isContextual, isImplicit) = + if params.isEmpty then (false, false) + else (params.head.is(Given), params.head.is(Implicit)) + val make = MethodType.companion(isContextual = isContextual, isImplicit = isImplicit) if isJava then for param <- params do if param.info.isDirectRef(defn.ObjectClass) then param.info = defn.AnyType @@ -67,11 +67,11 @@ object NamerOps: completer.withSourceModule(findModuleBuddy(name.sourceModuleName, scope)) /** Find moduleClass/sourceModule in effective scope */ - def findModuleBuddy(name: Name, scope: Scope)(using Context) = { - val it = 
scope.lookupAll(name).filter(_.is(Module)) - if (it.hasNext) it.next() - else NoSymbol.assertingErrorsReported(s"no companion $name in $scope") - } + def findModuleBuddy(name: Name, scope: Scope, alternate: Name = EmptyTermName)(using Context): Symbol = + var it = scope.lookupAll(name).filter(_.is(Module)) + if !alternate.isEmpty then it ++= scope.lookupAll(alternate).filter(_.is(Module)) + if it.hasNext then it.next() + else NoSymbol.assertingErrorsReported(em"no companion $name in $scope") /** If a class has one of these flags, it does not get a constructor companion */ private val NoConstructorProxyNeededFlags = Abstract | Trait | Case | Synthetic | Module | Invisible @@ -212,11 +212,11 @@ object NamerOps: * by (ab?)-using GADT constraints. See pos/i941.scala. */ def linkConstructorParams(sym: Symbol, tparams: List[Symbol], rhsCtx: Context)(using Context): Unit = - rhsCtx.gadt.addToConstraint(tparams) + rhsCtx.gadtState.addToConstraint(tparams) tparams.lazyZip(sym.owner.typeParams).foreach { (psym, tparam) => val tr = tparam.typeRef - rhsCtx.gadt.addBound(psym, tr, isUpper = false) - rhsCtx.gadt.addBound(psym, tr, isUpper = true) + rhsCtx.gadtState.addBound(psym, tr, isUpper = false) + rhsCtx.gadtState.addBound(psym, tr, isUpper = true) } end NamerOps diff --git a/compiler/src/dotty/tools/dotc/core/Names.scala b/compiler/src/dotty/tools/dotc/core/Names.scala index f13c3a184bf9..1e08379b57f0 100644 --- a/compiler/src/dotty/tools/dotc/core/Names.scala +++ b/compiler/src/dotty/tools/dotc/core/Names.scala @@ -15,8 +15,8 @@ import scala.annotation.internal.sharable object Names { import NameKinds._ - /** Things that can be turned into names with `totermName` and `toTypeName` - * Decorators defines implements these as extension methods for strings. + /** Things that can be turned into names with `toTermName` and `toTypeName`. + * Decorators implements these as extension methods for strings. 
*/ type PreName = Name | String @@ -25,7 +25,7 @@ object Names { */ abstract class Designator - /** A name if either a term name or a type name. Term names can be simple + /** A name is either a term name or a type name. Term names can be simple * or derived. A simple term name is essentially an interned string stored * in a name table. A derived term name adds a tag, and possibly a number * or a further simple name to some other name. diff --git a/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala b/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala index 1341fac7d735..0328cea9b3ca 100644 --- a/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala +++ b/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala @@ -16,27 +16,34 @@ import cc.{CapturingType, derivedCapturingType} object OrderingConstraint { - type ArrayValuedMap[T] = SimpleIdentityMap[TypeLambda, Array[T]] + /** If true, use reverse dependencies in `replace` to avoid checking the bounds + * of all parameters in the constraint. This can speed things up, but there are some + * rare corner cases where reverse dependencies miss a parameter. Specifically, + * if a constraint contains a free reference to TypeParam P and afterwards the + * same P is added as a bound variable to the constraint, a backwards link would + * then become necessary at this point but is missing. This causes two CB projects + * to fail when reverse dependencies are checked (parboiled2 and perspective). + * In these rare cases `replace` could behave differently when optimized. However, + * no deviation was found in the two projects. It is not clear what the "right" + * behavior of `replace` should be in these cases. Normally, PolyTypes added + * to constraints are supposed to be fresh, so that would mean that the behavior + * with optimizeReplace = true would be correct. But the previous behavior without + * reverse dependency checking corresponds to `optimizeReplace = false`. 
This behavior + * makes sense if we assume that the added polytype was simply added too late, so we + * want to establish the link between newly bound variable and pre-existing reference. + */ + private final val optimizeReplace = true + + private type ArrayValuedMap[T] = SimpleIdentityMap[TypeLambda, Array[T]] /** The type of `OrderingConstraint#boundsMap` */ - type ParamBounds = ArrayValuedMap[Type] + private type ParamBounds = ArrayValuedMap[Type] /** The type of `OrderingConstraint#lowerMap`, `OrderingConstraint#upperMap` */ - type ParamOrdering = ArrayValuedMap[List[TypeParamRef]] - - /** A new constraint with given maps and given set of hard typevars */ - private def newConstraint( - boundsMap: ParamBounds, lowerMap: ParamOrdering, upperMap: ParamOrdering, - hardVars: TypeVars)(using Context) : OrderingConstraint = - if boundsMap.isEmpty && lowerMap.isEmpty && upperMap.isEmpty then - empty - else - val result = new OrderingConstraint(boundsMap, lowerMap, upperMap, hardVars) - if ctx.run != null then ctx.run.nn.recordConstraintSize(result, result.boundsMap.size) - result + private type ParamOrdering = ArrayValuedMap[List[TypeParamRef]] /** A lens for updating a single entry array in one of the three constraint maps */ - abstract class ConstraintLens[T <: AnyRef: ClassTag] { + private abstract class ConstraintLens[T <: AnyRef: ClassTag] { def entries(c: OrderingConstraint, poly: TypeLambda): Array[T] | Null def updateEntries(c: OrderingConstraint, poly: TypeLambda, entries: Array[T])(using Context): OrderingConstraint def initial: T @@ -47,7 +54,7 @@ object OrderingConstraint { } /** The `current` constraint but with the entry for `param` updated to `entry`. - * `current` is used linearly. If it is different from `prev` it is + * `current` is used linearly. If it is different from `prev` then `current` is * known to be dead after the call. Hence it is OK to update destructively * parts of `current` which are not shared by `prev`. 
*/ @@ -89,27 +96,27 @@ object OrderingConstraint { map(prev, current, param.binder, param.paramNum, f) } - val boundsLens: ConstraintLens[Type] = new ConstraintLens[Type] { + private val boundsLens: ConstraintLens[Type] = new ConstraintLens[Type] { def entries(c: OrderingConstraint, poly: TypeLambda): Array[Type] | Null = c.boundsMap(poly) def updateEntries(c: OrderingConstraint, poly: TypeLambda, entries: Array[Type])(using Context): OrderingConstraint = - newConstraint(c.boundsMap.updated(poly, entries), c.lowerMap, c.upperMap, c.hardVars) + c.newConstraint(boundsMap = c.boundsMap.updated(poly, entries)) def initial = NoType } - val lowerLens: ConstraintLens[List[TypeParamRef]] = new ConstraintLens[List[TypeParamRef]] { + private val lowerLens: ConstraintLens[List[TypeParamRef]] = new ConstraintLens[List[TypeParamRef]] { def entries(c: OrderingConstraint, poly: TypeLambda): Array[List[TypeParamRef]] | Null = c.lowerMap(poly) def updateEntries(c: OrderingConstraint, poly: TypeLambda, entries: Array[List[TypeParamRef]])(using Context): OrderingConstraint = - newConstraint(c.boundsMap, c.lowerMap.updated(poly, entries), c.upperMap, c.hardVars) + c.newConstraint(lowerMap = c.lowerMap.updated(poly, entries)) def initial = Nil } - val upperLens: ConstraintLens[List[TypeParamRef]] = new ConstraintLens[List[TypeParamRef]] { + private val upperLens: ConstraintLens[List[TypeParamRef]] = new ConstraintLens[List[TypeParamRef]] { def entries(c: OrderingConstraint, poly: TypeLambda): Array[List[TypeParamRef]] | Null = c.upperMap(poly) def updateEntries(c: OrderingConstraint, poly: TypeLambda, entries: Array[List[TypeParamRef]])(using Context): OrderingConstraint = - newConstraint(c.boundsMap, c.lowerMap, c.upperMap.updated(poly, entries), c.hardVars) + c.newConstraint(upperMap = c.upperMap.updated(poly, entries)) def initial = Nil } @@ -143,11 +150,27 @@ class OrderingConstraint(private val boundsMap: ParamBounds, private val lowerMap : ParamOrdering, private val upperMap : 
ParamOrdering, private val hardVars : TypeVars) extends Constraint { + thisConstraint => import UnificationDirection.* type This = OrderingConstraint + /** A new constraint with given maps and given set of hard typevars */ + private def newConstraint( + boundsMap: ParamBounds = this.boundsMap, + lowerMap: ParamOrdering = this.lowerMap, + upperMap: ParamOrdering = this.upperMap, + hardVars: TypeVars = this.hardVars)(using Context) : OrderingConstraint = + if boundsMap.isEmpty && lowerMap.isEmpty && upperMap.isEmpty then + empty + else + val result = new OrderingConstraint(boundsMap, lowerMap, upperMap, hardVars) + if ctx.run != null then ctx.run.nn.recordConstraintSize(result, result.boundsMap.size) + result.coDeps = this.coDeps + result.contraDeps = this.contraDeps + result + // ----------- Basic indices -------------------------------------------------- /** The number of type parameters in the given entry array */ @@ -201,6 +224,17 @@ class OrderingConstraint(private val boundsMap: ParamBounds, def exclusiveUpper(param: TypeParamRef, butNot: TypeParamRef): List[TypeParamRef] = upper(param).filterNot(isLess(butNot, _)) + def bounds(param: TypeParamRef)(using Context): TypeBounds = { + val e = entry(param) + if (e.exists) e.bounds + else { + // TODO: should we change the type of paramInfos to nullable? 
+ val pinfos: List[param.binder.PInfo] | Null = param.binder.paramInfos + if (pinfos != null) pinfos(param.paramNum) // pinfos == null happens in pos/i536.scala + else TypeBounds.empty + } + } + // ---------- Info related to TypeParamRefs ------------------------------------------- def isLess(param1: TypeParamRef, param2: TypeParamRef): Boolean = @@ -217,6 +251,199 @@ class OrderingConstraint(private val boundsMap: ParamBounds, if tvar == null then NoType else tvar +// ------------- Type parameter dependencies ---------------------------------------- + + private type ReverseDeps = SimpleIdentityMap[TypeParamRef, SimpleIdentitySet[TypeParamRef]] + + /** A map that associates type parameters of this constraint with all other type + * parameters that refer to them in their bounds covariantly, such that, if the + * type parameter is instantiated to a larger type, the constraint would be narrowed + * (i.e. solution set changes other than simply being made larger). + */ + private var coDeps: ReverseDeps = SimpleIdentityMap.empty + + /** A map that associates type parameters of this constraint with all other type + * parameters that refer to them in their bounds covariantly, such that, if the + * type parameter is instantiated to a smaller type, the constraint would be narrowed. + * (i.e. solution set changes other than simply being made larger). 
+ */ + private var contraDeps: ReverseDeps = SimpleIdentityMap.empty + + /** Null-safe indexing */ + extension (deps: ReverseDeps) def at(param: TypeParamRef): SimpleIdentitySet[TypeParamRef] = + val result = deps(param) + if null == result // swapped operand order important since `==` is overloaded in `SimpleIdentitySet` + then SimpleIdentitySet.empty + else result + + override def dependsOn(tv: TypeVar, except: TypeVars, co: Boolean)(using Context): Boolean = + def origin(tv: TypeVar) = + assert(!instType(tv).exists) + tv.origin + val param = origin(tv) + val excluded = except.map(origin) + val qualifies: TypeParamRef => Boolean = !excluded.contains(_) + def test(deps: ReverseDeps, lens: ConstraintLens[List[TypeParamRef]]) = + deps.at(param).exists(qualifies) + || lens(this, tv.origin.binder, tv.origin.paramNum).exists(qualifies) + if co then test(coDeps, upperLens) else test(contraDeps, lowerLens) + + /** Modify traversals in two respects: + * - when encountering an application C[Ts], where C is a type variable or parameter + * that has an instantiation in this constraint, assume the type parameters of + * the instantiation instead of the type parameters of C when traversing the + * arguments Ts. That can make a difference for the variance in which an argument + * is traversed. Example constraint: + * + * constrained types: C[X], A + * A >: C[B] + * C := Option + * + * Here, B is traversed with variance +1 instead of 0. Test case: pos/t3152.scala + * + * - When typing a prefx, don't avoid negative variances. This matters only for the + * corner case where a parameter is instantiated to Nothing (see comment in + * TypeAccumulator#applyToPrefix). When determining instantiation directions in + * interpolations (which is what dependency variances are for), it can be ignored. + */ + private trait ConstraintAwareTraversal[T] extends TypeAccumulator[T]: + + /** Does `param` have bounds in the current constraint? 
*/ + protected def hasBounds(param: TypeParamRef): Boolean = entry(param).isInstanceOf[TypeBounds] + + override def tyconTypeParams(tp: AppliedType)(using Context): List[ParamInfo] = + def tparams(tycon: Type): List[ParamInfo] = tycon match + case tycon: TypeVar if !tycon.inst.exists => tparams(tycon.origin) + case tycon: TypeParamRef if !hasBounds(tycon) => + val entryParams = entry(tycon).typeParams + if entryParams.nonEmpty then entryParams + else tp.tyconTypeParams + case _ => tp.tyconTypeParams + tparams(tp.tycon) + + override def applyToPrefix(x: T, tp: NamedType): T = + this(x, tp.prefix) + end ConstraintAwareTraversal + + /** A type traverser that adjust dependencies originating from a given type + * @param ignoreBinding if not null, a parameter that is assumed to be still uninstantiated. + * This is necessary to handle replacements. + */ + private class Adjuster(srcParam: TypeParamRef, ignoreBinding: TypeParamRef | Null)(using Context) + extends TypeTraverser, ConstraintAwareTraversal[Unit]: + + var add: Boolean = compiletime.uninitialized + val seen = util.HashSet[LazyRef]() + + override protected def hasBounds(param: TypeParamRef) = + (param eq ignoreBinding) || super.hasBounds(param) + + def update(deps: ReverseDeps, referenced: TypeParamRef): ReverseDeps = + val prev = deps.at(referenced) + val newSet = if add then prev + srcParam else prev - srcParam + if newSet.isEmpty then deps.remove(referenced) + else deps.updated(referenced, newSet) + + def traverse(t: Type) = try + t match + case param: TypeParamRef => + if hasBounds(param) then + if variance >= 0 then coDeps = update(coDeps, param) + if variance <= 0 then contraDeps = update(contraDeps, param) + else + traverse(entry(param)) + case tp: LazyRef => + if !seen.contains(tp) then + seen += tp + traverse(tp.ref) + case _ => traverseChildren(t) + catch case ex: Throwable => handleRecursive("adjust", t.show, ex) + end Adjuster + + /** Adjust dependencies to account for the delta of previous entry 
`prevEntry` + * and the new bound `entry` for the type parameter `srcParam`. + */ + def adjustDeps(entry: Type | Null, prevEntry: Type | Null, srcParam: TypeParamRef, ignoreBinding: TypeParamRef | Null = null)(using Context): this.type = + val adjuster = new Adjuster(srcParam, ignoreBinding) + + /** Adjust reverse dependencies of all type parameters referenced by `bound` + * @param isLower `bound` is a lower bound + * @param add if true, add referenced variables to dependencoes, otherwise drop them. + */ + def adjustReferenced(bound: Type, isLower: Boolean, add: Boolean) = + adjuster.variance = if isLower then 1 else -1 + adjuster.add = add + adjuster.seen.clear(resetToInitial = false) + adjuster.traverse(bound) + + /** Use an optimized strategy to adjust dependencies to account for the delta + * of previous bound `prevBound` and new bound `bound`: If `prevBound` is some + * and/or prefix of `bound`, and `baseCase` is true, just add the new parts of `bound`. + * @param isLower `bound` and `prevBound` are lower bounds + * @return true iff the delta strategy succeeded, false if it failed in which case + * the constraint is left unchanged. + */ + def adjustDelta(bound: Type, prevBound: Type, isLower: Boolean, baseCase: => Boolean): Boolean = + if bound eq prevBound then + baseCase + else bound match + case bound: AndOrType => + adjustDelta(bound.tp1, prevBound, isLower, baseCase) && { + adjustReferenced(bound.tp2, isLower, add = true) + true + } + case _ => false + + /** Add or remove depenencies referenced in `bounds`. 
+ * @param add if true, dependecies are added, otherwise they are removed + */ + def adjustBounds(bounds: TypeBounds, add: Boolean) = + adjustReferenced(bounds.lo, isLower = true, add) + adjustReferenced(bounds.hi, isLower = false, add) + + entry match + case entry @ TypeBounds(lo, hi) => + prevEntry match + case prevEntry @ TypeBounds(plo, phi) => + if !adjustDelta(lo, plo, isLower = true, + adjustDelta(hi, phi, isLower = false, true)) + then + adjustBounds(prevEntry, add = false) + adjustBounds(entry, add = true) + case _ => + adjustBounds(entry, add = true) + case _ => + prevEntry match + case prevEntry: TypeBounds => + adjustBounds(prevEntry, add = false) + case _ => + dropDeps(srcParam) // srcParam is instantiated, so its dependencies can be dropped + this + end adjustDeps + + /** Adjust dependencies to account for adding or dropping all `entries` associated + * with `poly`. + * @param add if true, entries is added, otherwise it is dropped + */ + def adjustDeps(poly: TypeLambda, entries: Array[Type], add: Boolean)(using Context): this.type = + for n <- 0 until paramCount(entries) do + if add + then adjustDeps(entries(n), NoType, poly.paramRefs(n)) + else adjustDeps(NoType, entries(n), poly.paramRefs(n)) + this + + /** Remove all reverse dependencies of `param` */ + def dropDeps(param: TypeParamRef)(using Context): Unit = + coDeps = coDeps.remove(param) + contraDeps = contraDeps.remove(param) + + /** A string representing the two dependency maps */ + def depsToString(using Context): String = + def depsStr(deps: ReverseDeps): String = + def depStr(param: TypeParamRef) = i"$param --> ${deps.at(param).toList}%, %" + if deps.isEmpty then "" else i"\n ${deps.toList.map((k, v) => depStr(k))}%\n %" + i" co-deps:${depsStr(coDeps)}\n contra-deps:${depsStr(contraDeps)}\n" + // ---------- Adding TypeLambdas -------------------------------------------------- /** The bound type `tp` without constrained parameters which are clearly @@ -282,7 +509,8 @@ class 
OrderingConstraint(private val boundsMap: ParamBounds, val entries1 = new Array[Type](nparams * 2) poly.paramInfos.copyToArray(entries1, 0) tvars.copyToArray(entries1, nparams) - newConstraint(boundsMap.updated(poly, entries1), lowerMap, upperMap, hardVars).init(poly) + newConstraint(boundsMap = this.boundsMap.updated(poly, entries1)) + .init(poly) } /** Split dependent parameters off the bounds for parameters in `poly`. @@ -298,31 +526,23 @@ class OrderingConstraint(private val boundsMap: ParamBounds, val param = poly.paramRefs(i) val bounds = dropWildcards(nonParamBounds(param)) val stripped = stripParams(bounds, todos, isUpper = true) - current = updateEntry(current, param, stripped) + current = boundsLens.update(this, current, param, stripped) while todos.nonEmpty do current = todos.head(current, param) todos.dropInPlace(1) i += 1 } - current.checkNonCyclic() + current.adjustDeps(poly, current.boundsMap(poly).nn, add = true) + .checkWellFormed() } // ---------- Updates ------------------------------------------------------------ - /** If `inst` is a TypeBounds, make sure it does not contain toplevel - * references to `param` (see `Constraint#occursAtToplevel` for a definition - * of "toplevel"). - * Any such references are replaced by `Nothing` in the lower bound and `Any` - * in the upper bound. - * References can be direct or indirect through instantiations of other - * parameters in the constraint. 
- */ - private def ensureNonCyclic(param: TypeParamRef, inst: Type)(using Context): Type = - - def recur(tp: Type, fromBelow: Boolean): Type = tp match + def validBoundFor(param: TypeParamRef, bound: Type, isUpper: Boolean)(using Context): Type = + def recur(tp: Type): Type = tp match case tp: AndOrType => - val r1 = recur(tp.tp1, fromBelow) - val r2 = recur(tp.tp2, fromBelow) + val r1 = recur(tp.tp1) + val r2 = recur(tp.tp2) if (r1 eq tp.tp1) && (r2 eq tp.tp2) then tp else tp.match case tp: OrType => @@ -331,35 +551,34 @@ class OrderingConstraint(private val boundsMap: ParamBounds, r1 & r2 case tp: TypeParamRef => if tp eq param then - if fromBelow then defn.NothingType else defn.AnyType + if isUpper then defn.AnyType else defn.NothingType else entry(tp) match case NoType => tp - case TypeBounds(lo, hi) => if lo eq hi then recur(lo, fromBelow) else tp - case inst => recur(inst, fromBelow) + case TypeBounds(lo, hi) => if lo eq hi then recur(lo) else tp + case inst => recur(inst) case tp: TypeVar => - val underlying1 = recur(tp.underlying, fromBelow) + val underlying1 = recur(tp.underlying) if underlying1 ne tp.underlying then underlying1 else tp case CapturingType(parent, refs) => - val parent1 = recur(parent, fromBelow) + val parent1 = recur(parent) if parent1 ne parent then tp.derivedCapturingType(parent1, refs) else tp case tp: AnnotatedType => - val parent1 = recur(tp.parent, fromBelow) + val parent1 = recur(tp.parent) if parent1 ne tp.parent then tp.derivedAnnotatedType(parent1, tp.annot) else tp case _ => val tp1 = tp.dealiasKeepAnnots if tp1 ne tp then - val tp2 = recur(tp1, fromBelow) + val tp2 = recur(tp1) if tp2 ne tp1 then tp2 else tp else tp - inst match - case bounds: TypeBounds => - bounds.derivedTypeBounds( - recur(bounds.lo, fromBelow = true), - recur(bounds.hi, fromBelow = false)) - case _ => - inst - end ensureNonCyclic + recur(bound) + end validBoundFor + + def validBoundsFor(param: TypeParamRef, bounds: TypeBounds)(using Context): Type = + 
bounds.derivedTypeBounds( + validBoundFor(param, bounds.lo, isUpper = false), + validBoundFor(param, bounds.hi, isUpper = true)) /** Add the fact `param1 <: param2` to the constraint `current` and propagate * `<:<` relationships between parameters ("edges") but not bounds. @@ -418,7 +637,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds, case param: TypeParamRef if contains(param) => param :: (if (isUpper) upper(param) else lower(param)) case tp: AndType if isUpper => - dependentParams(tp.tp1, isUpper) | (dependentParams(tp.tp2, isUpper)) + dependentParams(tp.tp1, isUpper).setUnion(dependentParams(tp.tp2, isUpper)) case tp: OrType if !isUpper => dependentParams(tp.tp1, isUpper).intersect(dependentParams(tp.tp2, isUpper)) case EtaExpansion(tycon) => @@ -426,10 +645,12 @@ class OrderingConstraint(private val boundsMap: ParamBounds, case _ => Nil - private def updateEntry(current: This, param: TypeParamRef, tp: Type)(using Context): This = { - if Config.checkNoWildcardsInConstraint then assert(!tp.containsWildcardTypes) - var current1 = boundsLens.update(this, current, param, tp) - tp match { + private def updateEntry(current: This, param: TypeParamRef, newEntry: Type)(using Context): This = { + if Config.checkNoWildcardsInConstraint then assert(!newEntry.containsWildcardTypes) + val oldEntry = current.entry(param) + var current1 = boundsLens.update(this, current, param, newEntry) + .adjustDeps(newEntry, oldEntry, param) + newEntry match { case TypeBounds(lo, hi) => for p <- dependentParams(lo, isUpper = false) do current1 = order(current1, p, param) @@ -440,12 +661,11 @@ class OrderingConstraint(private val boundsMap: ParamBounds, current1 } - /** The public version of `updateEntry`. 
Guarantees that there are no cycles */ def updateEntry(param: TypeParamRef, tp: Type)(using Context): This = - updateEntry(this, param, ensureNonCyclic(param, tp)).checkNonCyclic() + updateEntry(this, param, tp).checkWellFormed() def addLess(param1: TypeParamRef, param2: TypeParamRef, direction: UnificationDirection)(using Context): This = - order(this, param1, param2, direction).checkNonCyclic() + order(this, param1, param2, direction).checkWellFormed() // ---------- Replacements and Removals ------------------------------------- @@ -455,24 +675,80 @@ class OrderingConstraint(private val boundsMap: ParamBounds, */ def replace(param: TypeParamRef, tp: Type)(using Context): OrderingConstraint = val replacement = tp.dealiasKeepAnnots.stripTypeVar - if param == replacement then this.checkNonCyclic() + if param == replacement then this.checkWellFormed() else assert(replacement.isValueTypeOrLambda) - var current = - if isRemovable(param.binder) then remove(param.binder) - else updateEntry(this, param, replacement) - - def removeParam(ps: List[TypeParamRef]) = ps.filterConserve(param ne _) - def replaceParam(tp: Type, atPoly: TypeLambda, atIdx: Int): Type = - current.ensureNonCyclic(atPoly.paramRefs(atIdx), tp.substParam(param, replacement)) - - current.foreachParam { (p, i) => - current = boundsLens.map(this, current, p, i, replaceParam(_, p, i)) - current = lowerLens.map(this, current, p, i, removeParam) - current = upperLens.map(this, current, p, i, removeParam) - } - current.checkNonCyclic() + val replacedTypeVar = typeVarOfParam(param) + //println(i"replace $param with $replacement in $this") + + def mapReplacedTypeVarTo(to: Type) = new TypeMap: + override def apply(t: Type): Type = + if (t eq replacedTypeVar) && t.exists then to else mapOver(t) + + val coDepsOfParam = coDeps.at(param) + val contraDepsOfParam = contraDeps.at(param) + + var current = updateEntry(this, param, replacement) + // Need to update param early to avoid infinite recursion on instantiation. 
+ // See i16311.scala for a test case. On the other hand, for the purposes of + // dependency adjustment, we need to pretend that `param` is still unbound. + // We achieve that by passing a `ignoreBinding = param` to `adjustDeps` below. + + def removeParamFrom(ps: List[TypeParamRef]) = + ps.filterConserve(param ne _) + + for lo <- lower(param) do + current = upperLens.map(this, current, lo, removeParamFrom) + for hi <- upper(param) do + current = lowerLens.map(this, current, hi, removeParamFrom) + + def replaceParamIn(other: TypeParamRef) = + val oldEntry = current.entry(other) + val newEntry = oldEntry.substParam(param, replacement) match + case tp: TypeBounds => current.validBoundsFor(other, tp) + case tp => tp + current = boundsLens.update(this, current, other, newEntry) + var oldDepEntry = oldEntry + var newDepEntry = newEntry + replacedTypeVar match + case tvar: TypeVar => + if tvar.inst.exists // `isInstantiated` would use ctx.typerState.constraint rather than the current constraint + then + // If the type variable has been instantiated, we need to forget about + // the instantiation for old dependencies. + // I.e. to find out what the old entry was, we should not follow + // the newly instantiated type variable but assume the type variable's origin `param`. + // An example where this happens is if `replace` is called from TypeVar's `instantiateWith`. + oldDepEntry = mapReplacedTypeVarTo(param)(oldDepEntry) + else + // If the type variable has not been instantiated, we need to replace references to it + // in the new entry by `replacement`. Otherwise we would get stuck in an uninstantiated + // type variable. + // An example where this happens is if `replace` is called from unify. 
+ newDepEntry = mapReplacedTypeVarTo(replacement)(newDepEntry) + case _ => + if oldDepEntry ne newDepEntry then + current.adjustDeps(newDepEntry, oldDepEntry, other, ignoreBinding = param) + end replaceParamIn + + if optimizeReplace then + current.foreachParam { (p, i) => + val other = p.paramRefs(i) + entry(other) match + case _: TypeBounds => + if coDepsOfParam.contains(other) || contraDepsOfParam.contains(other) then + replaceParamIn(other) + case _ => replaceParamIn(other) + } + else + current.foreachParam { (p, i) => + val other = p.paramRefs(i) + if other != param then replaceParamIn(other) + } + if isRemovable(param.binder) then current = current.remove(param.binder) + current.dropDeps(param) + current.checkWellFormed() end replace def remove(pt: TypeLambda)(using Context): This = { @@ -485,7 +761,8 @@ class OrderingConstraint(private val boundsMap: ParamBounds, } val hardVars1 = pt.paramRefs.foldLeft(hardVars)((hvs, param) => hvs - typeVarOfParam(param)) newConstraint(boundsMap.remove(pt), removeFromOrdering(lowerMap), removeFromOrdering(upperMap), hardVars1) - .checkNonCyclic() + .adjustDeps(pt, boundsMap(pt).nn, add = false) + .checkWellFormed() } def isRemovable(pt: TypeLambda): Boolean = { @@ -511,7 +788,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds, def swapKey[T](m: ArrayValuedMap[T]) = val info = m(from) if info == null then m else m.remove(from).updated(to, info) - var current = newConstraint(swapKey(boundsMap), swapKey(lowerMap), swapKey(upperMap), hardVars) + var current = newConstraint(swapKey(boundsMap), swapKey(lowerMap), swapKey(upperMap)) def subst[T <: Type](x: T): T = x.subst(from, to).asInstanceOf[T] current.foreachParam {(p, i) => current = boundsLens.map(this, current, p, i, subst) @@ -519,12 +796,12 @@ class OrderingConstraint(private val boundsMap: ParamBounds, current = upperLens.map(this, current, p, i, _.map(subst)) } constr.println(i"renamed $this to $current") - current.checkNonCyclic() + 
current.checkWellFormed() def isHard(tv: TypeVar) = hardVars.contains(tv) def withHard(tv: TypeVar)(using Context) = - newConstraint(boundsMap, lowerMap, upperMap, hardVars + tv) + newConstraint(hardVars = this.hardVars + tv) def instType(tvar: TypeVar): Type = entry(tvar.origin) match case _: TypeBounds => NoType @@ -551,6 +828,26 @@ class OrderingConstraint(private val boundsMap: ParamBounds, assert(tvar.origin == param, i"mismatch $tvar, $param") case _ => + def occursAtToplevel(param: TypeParamRef, inst: Type)(using Context): Boolean = + def occurs(tp: Type)(using Context): Boolean = tp match + case tp: AndOrType => + occurs(tp.tp1) || occurs(tp.tp2) + case tp: TypeParamRef => + (tp eq param) || entry(tp).match + case NoType => false + case TypeBounds(lo, hi) => (lo eq hi) && occurs(lo) + case inst => occurs(inst) + case tp: TypeVar => + occurs(tp.underlying) + case TypeBounds(lo, hi) => + occurs(lo) || occurs(hi) + case _ => + val tp1 = tp.dealias + (tp1 ne tp) && occurs(tp1) + + occurs(inst) + end occursAtToplevel + // ---------- Exploration -------------------------------------------------------- def domainLambdas: List[TypeLambda] = boundsMap.keys @@ -603,7 +900,57 @@ class OrderingConstraint(private val boundsMap: ParamBounds, // ---------- Checking ----------------------------------------------- - def checkNonCyclic()(using Context): this.type = + def checkWellFormed()(using Context): this.type = + + /** Check that each dependency A -> B in coDeps and contraDeps corresponds to + * a reference to A at the right variance in the entry of B. 
+ */ + def checkBackward(deps: ReverseDeps, depsName: String, v: Int)(using Context): Unit = + deps.foreachBinding { (param, params) => + for srcParam <- params do + assert(contains(srcParam) && occursAtVariance(param, v, in = entry(srcParam)), + i"wrong $depsName backwards reference $param -> $srcParam in $thisConstraint") + } + + /** A type traverser that checks that all references bound in the constraint + * are accounted for in coDeps and/or contraDeps. + */ + def checkForward(srcParam: TypeParamRef)(using Context) = + new TypeTraverser with ConstraintAwareTraversal[Unit]: + val seen = util.HashSet[LazyRef]() + def traverse(t: Type): Unit = t match + case param: TypeParamRef if param ne srcParam => + def check(deps: ReverseDeps, directDeps: List[TypeParamRef], depsName: String) = + assert(deps.at(param).contains(srcParam) || directDeps.contains(srcParam), + i"missing $depsName backwards reference $param -> $srcParam in $thisConstraint") + entry(param) match + case _: TypeBounds => + if variance >= 0 then check(contraDeps, upper(param), "contra") + if variance <= 0 then check(coDeps, lower(param), "co") + case tp => + traverse(tp) + case tp: LazyRef => + if !seen.contains(tp) then + seen += tp + traverse(tp.ref) + case _ => traverseChildren(t) + + /** Does `param` occur at variance `v` or else at variance 0 in entry `in`? 
*/ + def occursAtVariance(param: TypeParamRef, v: Int, in: Type)(using Context): Boolean = + val test = new TypeAccumulator[Boolean] with ConstraintAwareTraversal[Boolean]: + def apply(x: Boolean, t: Type): Boolean = + if x then true + else t match + case t: TypeParamRef => + entry(t) match + case _: TypeBounds => + t == param && (variance == 0 || variance == v) + case e => + apply(x, e) + case _ => + foldOver(x, t) + test(false, in) + if Config.checkConstraintsNonCyclic then domainParams.foreach { param => val inst = entry(param) @@ -612,28 +959,13 @@ class OrderingConstraint(private val boundsMap: ParamBounds, assert(!occursAtToplevel(param, inst), s"cyclic bound for $param: ${inst.show} in ${this.show}") } - this - - def occursAtToplevel(param: TypeParamRef, inst: Type)(using Context): Boolean = - - def occurs(tp: Type)(using Context): Boolean = tp match - case tp: AndOrType => - occurs(tp.tp1) || occurs(tp.tp2) - case tp: TypeParamRef => - (tp eq param) || entry(tp).match - case NoType => false - case TypeBounds(lo, hi) => (lo eq hi) && occurs(lo) - case inst => occurs(inst) - case tp: TypeVar => - occurs(tp.underlying) - case TypeBounds(lo, hi) => - occurs(lo) || occurs(hi) - case _ => - val tp1 = tp.dealias - (tp1 ne tp) && occurs(tp1) + if Config.checkConstraintDeps || ctx.settings.YcheckConstraintDeps.value then + checkBackward(coDeps, "co", -1) + checkBackward(contraDeps, "contra", +1) + domainParams.foreach(p => if contains(p) then checkForward(p).traverse(entry(p))) - occurs(inst) - end occursAtToplevel + this + end checkWellFormed override def checkClosed()(using Context): Unit = @@ -663,13 +995,16 @@ class OrderingConstraint(private val boundsMap: ParamBounds, val constrainedText = " constrained types = " + domainLambdas.mkString("\n") val boundsText = - " bounds = " + { + "\n bounds = " + { val assocs = for (param <- domainParams) yield s"${param.binder.paramNames(param.paramNum)}: ${entryText(entry(param))}" assocs.mkString("\n") } - constrainedText 
+ "\n" + boundsText + val depsText = + "\n coDeps = " + coDeps + + "\n contraDeps = " + contraDeps + constrainedText + boundsText + depsText } } diff --git a/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala b/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala index ff9a5cd0aed7..5e8a960608e6 100644 --- a/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala +++ b/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala @@ -265,26 +265,26 @@ trait PatternTypeConstrainer { self: TypeComparer => (tp, pt) match { case (AppliedType(tyconS, argsS), AppliedType(tyconP, argsP)) => val saved = state.nn.constraint - val savedGadt = ctx.gadt.fresh val result = - tyconS.typeParams.lazyZip(argsS).lazyZip(argsP).forall { (param, argS, argP) => - val variance = param.paramVarianceSign - if variance == 0 || assumeInvariantRefinement || - // As a special case, when pattern and scrutinee types have the same type constructor, - // we infer better bounds for pattern-bound abstract types. - argP.typeSymbol.isPatternBound && patternTp.classSymbol == scrutineeTp.classSymbol - then - val TypeBounds(loS, hiS) = argS.bounds - val TypeBounds(loP, hiP) = argP.bounds - var res = true - if variance < 1 then res &&= isSubType(loS, hiP) - if variance > -1 then res &&= isSubType(loP, hiS) - res - else true + ctx.gadtState.rollbackGadtUnless { + tyconS.typeParams.lazyZip(argsS).lazyZip(argsP).forall { (param, argS, argP) => + val variance = param.paramVarianceSign + if variance == 0 || assumeInvariantRefinement || + // As a special case, when pattern and scrutinee types have the same type constructor, + // we infer better bounds for pattern-bound abstract types. 
+ argP.typeSymbol.isPatternBound && patternTp.classSymbol == scrutineeTp.classSymbol + then + val TypeBounds(loS, hiS) = argS.bounds + val TypeBounds(loP, hiP) = argP.bounds + var res = true + if variance < 1 then res &&= isSubType(loS, hiP) + if variance > -1 then res &&= isSubType(loP, hiS) + res + else true + } } if !result then constraint = saved - ctx.gadt.restore(savedGadt) result case _ => // Give up if we don't get AppliedType, e.g. if we upcasted to Any. diff --git a/compiler/src/dotty/tools/dotc/core/Periods.scala b/compiler/src/dotty/tools/dotc/core/Periods.scala index 44d83dcb5278..ee877fb538d4 100644 --- a/compiler/src/dotty/tools/dotc/core/Periods.scala +++ b/compiler/src/dotty/tools/dotc/core/Periods.scala @@ -20,7 +20,7 @@ object Periods { /** Are all base types in the current period guaranteed to be the same as in period `p`? */ def currentHasSameBaseTypesAs(p: Period)(using Context): Boolean = val period = ctx.period - period == p || + period.code == p.code || period.runId == p.runId && unfusedPhases(period.phaseId).sameBaseTypesStartId == unfusedPhases(p.phaseId).sameBaseTypesStartId @@ -118,7 +118,8 @@ object Periods { apply(rid, 0, PhaseMask) } - final val Nowhere: Period = new Period(0) + inline val NowhereCode = 0 + final val Nowhere: Period = new Period(NowhereCode) final val InitialPeriod: Period = Period(InitialRunId, FirstPhaseId) diff --git a/compiler/src/dotty/tools/dotc/core/Phases.scala b/compiler/src/dotty/tools/dotc/core/Phases.scala index b4a2dcac1b85..3c4c45ab254a 100644 --- a/compiler/src/dotty/tools/dotc/core/Phases.scala +++ b/compiler/src/dotty/tools/dotc/core/Phases.scala @@ -197,12 +197,21 @@ object Phases { config.println(s"nextDenotTransformerId = ${nextDenotTransformerId.toList}") } + /** Unlink `phase` from Denot transformer chain. This means that + * any denotation transformer defined by the phase will not be executed. 
+ */ + def unlinkPhaseAsDenotTransformer(phase: Phase)(using Context) = + for i <- 0 until nextDenotTransformerId.length do + if nextDenotTransformerId(i) == phase.id then + nextDenotTransformerId(i) = nextDenotTransformerId(phase.id + 1) + private var myParserPhase: Phase = _ private var myTyperPhase: Phase = _ private var myPostTyperPhase: Phase = _ private var mySbtExtractDependenciesPhase: Phase = _ private var myPicklerPhase: Phase = _ private var myInliningPhase: Phase = _ + private var myStagingPhase: Phase = _ private var mySplicingPhase: Phase = _ private var myFirstTransformPhase: Phase = _ private var myCollectNullableFieldsPhase: Phase = _ @@ -227,6 +236,7 @@ object Phases { final def sbtExtractDependenciesPhase: Phase = mySbtExtractDependenciesPhase final def picklerPhase: Phase = myPicklerPhase final def inliningPhase: Phase = myInliningPhase + final def stagingPhase: Phase = myStagingPhase final def splicingPhase: Phase = mySplicingPhase final def firstTransformPhase: Phase = myFirstTransformPhase final def collectNullableFieldsPhase: Phase = myCollectNullableFieldsPhase @@ -254,6 +264,7 @@ object Phases { mySbtExtractDependenciesPhase = phaseOfClass(classOf[sbt.ExtractDependencies]) myPicklerPhase = phaseOfClass(classOf[Pickler]) myInliningPhase = phaseOfClass(classOf[Inlining]) + myStagingPhase = phaseOfClass(classOf[Staging]) mySplicingPhase = phaseOfClass(classOf[Splicing]) myFirstTransformPhase = phaseOfClass(classOf[FirstTransform]) myCollectNullableFieldsPhase = phaseOfClass(classOf[CollectNullableFields]) @@ -314,8 +325,8 @@ object Phases { units.map { unit => val unitCtx = ctx.fresh.setPhase(this.start).setCompilationUnit(unit).withRootImports try run(using unitCtx) - catch case ex: Throwable => - println(s"$ex while running $phaseName on $unit") + catch case ex: Throwable if !ctx.run.enrichedErrorMessage => + println(ctx.run.enrichErrorMessage(s"unhandled exception while running $phaseName on $unit")) throw ex unitCtx.compilationUnit } @@ 
-441,6 +452,7 @@ object Phases { def sbtExtractDependenciesPhase(using Context): Phase = ctx.base.sbtExtractDependenciesPhase def picklerPhase(using Context): Phase = ctx.base.picklerPhase def inliningPhase(using Context): Phase = ctx.base.inliningPhase + def stagingPhase(using Context): Phase = ctx.base.stagingPhase def splicingPhase(using Context): Phase = ctx.base.splicingPhase def firstTransformPhase(using Context): Phase = ctx.base.firstTransformPhase def refchecksPhase(using Context): Phase = ctx.base.refchecksPhase diff --git a/compiler/src/dotty/tools/dotc/core/Scopes.scala b/compiler/src/dotty/tools/dotc/core/Scopes.scala index 863ae4fa6b7f..99076b422358 100644 --- a/compiler/src/dotty/tools/dotc/core/Scopes.scala +++ b/compiler/src/dotty/tools/dotc/core/Scopes.scala @@ -467,7 +467,7 @@ object Scopes { override def size: Int = 0 override def nestingLevel: Int = 0 override def toList(using Context): List[Symbol] = Nil - override def cloneScope(using Context): MutableScope = unsupported("cloneScope") + override def cloneScope(using Context): MutableScope = newScope(nestingLevel) override def lookupEntry(name: Name)(using Context): ScopeEntry | Null = null override def lookupNextEntry(entry: ScopeEntry)(using Context): ScopeEntry | Null = null } diff --git a/compiler/src/dotty/tools/dotc/core/StdNames.scala b/compiler/src/dotty/tools/dotc/core/StdNames.scala index c0aca9d8abf4..f2624e26cba5 100644 --- a/compiler/src/dotty/tools/dotc/core/StdNames.scala +++ b/compiler/src/dotty/tools/dotc/core/StdNames.scala @@ -3,6 +3,7 @@ package core import scala.collection.mutable import scala.annotation.switch +import scala.annotation.internal.sharable import Names._ import Symbols._ import Contexts._ @@ -40,7 +41,9 @@ object StdNames { inline val Tuple = "Tuple" inline val Product = "Product" - def sanitize(str: String): String = str.replaceAll("""[<>]""", """\$""").nn + @sharable + private val disallowed = java.util.regex.Pattern.compile("""[<>]""").nn + def 
sanitize(str: String): String = disallowed.matcher(str).nn.replaceAll("""\$""").nn } abstract class DefinedNames[N <: Name] { @@ -128,6 +131,7 @@ object StdNames { val EXCEPTION_RESULT_PREFIX: N = "exceptionResult" val EXPAND_SEPARATOR: N = str.EXPAND_SEPARATOR val IMPORT: N = "" + val INTO: N = "" val MODULE_SUFFIX: N = str.MODULE_SUFFIX val OPS_PACKAGE: N = "" val OVERLOADED: N = "" @@ -200,6 +204,7 @@ object StdNames { final val Null: N = "Null" final val Object: N = "Object" final val FromJavaObject: N = "" + final val Record: N = "Record" final val Product: N = "Product" final val PartialFunction: N = "PartialFunction" final val PrefixType: N = "PrefixType" @@ -208,6 +213,7 @@ object StdNames { final val Throwable: N = "Throwable" final val IOOBException: N = "IndexOutOfBoundsException" final val FunctionXXL: N = "FunctionXXL" + final val ErasedFunction: N = "ErasedFunction" final val Abs: N = "Abs" final val And: N = "&&" @@ -243,7 +249,6 @@ object StdNames { final val ToString: N = "ToString" final val Xor: N = "^" - final val ClassfileAnnotation: N = "ClassfileAnnotation" final val ClassManifest: N = "ClassManifest" final val Enum: N = "Enum" final val Group: N = "Group" @@ -282,7 +287,7 @@ object StdNames { // ----- Term names ----------------------------------------- // Compiler-internal - val CAPTURE_ROOT: N = "*" + val CAPTURE_ROOT: N = "cap" val CONSTRUCTOR: N = "" val STATIC_CONSTRUCTOR: N = "" val EVT2U: N = "evt2u$" @@ -297,6 +302,7 @@ object StdNames { val THROWS: N = "$throws" val U2EVT: N = "u2evt$" val ALLARGS: N = "$allArgs" + val UPARROW: N = "^" final val Nil: N = "Nil" final val Predef: N = "Predef" @@ -420,6 +426,7 @@ object StdNames { val assert_ : N = "assert" val assume_ : N = "assume" val box: N = "box" + val break: N = "break" val build : N = "build" val bundle: N = "bundle" val bytes: N = "bytes" @@ -501,6 +508,7 @@ object StdNames { val info: N = "info" val inlinedEquals: N = "inlinedEquals" val internal: N = "internal" + val into: N 
= "into" val isArray: N = "isArray" val isDefinedAt: N = "isDefinedAt" val isDefinedAtImpl: N = "$isDefinedAt" @@ -510,10 +518,12 @@ object StdNames { val isInstanceOfPM: N = "$isInstanceOf$" val java: N = "java" val key: N = "key" + val label: N = "label" val lang: N = "lang" val language: N = "language" val length: N = "length" val lengthCompare: N = "lengthCompare" + val local: N = "local" val longHash: N = "longHash" val macroThis : N = "_this" val macroContext : N = "c" @@ -825,7 +835,7 @@ object StdNames { def newBitmapName(bitmapPrefix: TermName, n: Int): TermName = bitmapPrefix ++ n.toString - def selectorName(n: Int): TermName = "_" + (n + 1) + def selectorName(n: Int): TermName = productAccessorName(n + 1) object primitive { val arrayApply: TermName = "[]apply" @@ -904,6 +914,10 @@ object StdNames { final val VOLATILEkw: N = kw("volatile") final val WHILEkw: N = kw("while") + final val RECORDid: N = "record" + final val VARid: N = "var" + final val YIELDid: N = "yield" + final val BoxedBoolean: N = "java.lang.Boolean" final val BoxedByte: N = "java.lang.Byte" final val BoxedCharacter: N = "java.lang.Character" @@ -936,6 +950,8 @@ object StdNames { final val JavaSerializable: N = "java.io.Serializable" } + + class JavaTermNames extends JavaNames[TermName] { protected def fromString(s: String): TermName = termName(s) } diff --git a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala index d0bf0f4da6dc..b8c17ff61e9e 100644 --- a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala @@ -24,7 +24,7 @@ import config.Config import reporting._ import collection.mutable import transform.TypeUtils._ -import cc.{CapturingType, derivedCapturingType} +import cc.{CapturingType, derivedCapturingType, Setup, EventuallyCapturingType, isEventuallyCapturingType} import scala.annotation.internal.sharable @@ -39,7 +39,7 @@ object SymDenotations { final 
val name: Name, initFlags: FlagSet, initInfo: Type, - initPrivateWithin: Symbol = NoSymbol) extends SingleDenotation(symbol, initInfo) { + initPrivateWithin: Symbol = NoSymbol) extends SingleDenotation(symbol, initInfo, name.isTypeName) { //assert(symbol.id != 4940, name) @@ -168,7 +168,8 @@ object SymDenotations { } } else { - if (myFlags.is(Touched)) throw CyclicReference(this) + if (myFlags.is(Touched)) + throw CyclicReference(this)(using ctx.withOwner(symbol)) myFlags |= Touched atPhase(validFor.firstPhaseId)(completer.complete(this)) } @@ -251,6 +252,18 @@ object SymDenotations { final def filterAnnotations(p: Annotation => Boolean)(using Context): Unit = annotations = annotations.filterConserve(p) + def annotationsCarrying(meta: Set[Symbol], orNoneOf: Set[Symbol] = Set.empty)(using Context): List[Annotation] = + annotations.filterConserve(_.hasOneOfMetaAnnotation(meta, orNoneOf = orNoneOf)) + + def keepAnnotationsCarrying(phase: DenotTransformer, meta: Set[Symbol], orNoneOf: Set[Symbol] = Set.empty)(using Context): Unit = + updateAnnotationsAfter(phase, annotationsCarrying(meta, orNoneOf = orNoneOf)) + + def updateAnnotationsAfter(phase: DenotTransformer, annots: List[Annotation])(using Context): Unit = + if annots ne annotations then + val cpy = copySymDenotation() + cpy.annotations = annots + cpy.installAfter(phase) + /** Optionally, the annotation matching the given class symbol */ final def getAnnotation(cls: Symbol)(using Context): Option[Annotation] = dropOtherAnnotations(annotations, cls) match { @@ -273,7 +286,7 @@ object SymDenotations { /** Add the given annotation without parameters to the annotations of this denotation */ final def addAnnotation(cls: ClassSymbol)(using Context): Unit = - addAnnotation(Annotation(cls)) + addAnnotation(Annotation(cls, symbol.span)) /** Remove annotation with given class from this denotation */ final def removeAnnotation(cls: Symbol)(using Context): Unit = @@ -286,7 +299,7 @@ object SymDenotations { } /** Add all 
given annotations to this symbol */ - final def addAnnotations(annots: TraversableOnce[Annotation])(using Context): Unit = + final def addAnnotations(annots: IterableOnce[Annotation])(using Context): Unit = annots.iterator.foreach(addAnnotation) @tailrec @@ -505,6 +518,30 @@ object SymDenotations { /** `fullName` where `.' is the separator character */ def fullName(using Context): Name = fullNameSeparated(QualifiedName) + /** The fully qualified name on the JVM of the class corresponding to this symbol. */ + def binaryClassName(using Context): String = + val builder = new StringBuilder + val pkg = enclosingPackageClass + if !pkg.isEffectiveRoot then + builder.append(pkg.fullName.mangledString) + builder.append(".") + val flatName = this.flatName + // Some companion objects are fake (that is, they're a compiler fiction + // that doesn't correspond to a class that exists at runtime), this + // can happen in two cases: + // - If a Java class has static members. + // - If we create constructor proxies for a class (see NamerOps#addConstructorProxies). + // + // In both cases it's may be vital that we don't return the object name. + // For instance, sending it to zinc: when sbt is restarted, zinc will inspect the binary + // dependencies to see if they're still on the classpath, if it + // doesn't find them it will invalidate whatever referenced them, so + // any reference to a fake companion will lead to extra recompilations. + // Instead, use the class name since it's guaranteed to exist at runtime. + val clsFlatName = if isOneOf(JavaDefined | ConstructorProxy) then flatName.stripModuleClassSuffix else flatName + builder.append(clsFlatName.mangledString) + builder.toString + private var myTargetName: Name | Null = null private def computeTargetName(targetNameAnnot: Option[Annotation])(using Context): Name = @@ -542,9 +579,6 @@ object SymDenotations { // ----- Tests ------------------------------------------------- - /** Is this denotation a type? 
*/ - override def isType: Boolean = name.isTypeName - /** Is this denotation a class? */ final def isClass: Boolean = isInstanceOf[ClassDenotation] @@ -748,7 +782,7 @@ object SymDenotations { * So the first call to a stable member might fail and/or produce side effects. */ final def isStableMember(using Context): Boolean = { - def isUnstableValue = isOneOf(UnstableValueFlags) || info.isInstanceOf[ExprType] + def isUnstableValue = isOneOf(UnstableValueFlags) || info.isInstanceOf[ExprType] || isAllOf(InlineParam) isType || is(StableRealizable) || exists && !isUnstableValue } @@ -808,19 +842,14 @@ object SymDenotations { /** Is this a Scala or Java annotation ? */ def isAnnotation(using Context): Boolean = - isClass && derivesFrom(defn.AnnotationClass) + isClass && (derivesFrom(defn.AnnotationClass) || is(JavaAnnotation)) /** Is this symbol a class that extends `java.io.Serializable` ? */ def isSerializable(using Context): Boolean = isClass && derivesFrom(defn.JavaSerializableClass) - /** Is this symbol a class that extends `AnyVal`? */ - final def isValueClass(using Context): Boolean = - val di = initial - di.isClass - && atPhase(di.validFor.firstPhaseId)(di.derivesFrom(defn.AnyValClass)) - // We call derivesFrom at the initial phase both because AnyVal does not exist - // after Erasure and to avoid cyclic references caused by forcing denotations + /** Is this symbol a class that extends `AnyVal`? Overridden in ClassDenotation */ + def isValueClass(using Context): Boolean = false /** Is this symbol a class of which `null` is a value? 
*/ final def isNullableClass(using Context): Boolean = @@ -878,10 +907,13 @@ object SymDenotations { false val cls = owner.enclosingSubClass if !cls.exists then - val encl = if ctx.owner.isConstructor then ctx.owner.enclosingClass.owner.enclosingClass else ctx.owner.enclosingClass - fail(i""" - | Access to protected $this not permitted because enclosing ${encl.showLocated} - | is not a subclass of ${owner.showLocated} where target is defined""") + if pre.termSymbol.isPackageObject && accessWithin(pre.termSymbol.owner) then + true + else + val encl = if ctx.owner.isConstructor then ctx.owner.enclosingClass.owner.enclosingClass else ctx.owner.enclosingClass + fail(i""" + | Access to protected $this not permitted because enclosing ${encl.showLocated} + | is not a subclass of ${owner.showLocated} where target is defined""") else if isType || pre.derivesFrom(cls) || isConstructor || owner.is(ModuleClass) then // allow accesses to types from arbitrary subclasses fixes #4737 // don't perform this check for static members @@ -960,6 +992,26 @@ object SymDenotations { def isSkolem: Boolean = name == nme.SKOLEM + // Java language spec: https://docs.oracle.com/javase/specs/jls/se11/html/jls-15.html#jls-15.12.3 + // Scala 2 spec: https://scala-lang.org/files/archive/spec/2.13/06-expressions.html#signature-polymorphic-methods + def isSignaturePolymorphic(using Context): Boolean = + containsSignaturePolymorphic + && is(JavaDefined) + && hasAnnotation(defn.NativeAnnot) + && atPhase(typerPhase)(symbol.denot).paramSymss.match + case List(List(p)) => p.info.isRepeatedParam + case _ => false + + def containsSignaturePolymorphic(using Context): Boolean = + maybeOwner == defn.MethodHandleClass + || maybeOwner == defn.VarHandleClass + + def originalSignaturePolymorphic(using Context): Denotation = + if containsSignaturePolymorphic && !isSignaturePolymorphic then + val d = owner.info.member(name) + if d.symbol.isSignaturePolymorphic then d else NoDenotation + else NoDenotation + def 
isInlineMethod(using Context): Boolean = isAllOf(InlineMethod, butNot = Accessor) @@ -1053,6 +1105,7 @@ object SymDenotations { case tp: Symbol => sourceOfSelf(tp.info) case tp: RefinedType => sourceOfSelf(tp.parent) case tp: AnnotatedType => sourceOfSelf(tp.parent) + case tp: ThisType => tp.cls } sourceOfSelf(selfType) case info: LazyType => @@ -1143,6 +1196,7 @@ object SymDenotations { isOneOf(EffectivelyFinalFlags) || is(Inline, butNot = Deferred) || is(JavaDefinedVal, butNot = Method) + || isConstructor || !owner.isExtensibleClass /** A class is effectively sealed if has the `final` or `sealed` modifier, or it @@ -1151,9 +1205,9 @@ object SymDenotations { final def isEffectivelySealed(using Context): Boolean = isOneOf(FinalOrSealed) || isClass && !isOneOf(EffectivelyOpenFlags) - final def isTransparentTrait(using Context): Boolean = - isAllOf(TransparentTrait) - || defn.assumedTransparentTraits.contains(symbol) + final def isTransparentClass(using Context): Boolean = + is(TransparentType) + || defn.isAssumedTransparent(symbol) || isClass && hasAnnotation(defn.TransparentTraitAnnot) /** The class containing this denotation which has the given effective name. */ @@ -1357,9 +1411,9 @@ object SymDenotations { case Nil => Iterator.empty } - /** The symbol overriding this symbol in given subclass `ofclazz`. + /** The symbol overriding this symbol in given subclass `inClass`. * - * @param ofclazz is a subclass of this symbol's owner + * @pre `inClass` is a subclass of this symbol's owner */ final def overridingSymbol(inClass: ClassSymbol)(using Context): Symbol = if (canMatchInheritedSymbols) matchingDecl(inClass, inClass.thisType) @@ -1827,19 +1881,21 @@ object SymDenotations { super.info_=(tp) } - /** The symbols of the parent classes. */ - def parentSyms(using Context): List[Symbol] = info match { - case classInfo: ClassInfo => classInfo.declaredParents.map(_.classSymbol) + /** The types of the parent classes. 
*/ + def parentTypes(using Context): List[Type] = info match + case classInfo: ClassInfo => classInfo.declaredParents case _ => Nil - } + + /** The symbols of the parent classes. */ + def parentSyms(using Context): List[Symbol] = + parentTypes.map(_.classSymbol) /** The symbol of the superclass, NoSymbol if no superclass exists */ - def superClass(using Context): Symbol = parentSyms match { - case parent :: _ => - if (parent.is(Trait)) NoSymbol else parent - case _ => - NoSymbol - } + def superClass(using Context): Symbol = parentTypes match + case parentType :: _ => + val parentCls = parentType.classSymbol + if parentCls.is(Trait) then NoSymbol else parentCls + case _ => NoSymbol /** The explicitly given self type (self types of modules are assumed to be * explcitly given here). @@ -1901,20 +1957,20 @@ object SymDenotations { def computeBaseData(implicit onBehalf: BaseData, ctx: Context): (List[ClassSymbol], BaseClassSet) = { def emptyParentsExpected = is(Package) || (symbol == defn.AnyClass) || ctx.erasedTypes && (symbol == defn.ObjectClass) - val psyms = parentSyms - if (psyms.isEmpty && !emptyParentsExpected) + val parents = parentTypes + if (parents.isEmpty && !emptyParentsExpected) onBehalf.signalProvisional() val builder = new BaseDataBuilder - def traverse(parents: List[Symbol]): Unit = parents match { + def traverse(parents: List[Type]): Unit = parents match { case p :: parents1 => - p match { + p.classSymbol match { case pcls: ClassSymbol => builder.addAll(pcls.baseClasses) case _ => assert(isRefinementClass || p.isError || ctx.mode.is(Mode.Interactive), s"$this has non-class parent: $p") } traverse(parents1) case nil => } - traverse(psyms) + traverse(parents) (classSymbol :: builder.baseClasses, builder.baseClassSet) } @@ -1951,6 +2007,17 @@ object SymDenotations { /** Hook to do a pre-enter test. 
Overridden in PackageDenotation */ protected def proceedWithEnter(sym: Symbol, mscope: MutableScope)(using Context): Boolean = true + final override def isValueClass(using Context): Boolean = + val di = initial.asClass + val anyVal = defn.AnyValClass + if di.baseDataCache.isValid && !ctx.erasedTypes then + // fast path that does not demand time travel + (symbol eq anyVal) || di.baseClassSet.contains(anyVal) + else + // We call derivesFrom at the initial phase both because AnyVal does not exist + // after Erasure and to avoid cyclic references caused by forcing denotations + atPhase(di.validFor.firstPhaseId)(di.derivesFrom(anyVal)) + /** Enter a symbol in current scope, and future scopes of same denotation. * Note: We require that this does not happen after the first time * someone does a findMember on a subclass. @@ -2092,7 +2159,7 @@ object SymDenotations { Stats.record("basetype cache entries") if (!baseTp.exists) Stats.record("basetype cache NoTypes") } - if (!tp.isProvisional) + if (!tp.isProvisional && !CapturingType.isUncachable(tp)) btrCache(tp) = baseTp else btrCache.remove(tp) // Remove any potential sentinel value @@ -2106,8 +2173,9 @@ object SymDenotations { def recur(tp: Type): Type = try { tp match { case tp: CachedType => - val baseTp = btrCache.lookup(tp) - if (baseTp != null) return ensureAcyclic(baseTp) + val baseTp: Type | Null = btrCache.lookup(tp) + if (baseTp != null) + return ensureAcyclic(baseTp) case _ => } if (Stats.monitored) { @@ -2162,13 +2230,12 @@ object SymDenotations { def computeApplied = { btrCache(tp) = NoPrefix val baseTp = - if (tycon.typeSymbol eq symbol) tp - else (tycon.typeParams: @unchecked) match { + if (tycon.typeSymbol eq symbol) && !tycon.isLambdaSub then tp + else (tycon.typeParams: @unchecked) match case LambdaParam(_, _) :: _ => recur(tp.superType) case tparams: List[Symbol @unchecked] => recur(tycon).substApprox(tparams, args) - } record(tp, baseTp) baseTp } @@ -2251,9 +2318,11 @@ object SymDenotations { var names = 
Set[Name]() def maybeAdd(name: Name) = if (keepOnly(thisType, name)) names += name try { - for (p <- parentSyms if p.isClass) - for (name <- p.asClass.memberNames(keepOnly)) - maybeAdd(name) + for ptype <- parentTypes do + ptype.classSymbol match + case pcls: ClassSymbol => + for name <- pcls.memberNames(keepOnly) do + maybeAdd(name) val ownSyms = if (keepOnly eq implicitFilter) if (this.is(Package)) Iterator.empty @@ -2438,13 +2507,13 @@ object SymDenotations { val youngest = assocFiles.filter(_.lastModified == lastModDate) val chosen = youngest.head def ambiguousFilesMsg(f: AbstractFile) = - em"""Toplevel definition $name is defined in - | $chosen - |and also in - | $f""" + i"""Toplevel definition $name is defined in + | $chosen + |and also in + | $f""" if youngest.size > 1 then - throw TypeError(i"""${ambiguousFilesMsg(youngest.tail.head)} - |One of these files should be removed from the classpath.""") + throw TypeError(em"""${ambiguousFilesMsg(youngest.tail.head)} + |One of these files should be removed from the classpath.""") // Warn if one of the older files comes from a different container. // In that case picking the youngest file is not necessarily what we want, @@ -2454,15 +2523,18 @@ object SymDenotations { try f.container == chosen.container catch case NonFatal(ex) => true if !ambiguityWarningIssued then for conflicting <- assocFiles.find(!sameContainer(_)) do - report.warning(i"""${ambiguousFilesMsg(conflicting.nn)} - |Keeping only the definition in $chosen""") + report.warning(em"""${ambiguousFilesMsg(conflicting.nn)} + |Keeping only the definition in $chosen""") ambiguityWarningIssued = true multi.filterWithPredicate(_.symbol.associatedFile == chosen) end dropStale - if symbol eq defn.ScalaPackageClass then + if name == nme.CONSTRUCTOR then + NoDenotation // packages don't have constructors, even if package objects do. 
+ else if symbol eq defn.ScalaPackageClass then + // revert order: search package first, then nested package objects val denots = super.computeMembersNamed(name) - if denots.exists || name == nme.CONSTRUCTOR then denots + if denots.exists then denots else recur(packageObjs, NoDenotation) else recur(packageObjs, NoDenotation) end computeMembersNamed @@ -2505,7 +2577,6 @@ object SymDenotations { @sharable object NoDenotation extends SymDenotation(NoSymbol, NoSymbol, "".toTermName, Permanent, NoType) { - override def isType: Boolean = false override def isTerm: Boolean = false override def exists: Boolean = false override def owner: Symbol = throw new AssertionError("NoDenotation.owner") @@ -2802,7 +2873,7 @@ object SymDenotations { } def isValidAt(phase: Phase)(using Context) = - checkedPeriod == ctx.period || + checkedPeriod.code == ctx.period.code || createdAt.runId == ctx.runId && createdAt.phaseId < unfusedPhases.length && sameGroup(unfusedPhases(createdAt.phaseId), phase) && diff --git a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala index c5ae98853061..9eb67b468cfa 100644 --- a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala +++ b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala @@ -88,8 +88,8 @@ object SymbolLoaders { return NoSymbol } else - throw new TypeError( - i"""$owner contains object and package with same name: $pname + throw TypeError( + em"""$owner contains object and package with same name: $pname |one of them needs to be removed from classpath""") newModuleSymbol(owner, pname, PackageCreationFlags, PackageCreationFlags, completer).entered @@ -331,8 +331,9 @@ abstract class SymbolLoader extends LazyType { self => if (ctx.debug) ex.printStackTrace() val msg = ex.getMessage() report.error( - if (msg == null) "i/o error while loading " + root.name - else "error while loading " + root.name + ",\n" + msg) + if msg == null then em"i/o error while loading ${root.name}" + else 
em"""error while loading ${root.name}, + |$msg""") } try { val start = System.currentTimeMillis diff --git a/compiler/src/dotty/tools/dotc/core/Symbols.scala b/compiler/src/dotty/tools/dotc/core/Symbols.scala index 775062c26b0c..07ac2be90819 100644 --- a/compiler/src/dotty/tools/dotc/core/Symbols.scala +++ b/compiler/src/dotty/tools/dotc/core/Symbols.scala @@ -40,7 +40,7 @@ object Symbols { val Ids: Property.Key[Array[String]] = new Property.Key /** A Symbol represents a Scala definition/declaration or a package. - * @param coord The coordinates of the symbol (a position or an index) + * @param myCoord The coordinates of the symbol (a position or an index) * @param id A unique identifier of the symbol (unique per ContextBase) */ class Symbol private[Symbols] (private var myCoord: Coord, val id: Int, val nestingLevel: Int) @@ -103,7 +103,7 @@ object Symbols { /** The current denotation of this symbol */ final def denot(using Context): SymDenotation = { util.Stats.record("Symbol.denot") - if (checkedPeriod == ctx.period) lastDenot + if checkedPeriod.code == ctx.period.code then lastDenot else computeDenot(lastDenot) } @@ -170,7 +170,7 @@ object Symbols { asInstanceOf[TermSymbol] } final def asType(using Context): TypeSymbol = { - assert(isType, s"isType called on not-a-Type $this"); + assert(isType, s"asType called on not-a-Type $this"); asInstanceOf[TypeSymbol] } @@ -630,6 +630,32 @@ object Symbols { owner.thisType, modcls, parents, decls, TermRef(owner.thisType, module)), privateWithin, coord, assocFile) + /** Same as `newCompleteModuleSymbol` except that `parents` can be a list of arbitrary + * types which get normalized into type refs and parameter bindings. 
+ */ + def newNormalizedModuleSymbol( + owner: Symbol, + name: TermName, + modFlags: FlagSet, + clsFlags: FlagSet, + parentTypes: List[Type], + decls: Scope, + privateWithin: Symbol = NoSymbol, + coord: Coord = NoCoord, + assocFile: AbstractFile | Null = null)(using Context): TermSymbol = { + def completer(module: Symbol) = new LazyType { + def complete(denot: SymDenotation)(using Context): Unit = { + val cls = denot.asClass.classSymbol + val decls = newScope + denot.info = ClassInfo(owner.thisType, cls, parentTypes.map(_.dealias), decls, TermRef(owner.thisType, module)) + } + } + newModuleSymbol( + owner, name, modFlags, clsFlags, + (module, modcls) => completer(module), + privateWithin, coord, assocFile) + } + /** Create a package symbol with associated package class * from its non-info fields and a lazy type for loading the package's members. */ @@ -660,7 +686,7 @@ object Symbols { addToGadt: Boolean = true, flags: FlagSet = EmptyFlags)(using Context): Symbol = { val sym = newSymbol(ctx.owner, name, Case | flags, info, coord = span) - if (addToGadt && name.isTypeName) ctx.gadt.addToConstraint(sym) + if (addToGadt && name.isTypeName) ctx.gadtState.addToConstraint(sym) sym } diff --git a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala index 81f822811456..2e8aee4df96c 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala @@ -9,9 +9,11 @@ import SymDenotations.LazyType import Decorators._ import util.Stats._ import Names._ +import StdNames.nme import Flags.{Module, Provisional} import dotty.tools.dotc.config.Config import cc.boxedUnlessFun +import dotty.tools.dotc.transform.TypeUtils.isErasedValueType object TypeApplications { @@ -503,6 +505,14 @@ class TypeApplications(val self: Type) extends AnyVal { case AppliedType(tycon, args) => args.boxedUnlessFun(tycon) case _ => Nil + /** If this is an encoding of a function 
type, return its arguments, otherwise return Nil. + * Handles `ErasedFunction`s and poly functions gracefully. + */ + final def functionArgInfos(using Context): List[Type] = self.dealias match + case RefinedType(parent, nme.apply, mt: MethodType) if defn.isErasedFunctionType(parent) => (mt.paramInfos :+ mt.resultType) + case RefinedType(parent, nme.apply, mt: MethodType) if parent.typeSymbol eq defn.PolyFunctionClass => (mt.paramInfos :+ mt.resultType) + case _ => self.dropDependentRefinement.dealias.argInfos + /** Argument types where existential types in arguments are disallowed */ def argTypes(using Context): List[Type] = argInfos mapConserve noBounds @@ -533,6 +543,9 @@ class TypeApplications(val self: Type) extends AnyVal { case JavaArrayType(elemtp) => elemtp case tp: OrType if tp.tp1.isBottomType => tp.tp2.elemType case tp: OrType if tp.tp2.isBottomType => tp.tp1.elemType - case _ => self.baseType(defn.SeqClass).argInfos.headOption.getOrElse(NoType) + case _ => + self.baseType(defn.SeqClass) + .orElse(self.baseType(defn.ArrayClass)) + .argInfos.headOption.getOrElse(NoType) } } diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index 283a7e3a474e..6857e3da38ed 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -23,7 +23,7 @@ import typer.ProtoTypes.constrained import typer.Applications.productSelectorTypes import reporting.trace import annotation.constructorOnly -import cc.{CapturingType, derivedCapturingType, CaptureSet, stripCapturing, isBoxedCapturing, boxed, boxedUnlessFun, boxedIfTypeParam} +import cc.{CapturingType, derivedCapturingType, CaptureSet, stripCapturing, isBoxedCapturing, boxed, boxedUnlessFun, boxedIfTypeParam, isAlwaysPure} /** Provides methods to compare types. 
*/ @@ -60,8 +60,6 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling /** Indicates whether the subtype check used GADT bounds */ private var GADTused: Boolean = false - protected var canWidenAbstract: Boolean = true - private var myInstance: TypeComparer = this def currentInstance: TypeComparer = myInstance @@ -118,7 +116,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling private def isBottom(tp: Type) = tp.widen.isRef(NothingClass) protected def gadtBounds(sym: Symbol)(using Context) = ctx.gadt.bounds(sym) - protected def gadtAddBound(sym: Symbol, b: Type, isUpper: Boolean): Boolean = ctx.gadt.addBound(sym, b, isUpper) + protected def gadtAddBound(sym: Symbol, b: Type, isUpper: Boolean): Boolean = ctx.gadtState.addBound(sym, b, isUpper) protected def typeVarInstance(tvar: TypeVar)(using Context): Type = tvar.underlying @@ -285,17 +283,28 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling val ctx = comparerContext given Context = ctx // optimization for performance val info2 = tp2.info + + /** Does `tp2` have a stable prefix? + * If that's not the case, following an alias via asSeenFrom could be lossy + * so we should not conclude `false` if comparing aliases fails. 
+ * See pos/i17064.scala for a test case + */ + def hasStablePrefix(tp: NamedType) = + tp.prefix.isStable + info2 match case info2: TypeAlias => if recur(tp1, info2.alias) then return true - if tp2.asInstanceOf[TypeRef].canDropAlias then return false + if tp2.asInstanceOf[TypeRef].canDropAlias && hasStablePrefix(tp2) then + return false case _ => tp1 match case tp1: NamedType => tp1.info match { case info1: TypeAlias => if recur(info1.alias, tp2) then return true - if tp1.asInstanceOf[TypeRef].canDropAlias then return false + if tp1.asInstanceOf[TypeRef].canDropAlias && hasStablePrefix(tp2) then + return false case _ => } val sym2 = tp2.symbol @@ -304,13 +313,14 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling // For convenience we want X$ <:< X.type // This is safe because X$ self-type is X.type sym1 = sym1.companionModule - if ((sym1 ne NoSymbol) && (sym1 eq sym2)) + if (sym1 ne NoSymbol) && (sym1 eq sym2) then ctx.erasedTypes || sym1.isStaticOwner || isSubPrefix(tp1.prefix, tp2.prefix) || thirdTryNamed(tp2) else ( (tp1.name eq tp2.name) + && !sym1.is(Private) && tp2.isPrefixDependentMemberRef && isSubPrefix(tp1.prefix, tp2.prefix) && tp1.signature == tp2.signature @@ -420,16 +430,16 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling true } def compareTypeParamRef = - assumedTrue(tp1) || - tp2.match { - case tp2: TypeParamRef => constraint.isLess(tp1, tp2) - case _ => false - } || - isSubTypeWhenFrozen(bounds(tp1).hi.boxed, tp2) || { - if (canConstrain(tp1) && !approx.high) - addConstraint(tp1, tp2, fromBelow = false) && flagNothingBound - else thirdTry - } + assumedTrue(tp1) + || tp2.dealias.match + case tp2a: TypeParamRef => constraint.isLess(tp1, tp2a) + case tp2a: AndType => recur(tp1, tp2a) + case _ => false + || isSubTypeWhenFrozen(bounds(tp1).hi.boxed, tp2) + || (if canConstrain(tp1) && !approx.high then + addConstraint(tp1, tp2, fromBelow = false) && flagNothingBound + else thirdTry) + 
compareTypeParamRef case tp1: ThisType => val cls1 = tp1.cls @@ -522,7 +532,9 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling res case CapturingType(parent1, refs1) => - if subCaptures(refs1, tp2.captureSet, frozenConstraint).isOK && sameBoxed(tp1, tp2, refs1) + if tp2.isAny then true + else if subCaptures(refs1, tp2.captureSet, frozenConstraint).isOK && sameBoxed(tp1, tp2, refs1) + || !ctx.mode.is(Mode.CheckBoundsOrSelfType) && tp1.isAlwaysPure then recur(parent1, tp2) else thirdTry case tp1: AnnotatedType if !tp1.isRefining => @@ -579,13 +591,14 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling val base = nonExprBaseType(tp1, cls2) if (base.typeSymbol == cls2) return true } - else if tp1.isLambdaSub && !tp1.isAnyKind then + else if tp1.typeParams.nonEmpty && !tp1.isAnyKind then return recur(tp1, EtaExpansion(tp2)) fourthTry } def compareTypeParamRef(tp2: TypeParamRef): Boolean = - assumedTrue(tp2) || { + assumedTrue(tp2) + || { val alwaysTrue = // The following condition is carefully formulated to catch all cases // where the subtype relation is true without needing to add a constraint @@ -596,11 +609,13 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling // widening in `fourthTry` before adding to the constraint. 
if (frozenConstraint) recur(tp1, bounds(tp2).lo.boxed) else isSubTypeWhenFrozen(tp1, tp2) - alwaysTrue || { - if (canConstrain(tp2) && !approx.low) - addConstraint(tp2, tp1.widenExpr, fromBelow = true) - else fourthTry - } + alwaysTrue + || tp1.dealias.match + case tp1a: OrType => recur(tp1a, tp2) + case _ => false + || (if canConstrain(tp2) && !approx.low then + addConstraint(tp2, tp1.widenExpr, fromBelow = true) + else fourthTry) } def thirdTry: Boolean = tp2 match { @@ -826,7 +841,11 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling if refs1.isAlwaysEmpty then recur(tp1, parent2) else subCaptures(refs1, refs2, frozenConstraint).isOK && sameBoxed(tp1, tp2, refs1) - && recur(tp1.widen.stripCapturing, parent2) + && (recur(tp1.widen.stripCapturing, parent2) + || tp1.isInstanceOf[SingletonType] && recur(tp1, parent2) + // this alternative is needed in case the right hand side is a + // capturing type that contains the lhs as an alternative of a union type. 
+ ) catch case ex: AssertionError => println(i"assertion failed while compare captured $tp1 <:< $tp2") throw ex @@ -1331,8 +1350,11 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling } } || tryLiftedToThis2 - case _: TypeVar => - recur(tp1, tp2.superType) + case tv: TypeVar => + if tv.isInstantiated then + recur(tp1, tp2.superType) + else + compareAppliedType2(tp2, tv.origin, args2) case tycon2: AnnotatedType if !tycon2.isRefining => recur(tp1, tp2.superType) case tycon2: AppliedType => @@ -1435,11 +1457,11 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling if tp2 eq NoType then false else if tp1 eq tp2 then true else - val saved = constraint - val savedGadt = ctx.gadt.fresh + val savedCstr = constraint + val savedGadt = ctx.gadt inline def restore() = - state.constraint = saved - ctx.gadt.restore(savedGadt) + state.constraint = savedCstr + ctx.gadtState.restore(savedGadt) val savedSuccessCount = successCount try recCount += 1 @@ -1845,16 +1867,17 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling */ private def necessaryEither(op1: => Boolean, op2: => Boolean): Boolean = val preConstraint = constraint - val preGadt = ctx.gadt.fresh + val preGadt = ctx.gadt def allSubsumes(leftGadt: GadtConstraint, rightGadt: GadtConstraint, left: Constraint, right: Constraint): Boolean = - subsumes(left, right, preConstraint) && preGadt.subsumes(leftGadt, rightGadt, preGadt) + subsumes(left, right, preConstraint) + && subsumes(leftGadt.constraint, rightGadt.constraint, preGadt.constraint) if op1 then val op1Constraint = constraint - val op1Gadt = ctx.gadt.fresh + val op1Gadt = ctx.gadt constraint = preConstraint - ctx.gadt.restore(preGadt) + ctx.gadtState.restore(preGadt) if op2 then if allSubsumes(op1Gadt, ctx.gadt, op1Constraint, constraint) then gadts.println(i"GADT CUT - prefer ${ctx.gadt} over $op1Gadt") @@ -1863,15 +1886,15 @@ class TypeComparer(@constructorOnly initctx: 
Context) extends ConstraintHandling gadts.println(i"GADT CUT - prefer $op1Gadt over ${ctx.gadt}") constr.println(i"CUT - prefer $op1Constraint over $constraint") constraint = op1Constraint - ctx.gadt.restore(op1Gadt) + ctx.gadtState.restore(op1Gadt) else gadts.println(i"GADT CUT - no constraint is preferable, reverting to $preGadt") constr.println(i"CUT - no constraint is preferable, reverting to $preConstraint") constraint = preConstraint - ctx.gadt.restore(preGadt) + ctx.gadtState.restore(preGadt) else constraint = op1Constraint - ctx.gadt.restore(op1Gadt) + ctx.gadtState.restore(op1Gadt) true else op2 end necessaryEither @@ -2043,10 +2066,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling gadts.println(i"narrow gadt bound of $tparam: ${tparam.info} from ${if (isUpper) "above" else "below"} to $bound ${bound.toString} ${bound.isRef(tparam)}") if (bound.isRef(tparam)) false else - val savedGadt = ctx.gadt.fresh - val success = gadtAddBound(tparam, bound, isUpper) - if !success then ctx.gadt.restore(savedGadt) - success + ctx.gadtState.rollbackGadtUnless(gadtAddBound(tparam, bound, isUpper)) } } @@ -2110,7 +2130,10 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling case nil => formals2.isEmpty } - loop(tp1.paramInfos, tp2.paramInfos) + // If methods have erased parameters, then the erased parameters must match + val erasedValid = (!tp1.hasErasedParams && !tp2.hasErasedParams) || (tp1.erasedParams == tp2.erasedParams) + + erasedValid && loop(tp1.paramInfos, tp2.paramInfos) } /** Do the parameter types of `tp1` and `tp2` match in a way that allows `tp1` @@ -2713,7 +2736,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling x && { t.dealias match { case tp: TypeRef if !tp.symbol.isClass => false - case _: SkolemType | _: TypeVar | _: TypeParamRef => false + case _: SkolemType | _: TypeVar | _: TypeParamRef | _: TypeBounds => false case _ => foldOver(x, t) } } @@ -3157,7 
+3180,7 @@ class TrackingTypeComparer(initctx: Context) extends TypeComparer(initctx) { tp case Nil => val casesText = MatchTypeTrace.noMatchesText(scrut, cases) - throw new TypeError(s"Match type reduction $casesText") + throw TypeError(em"Match type reduction $casesText") inFrozenConstraint { // Empty types break the basic assumption that if a scrutinee and a diff --git a/compiler/src/dotty/tools/dotc/core/TypeErasure.scala b/compiler/src/dotty/tools/dotc/core/TypeErasure.scala index 1fc7ee5d22a8..9bcb3eca36bb 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeErasure.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeErasure.scala @@ -520,8 +520,9 @@ object TypeErasure { case _: ClassInfo => true case _ => false } - case tp: TypeParamRef => false - case tp: TypeBounds => false + case _: TypeParamRef => false + case _: TypeBounds => false + case _: MatchType => false case tp: TypeProxy => hasStableErasure(tp.translucentSuperType) case tp: AndType => hasStableErasure(tp.tp1) && hasStableErasure(tp.tp2) case tp: OrType => hasStableErasure(tp.tp1) && hasStableErasure(tp.tp2) @@ -535,7 +536,14 @@ object TypeErasure { val paramss = res.paramNamess assert(paramss.length == 1) erasure(defn.FunctionType(paramss.head.length, - isContextual = res.isImplicitMethod, isErased = res.isErasedMethod)) + isContextual = res.isImplicitMethod)) + + def eraseErasedFunctionApply(erasedFn: MethodType)(using Context): Type = + val fnType = defn.FunctionType( + n = erasedFn.erasedParams.count(_ == false), + isContextual = erasedFn.isContextualMethod, + ) + erasure(fnType) } import TypeErasure._ @@ -591,9 +599,9 @@ class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConst tp case tp: TypeRef => val sym = tp.symbol - if (!sym.isClass) this(tp.translucentSuperType) - else if (semiEraseVCs && isDerivedValueClass(sym)) eraseDerivedValueClass(tp) - else if (defn.isSyntheticFunctionClass(sym)) defn.functionTypeErasure(sym) + if !sym.isClass then 
this(checkedSuperType(tp)) + else if semiEraseVCs && isDerivedValueClass(sym) then eraseDerivedValueClass(tp) + else if defn.isSyntheticFunctionClass(sym) then defn.functionTypeErasure(sym) else eraseNormalClassRef(tp) case tp: AppliedType => val tycon = tp.tycon @@ -601,7 +609,7 @@ class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConst else if (tycon.isRef(defn.PairClass)) erasePair(tp) else if (tp.isRepeatedParam) apply(tp.translateFromRepeated(toArray = sourceLanguage.isJava)) else if (semiEraseVCs && isDerivedValueClass(tycon.classSymbol)) eraseDerivedValueClass(tp) - else apply(tp.translucentSuperType) + else this(checkedSuperType(tp)) case tp: TermRef => this(underlyingOfTermRef(tp)) case _: ThisType => @@ -612,6 +620,8 @@ class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConst defn.FunctionType(0) case RefinedType(parent, nme.apply, refinedInfo) if parent.typeSymbol eq defn.PolyFunctionClass => erasePolyFunctionApply(refinedInfo) + case RefinedType(parent, nme.apply, refinedInfo: MethodType) if defn.isErasedFunctionType(parent) => + eraseErasedFunctionApply(refinedInfo) case tp: TypeProxy => this(tp.underlying) case tp @ AndType(tp1, tp2) => @@ -638,7 +648,13 @@ class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConst case tp: MethodType => def paramErasure(tpToErase: Type) = erasureFn(sourceLanguage, semiEraseVCs, isConstructor, isSymbol, wildcardOK)(tpToErase) - val (names, formals0) = if (tp.isErasedMethod) (Nil, Nil) else (tp.paramNames, tp.paramInfos) + val (names, formals0) = if tp.hasErasedParams then + tp.paramNames + .zip(tp.paramInfos) + .zip(tp.erasedParams) + .collect{ case (param, isErased) if !isErased => param } + .unzip + else (tp.paramNames, tp.paramInfos) val formals = formals0.mapConserve(paramErasure) eraseResult(tp.resultType) match { case rt: MethodType => @@ -689,6 +705,18 @@ class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConst tp } + 
/** Like translucentSuperType, but issue a fatal error if it does not exist. */ + private def checkedSuperType(tp: TypeProxy)(using Context): Type = + val tp1 = tp.translucentSuperType + if !tp1.exists then + val msg = tp.typeConstructor match + case tycon: TypeRef => + MissingType(tycon.prefix, tycon.name).toMessage.message + case _ => + i"Cannot resolve reference to $tp" + throw FatalError(msg) + tp1 + /** Widen term ref, skipping any `()` parameter of an eventual getter. Used to erase a TermRef. * Since getters are introduced after erasure, one would think that erasing a TermRef * could just use `widen`. However, it's possible that the TermRef got read from a class @@ -815,7 +843,7 @@ class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConst throw new MissingType(tp.prefix, tp.name) val sym = tp.symbol if (!sym.isClass) { - val info = tp.translucentSuperType + val info = checkedSuperType(tp) if (!info.exists) assert(false, i"undefined: $tp with symbol $sym") return sigName(info) } @@ -841,7 +869,7 @@ class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConst sigName( // todo: what about repeatedParam? 
if (erasureDependsOnArgs(sym)) this(tp) else if (sym.isClass) tp.underlying - else tp.translucentSuperType) + else checkedSuperType(tp)) case ErasedValueType(_, underlying) => sigName(underlying) case JavaArrayType(elem) => @@ -858,6 +886,8 @@ class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConst // because RefinedTypes <: TypeProxy and it would be caught by // the case immediately below sigName(this(tp)) + case tp @ RefinedType(parent, nme.apply, refinedInfo) if defn.isErasedFunctionType(parent) => + sigName(this(tp)) case tp: TypeProxy => sigName(tp.underlying) case tp: WildcardType => diff --git a/compiler/src/dotty/tools/dotc/core/TypeErrors.scala b/compiler/src/dotty/tools/dotc/core/TypeErrors.scala index a3b594eb0f09..24a207da6836 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeErrors.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeErrors.scala @@ -12,39 +12,58 @@ import Denotations._ import Decorators._ import reporting._ import ast.untpd -import config.Printers.cyclicErrors - -class TypeError(msg: String) extends Exception(msg) { - def this() = this("") - final def toMessage(using Context): Message = - withMode(Mode.Printing)(produceMessage) - def produceMessage(using Context): Message = super.getMessage.nn.toMessage - override def getMessage: String = super.getMessage.nn -} - -class MalformedType(pre: Type, denot: Denotation, absMembers: Set[Name]) extends TypeError { - override def produceMessage(using Context): Message = - i"malformed type: $pre is not a legal prefix for $denot because it contains abstract type member${if (absMembers.size == 1) "" else "s"} ${absMembers.mkString(", ")}" - .toMessage -} - -class MissingType(pre: Type, name: Name) extends TypeError { +import config.Printers.{cyclicErrors, noPrinter} + +import scala.annotation.constructorOnly + +abstract class TypeError(using creationContext: Context) extends Exception(""): + + /** Will the stack trace of this exception be filled in? 
+ * This is expensive and only useful for debugging purposes. + */ + def computeStackTrace: Boolean = + ctx.debug || (cyclicErrors != noPrinter && this.isInstanceOf[CyclicReference] && !(ctx.mode is Mode.CheckCyclic)) + + override def fillInStackTrace(): Throwable = + if computeStackTrace then super.fillInStackTrace().nn + else this + + /** Convert to message. This takes an additional Context, so that we + * use the context when the message is first produced, i.e. when the TypeError + * is handled. This makes a difference for CyclicErrors since we need to know + * the context where the completed symbol is referenced, but the creation + * context of the CyclicReference is the completion context for the symbol. + * See i2887b for a test case, where we want to see + * "recursive or overloaded method needs result type". + */ + def toMessage(using Context): Message + + /** Uses creationContext to produce the message */ + override def getMessage: String = toMessage.message + +object TypeError: + def apply(msg: Message)(using Context) = new TypeError: + def toMessage(using Context) = msg +end TypeError + +class MalformedType(pre: Type, denot: Denotation, absMembers: Set[Name])(using Context) extends TypeError: + def toMessage(using Context) = em"malformed type: $pre is not a legal prefix for $denot because it contains abstract type member${if (absMembers.size == 1) "" else "s"} ${absMembers.mkString(", ")}" + +class MissingType(pre: Type, name: Name)(using Context) extends TypeError: private def otherReason(pre: Type)(using Context): String = pre match { case pre: ThisType if pre.cls.givenSelfType.exists => i"\nor the self type of $pre might not contain all transitive dependencies" case _ => "" } - override def produceMessage(using Context): Message = { - if (ctx.debug) printStackTrace() - i"""cannot resolve reference to type $pre.$name - |the classfile defining the type might be missing from the classpath${otherReason(pre)}""" - .toMessage - } -} + override def 
toMessage(using Context): Message = + if ctx.debug then printStackTrace() + em"""cannot resolve reference to type $pre.$name + |the classfile defining the type might be missing from the classpath${otherReason(pre)}""" +end MissingType -class RecursionOverflow(val op: String, details: => String, val previous: Throwable, val weight: Int) -extends TypeError { +class RecursionOverflow(val op: String, details: => String, val previous: Throwable, val weight: Int)(using Context) +extends TypeError: def explanation: String = s"$op $details" @@ -71,50 +90,51 @@ extends TypeError { (rs.map(_.explanation): List[String]).mkString("\n ", "\n| ", "") } - override def produceMessage(using Context): Message = NoExplanation { + override def toMessage(using Context): Message = val mostCommon = recursions.groupBy(_.op).toList.maxBy(_._2.map(_.weight).sum)._2.reverse - s"""Recursion limit exceeded. - |Maybe there is an illegal cyclic reference? - |If that's not the case, you could also try to increase the stacksize using the -Xss JVM option. - |For the unprocessed stack trace, compile with -Yno-decode-stacktraces. - |A recurring operation is (inner to outer): - |${opsString(mostCommon)}""".stripMargin - } + em"""Recursion limit exceeded. + |Maybe there is an illegal cyclic reference? + |If that's not the case, you could also try to increase the stacksize using the -Xss JVM option. + |For the unprocessed stack trace, compile with -Yno-decode-stacktraces. + |A recurring operation is (inner to outer): + |${opsString(mostCommon).stripMargin}""" override def fillInStackTrace(): Throwable = this override def getStackTrace(): Array[StackTraceElement] = previous.getStackTrace().asInstanceOf -} +end RecursionOverflow /** Post-process exceptions that might result from StackOverflow to add * tracing information while unwalking the stack. */ // Beware: Since this object is only used when handling a StackOverflow, this code // cannot consume significant amounts of stack. 
-object handleRecursive { +object handleRecursive: + inline def underlyingStackOverflowOrNull(exc: Throwable): Throwable | Null = + var e: Throwable | Null = exc + while e != null && !e.isInstanceOf[StackOverflowError] do e = e.getCause + e + def apply(op: String, details: => String, exc: Throwable, weight: Int = 1)(using Context): Nothing = - if (ctx.settings.YnoDecodeStacktraces.value) + if ctx.settings.YnoDecodeStacktraces.value then throw exc - else - exc match { - case _: RecursionOverflow => - throw new RecursionOverflow(op, details, exc, weight) - case _ => - var e: Throwable | Null = exc - while (e != null && !e.isInstanceOf[StackOverflowError]) e = e.getCause - if (e != null) throw new RecursionOverflow(op, details, e, weight) - else throw exc - } -} + else exc match + case _: RecursionOverflow => + throw new RecursionOverflow(op, details, exc, weight) + case _ => + val so = underlyingStackOverflowOrNull(exc) + if so != null then throw new RecursionOverflow(op, details, so, weight) + else throw exc +end handleRecursive /** * This TypeError signals that completing denot encountered a cycle: it asked for denot.info (or similar), * so it requires knowing denot already. 
* @param denot */ -class CyclicReference private (val denot: SymDenotation) extends TypeError { +class CyclicReference private (val denot: SymDenotation)(using Context) extends TypeError: var inImplicitSearch: Boolean = false - override def produceMessage(using Context): Message = { + override def toMessage(using Context): Message = val cycleSym = denot.symbol // cycleSym.flags would try completing denot and would fail, but here we can use flagsUNSAFE to detect flags @@ -151,19 +171,16 @@ class CyclicReference private (val denot: SymDenotation) extends TypeError { CyclicReferenceInvolving(denot) errorMsg(ctx) - } -} + end toMessage -object CyclicReference { - def apply(denot: SymDenotation)(using Context): CyclicReference = { +object CyclicReference: + def apply(denot: SymDenotation)(using Context): CyclicReference = val ex = new CyclicReference(denot) - if (!(ctx.mode is Mode.CheckCyclic) || ctx.settings.Ydebug.value) { + if ex.computeStackTrace then cyclicErrors.println(s"Cyclic reference involving! 
$denot") val sts = ex.getStackTrace.asInstanceOf[Array[StackTraceElement]] for (elem <- sts take 200) cyclicErrors.println(elem.toString) - } ex - } -} +end CyclicReference diff --git a/compiler/src/dotty/tools/dotc/core/TypeEval.scala b/compiler/src/dotty/tools/dotc/core/TypeEval.scala index 7ec0f12db3b6..b5684b07f181 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeEval.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeEval.scala @@ -91,7 +91,7 @@ object TypeEval: val result = try op catch case e: Throwable => - throw new TypeError(e.getMessage.nn) + throw TypeError(em"${e.getMessage.nn}") ConstantType(Constant(result)) def constantFold1[T](extractor: Type => Option[T], op: T => Any): Option[Type] = diff --git a/compiler/src/dotty/tools/dotc/core/TypeOps.scala b/compiler/src/dotty/tools/dotc/core/TypeOps.scala index c9b2d3334f47..6809e4b9083c 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeOps.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeOps.scala @@ -2,7 +2,7 @@ package dotty.tools package dotc package core -import Contexts._, Types._, Symbols._, Names._, Flags._ +import Contexts._, Types._, Symbols._, Names._, NameKinds.*, Flags._ import SymDenotations._ import util.Spans._ import util.Stats @@ -13,6 +13,7 @@ import ast.tpd._ import reporting.trace import config.Printers.typr import config.Feature +import transform.SymUtils.* import typer.ProtoTypes._ import typer.ForceDegree import typer.Inferencing._ @@ -186,7 +187,7 @@ object TypeOps: if (normed.exists) normed else mapOver case tp: MethodicType => // See documentation of `Types#simplified` - val addTypeVars = new TypeMap: + val addTypeVars = new TypeMap with IdempotentCaptRefMap: val constraint = ctx.typerState.constraint def apply(t: Type): Type = t match case t: TypeParamRef => constraint.typeVarOfParam(t).orElse(t) @@ -504,7 +505,7 @@ object TypeOps: override def derivedSelect(tp: NamedType, pre: Type) = if (pre eq tp.prefix) tp - else tryWiden(tp, tp.prefix).orElse { + else (if 
pre.isSingleton then NoType else tryWiden(tp, tp.prefix)).orElse { if (tp.isTerm && variance > 0 && !pre.isSingleton) apply(tp.info.widenExpr) else if (upper(pre).member(tp.name).exists) @@ -539,6 +540,18 @@ object TypeOps: val sym = tp.symbol forbidden.contains(sym) + /** We need to split the set into upper and lower approximations + * only if it contains a local element. The idea here is that at the + * time we perform an `avoid` all local elements are already accounted for + * and no further elements will be added afterwards. So we can just keep + * the set as it is. See comment by @linyxus on #16261. + */ + override def needsRangeIfInvariant(refs: CaptureSet): Boolean = + refs.elems.exists { + case ref: TermRef => toAvoid(ref) + case _ => false + } + override def apply(tp: Type): Type = tp match case tp: TypeVar if mapCtx.typerState.constraint.contains(tp) => val lo = TypeComparer.instanceType( @@ -609,7 +622,7 @@ object TypeOps: boundss: List[TypeBounds], instantiate: (Type, List[Type]) => Type, app: Type)( - using Context): List[BoundsViolation] = withMode(Mode.CheckBounds) { + using Context): List[BoundsViolation] = withMode(Mode.CheckBoundsOrSelfType) { val argTypes = args.tpes /** Replace all wildcards in `tps` with `#` where `` is the @@ -674,8 +687,8 @@ object TypeOps: val bound1 = massage(bound) if (bound1 ne bound) { if (checkCtx eq ctx) checkCtx = ctx.fresh.setFreshGADTBounds - if (!checkCtx.gadt.contains(sym)) checkCtx.gadt.addToConstraint(sym) - checkCtx.gadt.addBound(sym, bound1, fromBelow) + if (!checkCtx.gadt.contains(sym)) checkCtx.gadtState.addToConstraint(sym) + checkCtx.gadtState.addBound(sym, bound1, fromBelow) typr.println("install GADT bound $bound1 for when checking F-bounded $sym") } } @@ -726,7 +739,7 @@ object TypeOps: * If the subtyping is true, the instantiated type `p.child[Vs]` is * returned. Otherwise, `NoType` is returned. 
*/ - def refineUsingParent(parent: Type, child: Symbol)(using Context): Type = { + def refineUsingParent(parent: Type, child: Symbol, mixins: List[Type] = Nil)(using Context): Type = { // is a place holder from Scalac, it is hopeless to instantiate it. // // Quote from scalac (from nsc/symtab/classfile/Pickler.scala): @@ -741,7 +754,7 @@ object TypeOps: val childTp = if (child.isTerm) child.termRef else child.typeRef inContext(ctx.fresh.setExploreTyperState().setFreshGADTBounds.addMode(Mode.GadtConstraintInference)) { - instantiateToSubType(childTp, parent).dealias + instantiateToSubType(childTp, parent, mixins).dealias } } @@ -752,7 +765,7 @@ object TypeOps: * * Otherwise, return NoType. */ - private def instantiateToSubType(tp1: NamedType, tp2: Type)(using Context): Type = { + private def instantiateToSubType(tp1: NamedType, tp2: Type, mixins: List[Type])(using Context): Type = trace(i"instantiateToSubType($tp1, $tp2, $mixins)", typr) { // In order for a child type S to qualify as a valid subtype of the parent // T, we need to test whether it is possible S <: T. // @@ -826,22 +839,57 @@ object TypeOps: } } - // Prefix inference, replace `p.C.this.Child` with `X.Child` where `X <: p.C` - // Note: we need to strip ThisType in `p` recursively. + /** Gather GADT symbols and `ThisType`s found in `tp2`, ie. the scrutinee. 
*/ + object TraverseTp2 extends TypeTraverser: + val thisTypes = util.HashSet[ThisType]() + val gadtSyms = new mutable.ListBuffer[Symbol] + + def traverse(tp: Type) = { + val tpd = tp.dealias + if tpd ne tp then traverse(tpd) + else tp match + case tp: ThisType if !tp.tref.symbol.isStaticOwner && !thisTypes.contains(tp) => + thisTypes += tp + traverseChildren(tp.tref) + case tp: TypeRef if tp.symbol.isAbstractOrParamType => + gadtSyms += tp.symbol + traverseChildren(tp) + val owners = Iterator.iterate(tp.symbol)(_.maybeOwner).takeWhile(_.exists) + for sym <- owners do + // add ThisType's for the classes symbols in the ownership of `tp` + // for example, i16451.CanForward.scala, add `Namer.this`, as one of the owners of the type parameter `A1` + if sym.isClass && !sym.isAnonymousClass && !sym.isStaticOwner then + traverse(sym.thisType) + case _ => + traverseChildren(tp) + } + TraverseTp2.traverse(tp2) + val thisTypes = TraverseTp2.thisTypes + val gadtSyms = TraverseTp2.gadtSyms.toList + + // Prefix inference, given `p.C.this.Child`: + // 1. return it as is, if `C.this` is found in `tp`, i.e. the scrutinee; or + // 2. replace it with `X.Child` where `X <: p.C`, stripping ThisType in `p` recursively. // - // See tests/patmat/i3938.scala + // See tests/patmat/i3938.scala, tests/pos/i15029.more.scala, tests/pos/i16785.scala class InferPrefixMap extends TypeMap { var prefixTVar: Type | Null = null def apply(tp: Type): Type = tp match { - case ThisType(tref: TypeRef) if !tref.symbol.isStaticOwner => - if (tref.symbol.is(Module)) - TermRef(this(tref.prefix), tref.symbol.sourceModule) + case tp @ ThisType(tref) if !tref.symbol.isStaticOwner => + val symbol = tref.symbol + if thisTypes.contains(tp) then + prefixTVar = tp // e.g. 
tests/pos/i16785.scala, keep Outer.this + prefixTVar.uncheckedNN + else if symbol.is(Module) then + TermRef(this(tref.prefix), symbol.sourceModule) else if (prefixTVar != null) this(tref) else { prefixTVar = WildcardType // prevent recursive call from assigning it - val tref2 = this(tref.applyIfParameterized(tref.typeParams.map(_ => TypeBounds.empty))) - prefixTVar = newTypeVar(TypeBounds.upper(tref2)) + // e.g. tests/pos/i15029.more.scala, create a TypeVar for `Instances`' B, so we can disregard `Ints` + val tvars = tref.typeParams.map { tparam => newTypeVar(tparam.paramInfo.bounds, DepParamName.fresh(tparam.paramName)) } + val tref2 = this(tref.applyIfParameterized(tvars)) + prefixTVar = newTypeVar(TypeBounds.upper(tref2), DepParamName.fresh(tref.name)) prefixTVar.uncheckedNN } case tp => mapOver(tp) @@ -849,15 +897,11 @@ object TypeOps: } val inferThisMap = new InferPrefixMap - val tvars = tp1.typeParams.map { tparam => newTypeVar(tparam.paramInfo.bounds) } + val tvars = tp1.typeParams.map { tparam => newTypeVar(tparam.paramInfo.bounds, DepParamName.fresh(tparam.paramName)) } val protoTp1 = inferThisMap.apply(tp1).appliedTo(tvars) - val getAbstractSymbols = new TypeAccumulator[List[Symbol]]: - def apply(xs: List[Symbol], tp: Type) = tp.dealias match - case tp: TypeRef if tp.symbol.exists && !tp.symbol.isClass => foldOver(tp.symbol :: xs, tp) - case tp => foldOver(xs, tp) - val syms2 = getAbstractSymbols(Nil, tp2).reverse - if syms2.nonEmpty then ctx.gadt.addToConstraint(syms2) + if gadtSyms.nonEmpty then + ctx.gadtState.addToConstraint(gadtSyms) // If parent contains a reference to an abstract type, then we should // refine subtype checking to eliminate abstract types according to @@ -869,10 +913,7 @@ object TypeOps: } def instantiate(): Type = { - // if there's a change in variance in type parameters (between subtype tp1 and supertype tp2) - // then we don't want to maximise the type variables in the wrong direction. 
- // For instance 15967, A[-Z] and B[Y] extends A[Y], we don't want to maximise Y to Any - maximizeType(protoTp1.baseType(tp2.classSymbol), NoSpan) + for tp <- mixins.reverseIterator do protoTp1 <:< tp maximizeType(protoTp1, NoSpan) wildApprox(protoTp1) } diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 3243bb242a56..fb66d133c0ba 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -43,6 +43,7 @@ import scala.annotation.internal.sharable import scala.annotation.threadUnsafe import dotty.tools.dotc.transform.SymUtils._ +import dotty.tools.dotc.transform.TypeUtils.isErasedClass object Types { @@ -118,10 +119,9 @@ object Types { if t.mightBeProvisional then t.mightBeProvisional = t match case t: TypeRef => - !t.currentSymbol.isStatic && { + t.currentSymbol.isProvisional || !t.currentSymbol.isStatic && { (t: Type).mightBeProvisional = false // break cycles - t.symbol.isProvisional - || test(t.prefix, theAcc) + test(t.prefix, theAcc) || t.denot.infoOrCompleter.match case info: LazyType => true case info: AliasingBounds => test(info.alias, theAcc) @@ -246,6 +246,11 @@ object Types { case _ => false } + /** Is this type exactly `Any`, or a type lambda ending in `Any`? */ + def isTopOfSomeKind(using Context): Boolean = dealias match + case tp: TypeLambda => tp.resType.isTopOfSomeKind + case _ => isExactlyAny + def isBottomType(using Context): Boolean = if ctx.mode.is(Mode.SafeNulls) && !ctx.phase.erasedTypes then hasClassSymbol(defn.NothingClass) else isBottomTypeAfterErasure @@ -397,6 +402,10 @@ object Types { def isRepeatedParam(using Context): Boolean = typeSymbol eq defn.RepeatedParamClass + /** Is this a parameter type that allows implicit argument converson? */ + def isConvertibleParam(using Context): Boolean = + typeSymbol eq defn.IntoType + /** Is this the type of a method that has a repeated parameter type as * last parameter type? 
*/ @@ -422,7 +431,7 @@ object Types { def isContextualMethod: Boolean = false /** Is this a MethodType for which the parameters will not be used? */ - def isErasedMethod: Boolean = false + def hasErasedParams(using Context): Boolean = false /** Is this a match type or a higher-kinded abstraction of one? */ @@ -536,7 +545,7 @@ object Types { case tp: ClassInfo => tp.cls :: Nil case AndType(l, r) => - l.parentSymbols(include) | r.parentSymbols(include) + l.parentSymbols(include).setUnion(r.parentSymbols(include)) case OrType(l, r) => l.parentSymbols(include) intersect r.parentSymbols(include) // TODO does not conform to spec case _ => @@ -731,43 +740,24 @@ object Types { // TODO: change tp.parent to nullable or other values if ((tp.parent: Type | Null) == null) NoDenotation else if (tp eq pre) go(tp.parent) - else { + else //println(s"find member $pre . $name in $tp") // We have to be careful because we might open the same (wrt eq) recursive type - // twice during findMember which risks picking the wrong prefix in the `substRecThis(rt, pre)` - // call below. To avoid this problem we do a defensive copy of the recursive - // type first. But if we do this always we risk being inefficient and we ran into - // stackoverflows when compiling pos/hk.scala under the refinement encoding - // of hk-types. So we only do a copy if the type - // is visited again in a recursive call to `findMember`, as tracked by `tp.opened`. - // Furthermore, if this happens we mark the original recursive type with `openedTwice` - // which means that we always defensively copy the type in the future. This second - // measure is necessary because findMember calls might be cached, so do not - // necessarily appear in nested order. 
- // Without the defensive copy, Typer.scala fails to compile at the line - // - // untpd.rename(lhsCore, setterName).withType(setterType), WildcardType) - // - // because the subtype check - // - // ThisTree[Untyped]#ThisTree[Typed] <: Tree[Typed] - // - // fails (in fact it thinks the underlying type of the LHS is `Tree[Untyped]`.) - // - // Without the `openedTwice` trick, Typer.scala fails to Ycheck - // at phase resolveSuper. + // twice during findMember with two different prefixes, which risks picking the wrong prefix + // in the `substRecThis(rt, pre)` call below. To avoid this problem we do a defensive copy + // of the recursive type if the new prefix `pre` is neq the prefix with which the + // type was previously opened. + + val openedPre = tp.openedWithPrefix val rt = - if (tp.opened) { // defensive copy - tp.openedTwice = true + if openedPre.exists && (openedPre ne pre) then // defensive copy RecType(rt => tp.parent.substRecThis(tp, rt.recThis)) - } else tp - rt.opened = true + rt.openedWithPrefix = pre try go(rt.parent).mapInfo(_.substRecThis(rt, pre)) - finally - if (!rt.openedTwice) rt.opened = false - } + finally rt.openedWithPrefix = NoType + end goRec def goRefined(tp: RefinedType) = { val pdenot = go(tp.parent) @@ -775,11 +765,11 @@ object Types { val rinfo = tp.refinedInfo if (name.isTypeName && !pinfo.isInstanceOf[ClassInfo]) { // simplified case that runs more efficiently val jointInfo = - if rinfo.isInstanceOf[TypeAlias] && !ctx.mode.is(Mode.CheckBounds) then + if rinfo.isInstanceOf[TypeAlias] && !ctx.mode.is(Mode.CheckBoundsOrSelfType) then // In normal situations, the only way to "improve" on rinfo is to return an empty type bounds // So, we do not lose anything essential in "widening" to rinfo. // We need to compute the precise info only when checking for empty bounds - // which is communicated by the CheckBounds mode. + // which is communicated by the CheckBoundsOrSelfType mode. 
rinfo else if ctx.base.pendingMemberSearches.contains(name) then pinfo safe_& rinfo @@ -818,9 +808,14 @@ object Types { // is made to save execution time in the common case. See i9844.scala for test cases. def qualifies(sd: SingleDenotation) = !sd.symbol.is(Private) || sd.symbol.owner == tp.cls - d match + d.match case d: SingleDenotation => if qualifies(d) then d else NoDenotation case d => d.filterWithPredicate(qualifies) + .orElse: + // Only inaccessible private symbols were found. But there could still be + // shadowed non-private symbols, so as a fallback search for those. + // Test case is i18361.scala. + findMember(name, pre, required, excluded | Private) else d else // There is a special case to handle: @@ -1077,12 +1072,15 @@ object Types { * @param relaxedCheck if true type `Null` becomes a subtype of non-primitive value types in TypeComparer. * @param matchLoosely if true the types `=> T` and `()T` are seen as overriding each other. * @param checkClassInfo if true we check that ClassInfos are within bounds of abstract types + * + * @param isSubType a function used for checking subtype relationships. */ - final def overrides(that: Type, relaxedCheck: Boolean, matchLoosely: => Boolean, checkClassInfo: Boolean = true)(using Context): Boolean = { + final def overrides(that: Type, relaxedCheck: Boolean, matchLoosely: => Boolean, checkClassInfo: Boolean = true, + isSubType: (Type, Type) => Context ?=> Boolean = (tp1, tp2) => tp1 frozen_<:< tp2)(using Context): Boolean = { val overrideCtx = if relaxedCheck then ctx.relaxedOverrideContext else ctx inContext(overrideCtx) { !checkClassInfo && this.isInstanceOf[ClassInfo] - || (this.widenExpr frozen_<:< that.widenExpr) + || isSubType(this.widenExpr, that.widenExpr) || matchLoosely && { val this1 = this.widenNullaryMethod val that1 = that.widenNullaryMethod @@ -1184,7 +1182,8 @@ object Types { /** Remove all AnnotatedTypes wrapping this type. 
*/ - def stripAnnots(using Context): Type = this + def stripAnnots(keep: Annotation => Context ?=> Boolean)(using Context): Type = this + final def stripAnnots(using Context): Type = stripAnnots(_ => false) /** Strip TypeVars and Annotation and CapturingType wrappers */ def stripped(using Context): Type = this @@ -1287,11 +1286,14 @@ object Types { * then the top-level union isn't widened. This is needed so that type inference can infer nullable types. */ def widenUnion(using Context): Type = widen match - case tp @ OrNull(tp1): OrType => - // Don't widen `T|Null`, since otherwise we wouldn't be able to infer nullable unions. - val tp1Widen = tp1.widenUnionWithoutNull - if (tp1Widen.isRef(defn.AnyClass)) tp1Widen - else tp.derivedOrType(tp1Widen, defn.NullType) + case tp: OrType => tp match + case OrNull(tp1) => + // Don't widen `T|Null`, since otherwise we wouldn't be able to infer nullable unions. + val tp1Widen = tp1.widenUnionWithoutNull + if (tp1Widen.isRef(defn.AnyClass)) tp1Widen + else tp.derivedOrType(tp1Widen, defn.NullType) + case _ => + tp.widenUnionWithoutNull case tp => tp.widenUnionWithoutNull @@ -1471,7 +1473,7 @@ object Types { /** Dealias, and if result is a dependent function type, drop the `apply` refinement. 
*/ final def dropDependentRefinement(using Context): Type = dealias match { - case RefinedType(parent, nme.apply, _) => parent + case RefinedType(parent, nme.apply, mt) if defn.isNonRefinedFunction(parent) => parent case tp => tp } @@ -1574,8 +1576,6 @@ object Types { else NoType case SkolemType(tp) => loop(tp) - case pre: WildcardType => - WildcardType case pre: TypeRef => pre.info match { case TypeAlias(alias) => loop(alias) @@ -1713,6 +1713,8 @@ object Types { else NoType case t if defn.isNonRefinedFunction(t) => t + case t if defn.isErasedFunctionType(t) => + t case t @ SAMType(_) => t case _ => @@ -1840,15 +1842,15 @@ object Types { case mt: MethodType if !mt.isParamDependent => val formals1 = if (dropLast == 0) mt.paramInfos else mt.paramInfos dropRight dropLast val isContextual = mt.isContextualMethod && !ctx.erasedTypes - val isErased = mt.isErasedMethod && !ctx.erasedTypes val result1 = mt.nonDependentResultApprox match { case res: MethodType => res.toFunctionType(isJava) case res => res } val funType = defn.FunctionOf( formals1 mapConserve (_.translateFromRepeated(toArray = isJava)), - result1, isContextual, isErased) - if alwaysDependent || mt.isResultDependent then RefinedType(funType, nme.apply, mt) + result1, isContextual) + if alwaysDependent || mt.isResultDependent then + RefinedType(funType, nme.apply, mt) else funType } @@ -1871,6 +1873,11 @@ object Types { def dropRepeatedAnnot(using Context): Type = dropAnnot(defn.RepeatedAnnot) + /** A translation from types of original parameter ValDefs to the types + * of parameters in MethodTypes. + * Translates `Seq[T] @repeated` or `Array[T] @repeated` to `[T]`. + * That way, repeated arguments are made manifest without risk of dropped annotations. 
+ */ def annotatedToRepeated(using Context): Type = this match { case tp @ ExprType(tp1) => tp.derivedExprType(tp1.annotatedToRepeated) @@ -2096,7 +2103,7 @@ object Types { */ final def isTracked(using Context): Boolean = canBeTracked && !captureSetOfInfo.isAlwaysEmpty - /** Is this reference the root capability `*` ? */ + /** Is this reference the root capability `cap` ? */ def isRootCapability(using Context): Boolean = false /** Normalize reference so that it can be compared with `eq` for equality */ @@ -2177,7 +2184,7 @@ object Types { // --- NamedTypes ------------------------------------------------------------------ - abstract class NamedType extends CachedProxyType, ValueType { self => + abstract class NamedType extends CachedProxyType, ValueType, Product { self => type ThisType >: this.type <: NamedType type ThisName <: Name @@ -2185,8 +2192,10 @@ object Types { val prefix: Type def designator: Designator protected def designator_=(d: Designator): Unit + def _1: Type + def _2: Designator - assert(prefix.isValueType || (prefix eq NoPrefix), s"invalid prefix $prefix") + assert(NamedType.validPrefix(prefix), s"invalid prefix $prefix") private var myName: Name | Null = null private var lastDenotation: Denotation | Null = null @@ -2261,15 +2270,17 @@ object Types { final def symbol(using Context): Symbol = // We can rely on checkedPeriod (unlike in the definition of `denot` below) // because SymDenotation#installAfter never changes the symbol - if (checkedPeriod == ctx.period) lastSymbol.nn else computeSymbol + if (checkedPeriod.code == ctx.period.code) lastSymbol.asInstanceOf[Symbol] + else computeSymbol private def computeSymbol(using Context): Symbol = - designator match { + val result = designator match case sym: Symbol => if (sym.isValidInCurrentRun) sym else denot.symbol case name => - (if (denotationIsCurrent) lastDenotation.nn else denot).symbol - } + (if (denotationIsCurrent) lastDenotation.asInstanceOf[Denotation] else denot).symbol + if 
checkedPeriod.code != NowhereCode then checkedPeriod = ctx.period + result /** There is a denotation computed which is valid (somewhere in) the * current run. @@ -2301,18 +2312,16 @@ object Types { def info(using Context): Type = denot.info - /** The denotation currently denoted by this type */ - final def denot(using Context): Denotation = { + /** The denotation currently denoted by this type. Extremely hot. Carefully optimized + * to be as small as possible. + */ + final def denot(using Context): Denotation = util.Stats.record("NamedType.denot") - val now = ctx.period + val lastd = lastDenotation.asInstanceOf[Denotation] // Even if checkedPeriod == now we still need to recheck lastDenotation.validFor // as it may have been mutated by SymDenotation#installAfter - if (checkedPeriod != Nowhere && lastDenotation.nn.validFor.contains(now)) { - checkedPeriod = now - lastDenotation.nn - } + if checkedPeriod.code != NowhereCode && lastd.validFor.contains(ctx.period) then lastd else computeDenot - } private def computeDenot(using Context): Denotation = { util.Stats.record("NamedType.computeDenot") @@ -2348,10 +2357,11 @@ object Types { lastDenotation match { case lastd0: SingleDenotation => val lastd = lastd0.skipRemoved - if (lastd.validFor.runId == ctx.runId && (checkedPeriod != Nowhere)) finish(lastd.current) + if lastd.validFor.runId == ctx.runId && checkedPeriod.code != NowhereCode then + finish(lastd.current) else lastd match { case lastd: SymDenotation => - if (stillValid(lastd) && (checkedPeriod != Nowhere)) finish(lastd.current) + if stillValid(lastd) && checkedPeriod.code != NowhereCode then finish(lastd.current) else finish(memberDenot(lastd.initial.name, allowPrivate = false)) case _ => fromDesignator @@ -2420,12 +2430,12 @@ object Types { } else { if (!ctx.reporter.errorsReported) - throw new TypeError( - i"""bad parameter reference $this at ${ctx.phase} - |the parameter is ${param.showLocated} but the prefix $prefix - |does not define any corresponding 
arguments. - |idx = $idx, args = $args%, %, - |constraint = ${ctx.typerState.constraint}""") + throw TypeError( + em"""bad parameter reference $this at ${ctx.phase} + |the parameter is ${param.showLocated} but the prefix $prefix + |does not define any corresponding arguments. + |idx = $idx, args = $args%, %, + |constraint = ${ctx.typerState.constraint}""") NoDenotation } } @@ -2437,9 +2447,8 @@ object Types { setDenot(memberDenot(name, allowPrivate = !symbol.exists || symbol.is(Private))) private def setDenot(denot: Denotation)(using Context): Unit = { - if (Config.checkNoDoubleBindings) - if (ctx.settings.YnoDoubleBindings.value) - checkSymAssign(denot.symbol) + if ctx.base.checkNoDoubleBindings then + checkSymAssign(denot.symbol) lastDenotation = denot lastSymbol = denot.symbol @@ -2453,6 +2462,8 @@ object Types { } private def checkDenot()(using Context) = {} + //if name.toString == "getConstructor" then + // println(i"set denot of $this to ${denot.info}, ${denot.getClass}, ${Phases.phaseOf(denot.validFor.lastPhaseId)} at ${ctx.phase}") private def checkSymAssign(sym: Symbol)(using Context) = { def selfTypeOf(sym: Symbol) = @@ -2494,10 +2505,49 @@ object Types { /** A reference with the initial symbol in `symd` has an info that * might depend on the given prefix. + * Note: If M is an abstract type or non-final term member in trait or class C, + * its info depends even on C.this if class C has a self type that refines + * the info of M. 
*/ private def infoDependsOnPrefix(symd: SymDenotation, prefix: Type)(using Context): Boolean = + + def refines(tp: Type, name: Name): Boolean = tp match + case tp: TypeRef => + tp.symbol match + case cls: ClassSymbol => + val otherd = cls.nonPrivateMembersNamed(name) + otherd.exists && !otherd.containsSym(symd.symbol) + case tsym => + refines(tsym.info.hiBound, name) + // avoid going through tp.denot, since that might call infoDependsOnPrefix again + case RefinedType(parent, rname, _) => + rname == name || refines(parent, name) + case tp: TypeProxy => + refines(tp.underlying, name) + case AndType(tp1, tp2) => + refines(tp1, name) || refines(tp2, name) + case _ => + false + + def givenSelfTypeOrCompleter(cls: Symbol) = cls.infoOrCompleter match + case cinfo: ClassInfo => + cinfo.selfInfo match + case sym: Symbol => sym.infoOrCompleter + case tpe: Type => tpe + case _ => NoType + symd.maybeOwner.membersNeedAsSeenFrom(prefix) && !symd.is(NonMember) - || prefix.isInstanceOf[Types.ThisType] && symd.is(Opaque) // see pos/i11277.scala for a test where this matters + || prefix.match + case prefix: Types.ThisType => + (symd.isAbstractType + || symd.isTerm + && !symd.flagsUNSAFE.isOneOf(Module | Final | Param) + && !symd.isConstructor + && !symd.maybeOwner.isEffectivelyFinal) + && prefix.sameThis(symd.maybeOwner.thisType) + && refines(givenSelfTypeOrCompleter(prefix.cls), symd.name) + case _ => false + end infoDependsOnPrefix /** Is this a reference to a class or object member with an info that might depend * on the prefix? @@ -2507,10 +2557,7 @@ object Types { case _ => true } - /** (1) Reduce a type-ref `W # X` or `W { ... } # U`, where `W` is a wildcard type - * to an (unbounded) wildcard type. - * - * (2) Reduce a type-ref `T { X = U; ... } # X` to `U` + /** Reduce a type-ref `T { X = U; ... } # X` to `U` * provided `U` does not refer with a RecThis to the * refinement type `T { X = U; ... 
}` */ @@ -2632,45 +2679,33 @@ object Types { case _ => } } - if (prefix.isInstanceOf[WildcardType]) WildcardType + if (prefix.isInstanceOf[WildcardType]) WildcardType.sameKindAs(this) else withPrefix(prefix) } /** A reference like this one, but with the given symbol, if it exists */ - final def withSym(sym: Symbol)(using Context): ThisType = - if ((designator ne sym) && sym.exists) NamedType(prefix, sym).asInstanceOf[ThisType] + private def withSym(sym: Symbol)(using Context): ThisType = + if designator ne sym then NamedType(prefix, sym).asInstanceOf[ThisType] + else this + + private def withName(name: Name)(using Context): ThisType = + if designator ne name then NamedType(prefix, name).asInstanceOf[ThisType] else this /** A reference like this one, but with the given denotation, if it exists. - * Returns a new named type with the denotation's symbol if that symbol exists, and - * one of the following alternatives applies: - * 1. The current designator is a symbol and the symbols differ, or - * 2. The current designator is a name and the new symbolic named type - * does not have a currently known denotation. - * 3. The current designator is a name and the new symbolic named type - * has the same info as the current info - * Otherwise the current denotation is overwritten with the given one. - * - * Note: (2) and (3) are a "lock in mechanism" where a reference with a name as - * designator can turn into a symbolic reference. - * - * Note: This is a subtle dance to keep the balance between going to symbolic - * references as much as we can (since otherwise we'd risk getting cycles) - * and to still not lose any type info in the denotation (since symbolic - * references often recompute their info directly from the symbol's info). - * A test case is neg/opaque-self-encoding.scala. + * Returns a new named type with the denotation's symbol as designator + * if that symbol exists and it is different from the current designator. 
+ * Returns a new named type with the denotations's name as designator + * if the denotation is overloaded and its name is different from the + * current designator. */ final def withDenot(denot: Denotation)(using Context): ThisType = if denot.exists then - val adapted = withSym(denot.symbol) - val result = - if (adapted.eq(this) - || designator.isInstanceOf[Symbol] - || !adapted.denotationIsCurrent - || adapted.info.eq(denot.info)) - adapted + val adapted = + if denot.symbol.exists then withSym(denot.symbol) + else if denot.isOverloaded then withName(denot.name) else this - val lastDenot = result.lastDenotation + val lastDenot = adapted.lastDenotation denot match case denot: SymDenotation if denot.validFor.firstPhaseId < ctx.phase.id @@ -2680,20 +2715,20 @@ object Types { // In this case the new SymDenotation might be valid for all phases, which means // we would not recompute the denotation when travelling to an earlier phase, maybe // in the next run. We fix that problem by creating a UniqueRefDenotation instead. - core.println(i"overwrite ${result.toString} / ${result.lastDenotation}, ${result.lastDenotation.getClass} with $denot at ${ctx.phaseId}") - result.setDenot( + core.println(i"overwrite ${adapted.toString} / ${adapted.lastDenotation}, ${adapted.lastDenotation.getClass} with $denot at ${ctx.phaseId}") + adapted.setDenot( UniqueRefDenotation( denot.symbol, denot.info, Period(ctx.runId, ctx.phaseId, denot.validFor.lastPhaseId), this.prefix)) case _ => - result.setDenot(denot) - result.asInstanceOf[ThisType] + adapted.setDenot(denot) + adapted.asInstanceOf[ThisType] else // don't assign NoDenotation, we might need to recover later. Test case is pos/avoid.scala. this /** A reference like this one, but with the given prefix. 
*/ - final def withPrefix(prefix: Type)(using Context): NamedType = { + final def withPrefix(prefix: Type)(using Context): Type = { def reload(): NamedType = { val lastSym = lastSymbol.nn val allowPrivate = !lastSym.exists || lastSym.is(Private) @@ -2706,6 +2741,7 @@ object Types { NamedType(prefix, name, d) } if (prefix eq this.prefix) this + else if !NamedType.validPrefix(prefix) then UnspecifiedErrorType else if (lastDenotation == null) NamedType(prefix, designator) else designator match { case sym: Symbol => @@ -2897,6 +2933,9 @@ object Types { def apply(prefix: Type, designator: Name, denot: Denotation)(using Context): NamedType = if (designator.isTermName) TermRef.apply(prefix, designator.asTermName, denot) else TypeRef.apply(prefix, designator.asTypeName, denot) + def unapply(tp: NamedType): NamedType = tp + + def validPrefix(prefix: Type): Boolean = prefix.isValueType || (prefix eq NoPrefix) } object TermRef { @@ -3154,9 +3193,8 @@ object Types { */ class RecType(parentExp: RecType => Type) extends RefinedOrRecType with BindingType { - // See discussion in findMember#goRec why these vars are needed - private[Types] var opened: Boolean = false - private[Types] var openedTwice: Boolean = false + // See discussion in findMember#goRec why this field is needed + private[Types] var openedWithPrefix: Type = NoType val parent: Type = parentExp(this: @unchecked) @@ -3311,11 +3349,11 @@ object Types { final class CachedAndType(tp1: Type, tp2: Type) extends AndType(tp1, tp2) object AndType { - def apply(tp1: Type, tp2: Type)(using Context): AndType = { - assert(tp1.isValueTypeOrWildcard && - tp2.isValueTypeOrWildcard, i"$tp1 & $tp2 / " + s"$tp1 & $tp2") + def apply(tp1: Type, tp2: Type)(using Context): AndType = + def where = i"in intersection $tp1 & $tp2" + expectValueTypeOrWildcard(tp1, where) + expectValueTypeOrWildcard(tp2, where) unchecked(tp1, tp2) - } def balanced(tp1: Type, tp2: Type)(using Context): AndType = tp1 match @@ -3355,7 +3393,7 @@ object Types { 
TypeComparer.liftIfHK(tp1, tp2, AndType.make(_, _, checkValid = false), makeHk, _ | _) } - abstract case class OrType(tp1: Type, tp2: Type) extends AndOrType { + abstract case class OrType protected(tp1: Type, tp2: Type) extends AndOrType { def isAnd: Boolean = false def isSoft: Boolean private var myBaseClassesPeriod: Period = Nowhere @@ -3388,9 +3426,6 @@ object Types { myFactorCount else 1 - assert(tp1.isValueTypeOrWildcard && - tp2.isValueTypeOrWildcard, s"$tp1 $tp2") - private var myJoin: Type = _ private var myJoinPeriod: Period = Nowhere @@ -3423,25 +3458,29 @@ object Types { private var myAtoms: Atoms = _ private var myWidened: Type = _ + private def computeAtoms()(using Context): Atoms = + if tp1.hasClassSymbol(defn.NothingClass) then tp2.atoms + else if tp2.hasClassSymbol(defn.NothingClass) then tp1.atoms + else tp1.atoms | tp2.atoms + + private def computeWidenSingletons()(using Context): Type = + val tp1w = tp1.widenSingletons + val tp2w = tp2.widenSingletons + if ((tp1 eq tp1w) && (tp2 eq tp2w)) this else TypeComparer.lub(tp1w, tp2w, isSoft = isSoft) + private def ensureAtomsComputed()(using Context): Unit = if atomsRunId != ctx.runId then - myAtoms = - if tp1.hasClassSymbol(defn.NothingClass) then tp2.atoms - else if tp2.hasClassSymbol(defn.NothingClass) then tp1.atoms - else tp1.atoms | tp2.atoms - val tp1w = tp1.widenSingletons - val tp2w = tp2.widenSingletons - myWidened = if ((tp1 eq tp1w) && (tp2 eq tp2w)) this else TypeComparer.lub(tp1w, tp2w, isSoft = isSoft) - atomsRunId = ctx.runId + myAtoms = computeAtoms() + myWidened = computeWidenSingletons() + if !isProvisional then atomsRunId = ctx.runId override def atoms(using Context): Atoms = ensureAtomsComputed() myAtoms - override def widenSingletons(using Context): Type = { + override def widenSingletons(using Context): Type = ensureAtomsComputed() myWidened - } def derivedOrType(tp1: Type, tp2: Type, soft: Boolean = isSoft)(using Context): Type = if ((tp1 eq this.tp1) && (tp2 eq this.tp2) && 
soft == isSoft) this @@ -3461,6 +3500,9 @@ object Types { object OrType { def apply(tp1: Type, tp2: Type, soft: Boolean)(using Context): OrType = { + def where = i"in union $tp1 | $tp2" + expectValueTypeOrWildcard(tp1, where) + expectValueTypeOrWildcard(tp2, where) assertUnerased() unique(new CachedOrType(tp1, tp2, soft)) } @@ -3491,6 +3533,11 @@ object Types { TypeComparer.liftIfHK(tp1, tp2, OrType(_, _, soft = true), makeHk, _ & _) } + def expectValueTypeOrWildcard(tp: Type, where: => String)(using Context): Unit = + if !tp.isValueTypeOrWildcard then + assert(!ctx.isAfterTyper, where) // we check correct kinds at PostTyper + throw TypeError(em"$tp is not a value type, cannot be used $where") + /** An extractor object to pattern match against a nullable union. * e.g. * @@ -3601,6 +3648,8 @@ object Types { def companion: LambdaTypeCompanion[ThisName, PInfo, This] + def erasedParams(using Context) = List.fill(paramInfos.size)(false) + /** The type `[tparams := paramRefs] tp`, where `tparams` can be * either a list of type parameter symbols or a list of lambda parameters * @@ -3678,7 +3727,11 @@ object Types { else Signature(tp, sourceLanguage) this match case tp: MethodType => - val params = if (isErasedMethod) Nil else tp.paramInfos + val params = if (hasErasedParams) + tp.paramInfos + .zip(tp.erasedParams) + .collect { case (param, isErased) if !isErased => param } + else tp.paramInfos resultSignature.prependTermParams(params, sourceLanguage) case tp: PolyType => resultSignature.prependTypeParams(tp.paramNames.length) @@ -3838,7 +3891,8 @@ object Types { /** Does one of the parameter types contain references to earlier parameters * of this method type which cannot be eliminated by de-aliasing? 
*/ - def isParamDependent(using Context): Boolean = paramDependencyStatus == TrueDeps + def isParamDependent(using Context): Boolean = + paramDependencyStatus == TrueDeps || paramDependencyStatus == CaptureDeps /** Is there a dependency involving a reference in a capture set, but * otherwise no true result dependency? @@ -3885,16 +3939,14 @@ object Types { def companion: MethodTypeCompanion final override def isImplicitMethod: Boolean = - companion.eq(ImplicitMethodType) || - companion.eq(ErasedImplicitMethodType) || - isContextualMethod - final override def isErasedMethod: Boolean = - companion.eq(ErasedMethodType) || - companion.eq(ErasedImplicitMethodType) || - companion.eq(ErasedContextualMethodType) + companion.eq(ImplicitMethodType) || isContextualMethod + final override def hasErasedParams(using Context): Boolean = + erasedParams.contains(true) final override def isContextualMethod: Boolean = - companion.eq(ContextualMethodType) || - companion.eq(ErasedContextualMethodType) + companion.eq(ContextualMethodType) + + override def erasedParams(using Context): List[Boolean] = + paramInfos.map(p => p.hasAnnotation(defn.ErasedParamAnnot)) protected def prefixString: String = companion.prefixString } @@ -3920,10 +3972,15 @@ object Types { protected def toPInfo(tp: Type)(using Context): PInfo + /** If `tparam` is a sealed type parameter symbol of a polymorphic method, add + * a @caps.Sealed annotation to the upperbound in `tp`. 
+ */ + protected def addSealed(tparam: ParamInfo, tp: Type)(using Context): Type = tp + def fromParams[PI <: ParamInfo.Of[N]](params: List[PI], resultType: Type)(using Context): Type = if (params.isEmpty) resultType else apply(params.map(_.paramName))( - tl => params.map(param => toPInfo(tl.integrate(params, param.paramInfo))), + tl => params.map(param => toPInfo(addSealed(param, tl.integrate(params, param.paramInfo)))), tl => tl.integrate(params, resultType)) } @@ -3948,29 +4005,50 @@ object Types { * and inline parameters: * - replace @repeated annotations on Seq or Array types by types * - add @inlineParam to inline parameters + * - add @erasedParam to erased parameters + * - wrap types of parameters that have an @allowConversions annotation with Into[_] */ - def fromSymbols(params: List[Symbol], resultType: Type)(using Context): MethodType = { - def translateInline(tp: Type): Type = tp match { - case ExprType(resType) => ExprType(AnnotatedType(resType, Annotation(defn.InlineParamAnnot))) - case _ => AnnotatedType(tp, Annotation(defn.InlineParamAnnot)) - } - def translateErased(tp: Type): Type = tp match { - case ExprType(resType) => ExprType(AnnotatedType(resType, Annotation(defn.ErasedParamAnnot))) - case _ => AnnotatedType(tp, Annotation(defn.ErasedParamAnnot)) - } - def paramInfo(param: Symbol) = { + def fromSymbols(params: List[Symbol], resultType: Type)(using Context): MethodType = + def addAnnotation(tp: Type, cls: ClassSymbol, param: Symbol): Type = tp match + case ExprType(resType) => ExprType(addAnnotation(resType, cls, param)) + case _ => AnnotatedType(tp, Annotation(cls, param.span)) + + def wrapConvertible(tp: Type) = + AppliedType(defn.IntoType.typeRef, tp :: Nil) + + /** Add `Into[..] to the type itself and if it is a function type, to all its + * curried result type(s) as well. 
+ */ + def addInto(tp: Type): Type = tp match + case tp @ AppliedType(tycon, args) if tycon.typeSymbol == defn.RepeatedParamClass => + tp.derivedAppliedType(tycon, addInto(args.head) :: Nil) + case tp @ AppliedType(tycon, args) if defn.isFunctionType(tp) => + wrapConvertible(tp.derivedAppliedType(tycon, args.init :+ addInto(args.last))) + case tp @ RefinedType(parent, rname, rinfo) if defn.isFunctionOrPolyType(tp) => + wrapConvertible(tp.derivedRefinedType(parent, rname, addInto(rinfo))) + case tp: MethodOrPoly => + tp.derivedLambdaType(resType = addInto(tp.resType)) + case ExprType(resType) => + ExprType(addInto(resType)) + case _ => + wrapConvertible(tp) + + def paramInfo(param: Symbol) = var paramType = param.info.annotatedToRepeated - if (param.is(Inline)) paramType = translateInline(paramType) - if (param.is(Erased)) paramType = translateErased(paramType) + if param.is(Inline) then + paramType = addAnnotation(paramType, defn.InlineParamAnnot, param) + if param.is(Erased) then + paramType = addAnnotation(paramType, defn.ErasedParamAnnot, param) + if param.hasAnnotation(defn.AllowConversionsAnnot) then + paramType = addInto(paramType) paramType - } apply(params.map(_.name.asTermName))( tl => params.map(p => tl.integrate(params, paramInfo(p))), tl => tl.integrate(params, resultType)) - } + end fromSymbols - final def apply(paramNames: List[TermName])(paramInfosExp: MethodType => List[Type], resultTypeExp: MethodType => Type)(using Context): MethodType = + def apply(paramNames: List[TermName])(paramInfosExp: MethodType => List[Type], resultTypeExp: MethodType => Type)(using Context): MethodType = checkValid(unique(new CachedMethodType(paramNames)(paramInfosExp, resultTypeExp, self))) def checkValid(mt: MethodType)(using Context): mt.type = { @@ -3985,19 +4063,14 @@ object Types { } object MethodType extends MethodTypeCompanion("MethodType") { - def companion(isContextual: Boolean = false, isImplicit: Boolean = false, isErased: Boolean = false): MethodTypeCompanion 
= - if (isContextual) - if (isErased) ErasedContextualMethodType else ContextualMethodType - else if (isImplicit) - if (isErased) ErasedImplicitMethodType else ImplicitMethodType - else - if (isErased) ErasedMethodType else MethodType + def companion(isContextual: Boolean = false, isImplicit: Boolean = false): MethodTypeCompanion = + if (isContextual) ContextualMethodType + else if (isImplicit) ImplicitMethodType + else MethodType } - object ErasedMethodType extends MethodTypeCompanion("ErasedMethodType") + object ContextualMethodType extends MethodTypeCompanion("ContextualMethodType") - object ErasedContextualMethodType extends MethodTypeCompanion("ErasedContextualMethodType") object ImplicitMethodType extends MethodTypeCompanion("ImplicitMethodType") - object ErasedImplicitMethodType extends MethodTypeCompanion("ErasedImplicitMethodType") /** A ternary extractor for MethodType */ object MethodTpe { @@ -4229,6 +4302,16 @@ object Types { resultTypeExp: PolyType => Type)(using Context): PolyType = unique(new PolyType(paramNames)(paramInfosExp, resultTypeExp)) + override protected def addSealed(tparam: ParamInfo, tp: Type)(using Context): Type = + tparam match + case tparam: Symbol if tparam.is(Sealed) => + tp match + case tp @ TypeBounds(lo, hi) => + tp.derivedTypeBounds(lo, + AnnotatedType(hi, Annotation(defn.Caps_SealedAnnot, tparam.span))) + case _ => tp + case _ => tp + def unapply(tl: PolyType): Some[(List[LambdaParam], Type)] = Some((tl.typeParams, tl.resType)) } @@ -4741,7 +4824,7 @@ object Types { def hasLowerBound(using Context): Boolean = !currentEntry.loBound.isExactlyNothing /** For uninstantiated type variables: Is the upper bound different from Any? 
*/ - def hasUpperBound(using Context): Boolean = !currentEntry.hiBound.isRef(defn.AnyClass) + def hasUpperBound(using Context): Boolean = !currentEntry.hiBound.isTopOfSomeKind /** Unwrap to instance (if instantiated) or origin (if not), until result * is no longer a TypeVar @@ -4933,9 +5016,9 @@ object Types { if (!givenSelf.isValueType) appliedRef else if (clsd.is(Module)) givenSelf else if (ctx.erasedTypes) appliedRef - else givenSelf match - case givenSelf @ EventuallyCapturingType(tp, _) => - givenSelf.derivedAnnotatedType(tp & appliedRef, givenSelf.annot) + else givenSelf.dealiasKeepAnnots match + case givenSelf1 @ EventuallyCapturingType(tp, _) => + givenSelf1.derivedAnnotatedType(tp & appliedRef, givenSelf1.annot) case _ => AndType(givenSelf, appliedRef) } @@ -5184,6 +5267,10 @@ object Types { else result def emptyPolyKind(using Context): TypeBounds = apply(defn.NothingType, defn.AnyKindType) + /** An interval covering all types of the same kind as `tp`. */ + def emptySameKindAs(tp: Type)(using Context): TypeBounds = + val top = tp.topType + if top.isExactlyAny then empty else apply(defn.NothingType, top) def upper(hi: Type)(using Context): TypeBounds = apply(defn.NothingType, hi) def lower(lo: Type)(using Context): TypeBounds = apply(lo, defn.AnyType) } @@ -5212,7 +5299,10 @@ object Types { override def stripTypeVar(using Context): Type = derivedAnnotatedType(parent.stripTypeVar, annot) - override def stripAnnots(using Context): Type = parent.stripAnnots + override def stripAnnots(keep: Annotation => (Context) ?=> Boolean)(using Context): Type = + val p = parent.stripAnnots(keep) + if keep(annot) then derivedAnnotatedType(p, annot) + else p override def stripped(using Context): Type = parent.stripped @@ -5291,7 +5381,12 @@ object Types { abstract class FlexType extends UncachedGroundType with ValueType abstract class ErrorType extends FlexType { + + /** An explanation of the cause of the failure */ def msg(using Context): Message + + /** An explanation of 
the cause of the failure as a string */ + def explanation(using Context): String = msg.message } object ErrorType: @@ -5299,18 +5394,16 @@ object Types { val et = new PreviousErrorType ctx.base.errorTypeMsg(et) = m et - def apply(s: => String)(using Context): ErrorType = - apply(s.toMessage) end ErrorType class PreviousErrorType extends ErrorType: def msg(using Context): Message = ctx.base.errorTypeMsg.get(this) match case Some(m) => m - case None => "error message from previous run no longer available".toMessage + case None => em"error message from previous run no longer available" object UnspecifiedErrorType extends ErrorType { - override def msg(using Context): Message = "unspecified error".toMessage + override def msg(using Context): Message = em"unspecified error" } /* Type used to track Select nodes that could not resolve a member and their qualifier is a scala.Dynamic. */ @@ -5356,6 +5449,9 @@ object Types { else result else unique(CachedWildcardType(bounds)) + /** A wildcard matching any type of the same kind as `tp`. */ + def sameKindAs(tp: Type)(using Context): WildcardType = + apply(TypeBounds.emptySameKindAs(tp)) } /** An extractor for single abstract method types. @@ -5497,6 +5593,14 @@ object Types { stop == StopAt.Static && tp.currentSymbol.isStatic && isStaticPrefix(tp.prefix) || stop == StopAt.Package && tp.currentSymbol.is(Package) } + + /** The type parameters of the constructor of this applied type. + * Overridden in OrderingConstraint's ConstraintAwareTraversal to take account + * of instantiations in the constraint that are not yet propagated to the + * instance types of type variables. + */ + protected def tyconTypeParams(tp: AppliedType)(using Context): List[ParamInfo] = + tp.tyconTypeParams end VariantTraversal /** A supertrait for some typemaps that are bijections. Used for capture checking. 
@@ -5604,17 +5708,11 @@ object Types { case tp: NamedType => if stopBecauseStaticOrLocal(tp) then tp else - val prefix1 = atVariance(variance max 0)(this(tp.prefix)) - // A prefix is never contravariant. Even if say `p.A` is used in a contravariant - // context, we cannot assume contravariance for `p` because `p`'s lower - // bound might not have a binding for `A` (e.g. the lower bound could be `Nothing`). - // By contrast, covariance does translate to the prefix, since we have that - // if `p <: q` then `p.A <: q.A`, and well-formedness requires that `A` is a member - // of `p`'s upper bound. + val prefix1 = atVariance(variance max 0)(this(tp.prefix)) // see comment of TypeAccumulator's applyToPrefix derivedSelect(tp, prefix1) case tp: AppliedType => - derivedAppliedType(tp, this(tp.tycon), mapArgs(tp.args, tp.tyconTypeParams)) + derivedAppliedType(tp, this(tp.tycon), mapArgs(tp.args, tyconTypeParams(tp))) case tp: LambdaType => mapOverLambda(tp) @@ -5662,6 +5760,12 @@ object Types { case tp @ SuperType(thistp, supertp) => derivedSuperType(tp, this(thistp), this(supertp)) + case tp @ ConstantType(const @ Constant(_: Type)) => + val classType = const.tpe + val classType1 = this(classType) + if classType eq classType1 then tp + else classType1 + case tp: LazyRef => LazyRef { refCtx => given Context = refCtx @@ -5941,7 +6045,7 @@ object Types { case nil => true } - if (distributeArgs(args, tp.tyconTypeParams)) + if (distributeArgs(args, tyconTypeParams(tp))) range(tp.derivedAppliedType(tycon, loBuf.toList), tp.derivedAppliedType(tycon, hiBuf.toList)) else if tycon.isLambdaSub || args.exists(isRangeOfNonTermTypes) then @@ -6025,8 +6129,11 @@ object Types { tp.derivedLambdaType(tp.paramNames, formals, restpe) } + /** Overridden in TypeOps.avoid */ + protected def needsRangeIfInvariant(refs: CaptureSet): Boolean = true + override def mapCapturingType(tp: Type, parent: Type, refs: CaptureSet, v: Int): Type = - if v == 0 then + if v == 0 && needsRangeIfInvariant(refs) 
then range(mapCapturingType(tp, parent, refs, -1), mapCapturingType(tp, parent, refs, 1)) else super.mapCapturingType(tp, parent, refs, v) @@ -6037,14 +6144,10 @@ object Types { /** A range of possible types between lower bound `lo` and upper bound `hi`. * Only used internally in `ApproximatingTypeMap`. */ - case class Range(lo: Type, hi: Type) extends UncachedGroundType { + case class Range(lo: Type, hi: Type) extends UncachedGroundType: assert(!lo.isInstanceOf[Range]) assert(!hi.isInstanceOf[Range]) - override def toText(printer: Printer): Text = - lo.toText(printer) ~ ".." ~ hi.toText(printer) - } - /** Approximate wildcards by their bounds */ class AvoidWildcardsMap(using Context) extends ApproximatingTypeMap: protected def mapWild(t: WildcardType) = @@ -6063,8 +6166,17 @@ object Types { protected def applyToAnnot(x: T, annot: Annotation): T = x // don't go into annotations - protected final def applyToPrefix(x: T, tp: NamedType): T = - atVariance(variance max 0)(this(x, tp.prefix)) // see remark on NamedType case in TypeMap + /** A prefix is never contravariant. Even if say `p.A` is used in a contravariant + * context, we cannot assume contravariance for `p` because `p`'s lower + * bound might not have a binding for `A`, since the lower bound could be `Nothing`. + * By contrast, covariance does translate to the prefix, since we have that + * if `p <: q` then `p.A <: q.A`, and well-formedness requires that `A` is a member + * of `p`'s upper bound. + * Overridden in OrderingConstraint's ConstraintAwareTraversal, where a + * more relaxed scheme is used. 
+ */ + protected def applyToPrefix(x: T, tp: NamedType): T = + atVariance(variance max 0)(this(x, tp.prefix)) def foldOver(x: T, tp: Type): T = { record(s"foldOver $getClass") @@ -6087,7 +6199,7 @@ object Types { } foldArgs(acc, tparams.tail, args.tail) } - foldArgs(this(x, tycon), tp.tyconTypeParams, args) + foldArgs(this(x, tycon), tyconTypeParams(tp), args) case _: BoundType | _: ThisType => x diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileConstants.scala b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileConstants.scala index 3b05ee351b86..4aa60d973264 100644 --- a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileConstants.scala +++ b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileConstants.scala @@ -346,6 +346,7 @@ object ClassfileConstants { case JAVA_ACC_ENUM => Enum case JAVA_ACC_ABSTRACT => if (isClass) Abstract else Deferred case JAVA_ACC_INTERFACE => PureInterfaceCreationFlags | JavaDefined + case JAVA_ACC_ANNOTATION => JavaAnnotation case _ => EmptyFlags } @@ -353,18 +354,16 @@ object ClassfileConstants { if (jflag == 0) base else base | translateFlag(jflag) private def translateFlags(jflags: Int, baseFlags: FlagSet): FlagSet = { - val nflags = - if ((jflags & JAVA_ACC_ANNOTATION) == 0) jflags - else jflags & ~(JAVA_ACC_ABSTRACT | JAVA_ACC_INTERFACE) // annotations are neither abstract nor interfaces var res: FlagSet = baseFlags | JavaDefined - res = addFlag(res, nflags & JAVA_ACC_PRIVATE) - res = addFlag(res, nflags & JAVA_ACC_PROTECTED) - res = addFlag(res, nflags & JAVA_ACC_FINAL) - res = addFlag(res, nflags & JAVA_ACC_SYNTHETIC) - res = addFlag(res, nflags & JAVA_ACC_STATIC) - res = addFlag(res, nflags & JAVA_ACC_ENUM) - res = addFlag(res, nflags & JAVA_ACC_ABSTRACT) - res = addFlag(res, nflags & JAVA_ACC_INTERFACE) + res = addFlag(res, jflags & JAVA_ACC_PRIVATE) + res = addFlag(res, jflags & JAVA_ACC_PROTECTED) + res = addFlag(res, jflags & JAVA_ACC_FINAL) + res = addFlag(res, jflags & JAVA_ACC_SYNTHETIC) + res 
= addFlag(res, jflags & JAVA_ACC_STATIC) + res = addFlag(res, jflags & JAVA_ACC_ENUM) + res = addFlag(res, jflags & JAVA_ACC_ABSTRACT) + res = addFlag(res, jflags & JAVA_ACC_INTERFACE) + res = addFlag(res, jflags & JAVA_ACC_ANNOTATION) res } diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala index 4763cd25ff41..0c701eb03d38 100644 --- a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala +++ b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala @@ -165,11 +165,7 @@ class ClassfileParser( * Updates the read pointer of 'in'. */ def parseParents: List[Type] = { val superType = - if (isAnnotation) { - in.nextChar - defn.AnnotationClass.typeRef - } - else if (classRoot.symbol == defn.ComparableClass || + if (classRoot.symbol == defn.ComparableClass || classRoot.symbol == defn.JavaCloneableClass || classRoot.symbol == defn.JavaSerializableClass) { // Treat these interfaces as universal traits @@ -186,7 +182,6 @@ class ClassfileParser( // Consequently, no best implicit for the "Integral" evidence parameter of "range" // is found. Previously, this worked because of weak conformance, which has been dropped. - if (isAnnotation) ifaces = defn.ClassfileAnnotationClass.typeRef :: ifaces superType :: ifaces } @@ -331,7 +326,7 @@ class ClassfileParser( if (isEnum) { val enumClass = sym.owner.linkedClass if (!enumClass.exists) - report.warning(s"no linked class for java enum $sym in ${sym.owner}. A referencing class file might be missing an InnerClasses entry.") + report.warning(em"no linked class for java enum $sym in ${sym.owner}. 
A referencing class file might be missing an InnerClasses entry.") else { if (!enumClass.is(Flags.Sealed)) enumClass.setFlag(Flags.AbstractSealed) enumClass.addAnnotation(Annotation.Child(sym, NoSpan)) @@ -661,7 +656,7 @@ class ClassfileParser( case tp: TypeRef if tp.denot.infoOrCompleter.isInstanceOf[StubInfo] => // Silently ignore missing annotation classes like javac if ctx.debug then - report.warning(i"Error while parsing annotations in ${classfile}: annotation class $tp not present on classpath") + report.warning(em"Error while parsing annotations in ${classfile}: annotation class $tp not present on classpath") None case _ => if (hasError || skip) None @@ -676,7 +671,7 @@ class ClassfileParser( // the classpath would *not* end up here. A class not found is signaled // with a `FatalError` exception, handled above. Here you'd end up after a NPE (for example), // and that should never be swallowed silently. - report.warning("Caught: " + ex + " while parsing annotations in " + classfile) + report.warning(em"Caught: $ex while parsing annotations in $classfile") if (ctx.debug) ex.printStackTrace() None // ignore malformed annotations @@ -758,18 +753,20 @@ class ClassfileParser( case tpnme.ConstantValueATTR => val c = pool.getConstant(in.nextChar) if (c ne null) res.constant = c - else report.warning(s"Invalid constant in attribute of ${sym.showLocated} while parsing ${classfile}") + else report.warning(em"Invalid constant in attribute of ${sym.showLocated} while parsing ${classfile}") case tpnme.MethodParametersATTR => val paramCount = in.nextByte for i <- 0 until paramCount do - val name = pool.getName(in.nextChar) + val index = in.nextChar val flags = in.nextChar - if (flags & JAVA_ACC_SYNTHETIC) == 0 then - res.namedParams += (i -> name.name) + if index != 0 then + val name = pool.getName(index) + if (flags & JAVA_ACC_SYNTHETIC) == 0 then + res.namedParams += (i -> name.name) case tpnme.AnnotationDefaultATTR => - 
sym.addAnnotation(Annotation(defn.AnnotationDefaultAnnot, Nil)) + sym.addAnnotation(Annotation(defn.AnnotationDefaultAnnot, Nil, sym.span)) // Java annotations on classes / methods / fields with RetentionPolicy.RUNTIME case tpnme.RuntimeVisibleAnnotationATTR @@ -845,7 +842,7 @@ class ClassfileParser( class AnnotConstructorCompleter(classInfo: TempClassInfoType) extends LazyType { def complete(denot: SymDenotation)(using Context): Unit = { - val attrs = classInfo.decls.toList.filter(sym => sym.isTerm && sym != denot.symbol) + val attrs = classInfo.decls.toList.filter(sym => sym.isTerm && sym != denot.symbol && sym.name != nme.CONSTRUCTOR) val paramNames = attrs.map(_.name.asTermName) val paramTypes = attrs.map(_.info.resultType) denot.info = MethodType(paramNames, paramTypes, classRoot.typeRef) @@ -972,7 +969,7 @@ class ClassfileParser( } } else { - report.error(s"Could not find $path in ${classfile.underlyingSource}") + report.error(em"Could not find $path in ${classfile.underlyingSource}") Array.empty } case _ => @@ -980,7 +977,7 @@ class ClassfileParser( val name = classfile.name.stripSuffix(".class") + ".tasty" val tastyFileOrNull = dir.lookupName(name, false) if (tastyFileOrNull == null) { - report.error(s"Could not find TASTY file $name under $dir") + report.error(em"Could not find TASTY file $name under $dir") Array.empty } else tastyFileOrNull.toByteArray @@ -1091,10 +1088,10 @@ class ClassfileParser( if (sym == classRoot.symbol) staticScope.lookup(name) else { - var module = sym.companionModule - if (!module.exists && sym.isAbsent()) - module = sym.scalacLinkedClass - module.info.member(name).symbol + var moduleClass = sym.registeredCompanion + if (!moduleClass.exists && sym.isAbsent()) + moduleClass = sym.scalacLinkedClass + moduleClass.info.member(name).symbol } else if (sym == classRoot.symbol) instanceScope.lookup(name) diff --git a/compiler/src/dotty/tools/dotc/core/tasty/CommentPickler.scala 
b/compiler/src/dotty/tools/dotc/core/tasty/CommentPickler.scala index df3e4df497f8..fde6c669045d 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/CommentPickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/CommentPickler.scala @@ -9,36 +9,43 @@ import dotty.tools.tasty.TastyFormat.CommentsSection import java.nio.charset.StandardCharsets -class CommentPickler(pickler: TastyPickler, addrOfTree: tpd.Tree => Addr, docString: untpd.MemberDef => Option[Comment]): - private val buf = new TastyBuffer(5000) - pickler.newSection(CommentsSection, buf) - - def pickleComment(root: tpd.Tree): Unit = traverse(root) - - private def pickleComment(addr: Addr, comment: Comment): Unit = - if addr != NoAddr then - val bytes = comment.raw.getBytes(StandardCharsets.UTF_8).nn - val length = bytes.length - buf.writeAddr(addr) - buf.writeNat(length) - buf.writeBytes(bytes, length) - buf.writeLongInt(comment.span.coords) - - private def traverse(x: Any): Unit = x match - case x: untpd.Tree @unchecked => - x match - case x: tpd.MemberDef @unchecked => // at this point all MembderDefs are t(y)p(e)d. 
- for comment <- docString(x) do pickleComment(addrOfTree(x), comment) - case _ => - val limit = x.productArity - var n = 0 - while n < limit do - traverse(x.productElement(n)) - n += 1 - case y :: ys => - traverse(y) - traverse(ys) - case _ => - +object CommentPickler: + + def pickleComments( + pickler: TastyPickler, + addrOfTree: PositionPickler.TreeToAddr, + docString: untpd.MemberDef => Option[Comment], + root: tpd.Tree, + buf: TastyBuffer = new TastyBuffer(5000)): Unit = + + pickler.newSection(CommentsSection, buf) + + def pickleComment(addr: Addr, comment: Comment): Unit = + if addr != NoAddr then + val bytes = comment.raw.getBytes(StandardCharsets.UTF_8).nn + val length = bytes.length + buf.writeAddr(addr) + buf.writeNat(length) + buf.writeBytes(bytes, length) + buf.writeLongInt(comment.span.coords) + + def traverse(x: Any): Unit = x match + case x: untpd.Tree @unchecked => + x match + case x: tpd.MemberDef @unchecked => // at this point all MembderDefs are t(y)p(e)d. + for comment <- docString(x) do pickleComment(addrOfTree(x), comment) + case _ => + val limit = x.productArity + var n = 0 + while n < limit do + traverse(x.productElement(n)) + n += 1 + case y :: ys => + traverse(y) + traverse(ys) + case _ => + + traverse(root) + end pickleComments end CommentPickler diff --git a/compiler/src/dotty/tools/dotc/core/tasty/NameBuffer.scala b/compiler/src/dotty/tools/dotc/core/tasty/NameBuffer.scala index 623508780325..1ddcf9afe1dc 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/NameBuffer.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/NameBuffer.scala @@ -49,9 +49,12 @@ class NameBuffer extends TastyBuffer(10000) { } } - private def withLength(op: => Unit, lengthWidth: Int = 1): Unit = { + private inline def withLength(inline op: Unit, lengthWidth: Int = 1): Unit = { val lengthAddr = currentAddr - for (i <- 0 until lengthWidth) writeByte(0) + var i = 0 + while i < lengthWidth do + writeByte(0) + i += 1 op val length = currentAddr.index - 
lengthAddr.index - lengthWidth putNat(lengthAddr, length, lengthWidth) @@ -111,11 +114,11 @@ class NameBuffer extends TastyBuffer(10000) { override def assemble(): Unit = { var i = 0 - for ((name, ref) <- nameRefs) { + for (name, ref) <- nameRefs do + val ref = nameRefs(name) assert(ref.index == i) i += 1 pickleNameContents(name) - } } } diff --git a/compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala index ad0c051e1b7b..924b87bec003 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala @@ -8,32 +8,40 @@ import dotty.tools.tasty.TastyBuffer import TastyBuffer._ import ast._ -import Trees.WithLazyField +import Trees.WithLazyFields import util.{SourceFile, NoSource} import core._ import Annotations._, Decorators._ import collection.mutable import util.Spans._ +import reporting.Message -class PositionPickler( - pickler: TastyPickler, - addrOfTree: PositionPickler.TreeToAddr, - treeAnnots: untpd.MemberDef => List[tpd.Tree], - relativePathReference: String){ - +object PositionPickler: import ast.tpd._ - val buf: TastyBuffer = new TastyBuffer(5000) - pickler.newSection(PositionsSection, buf) - - private val pickledIndices = new mutable.BitSet + // Note: This could be just TreeToAddr => Addr if functions are specialized to value classes. 
+ // We use a SAM type to avoid boxing of Addr + @FunctionalInterface + trait TreeToAddr: + def apply(x: untpd.Tree): Addr - def header(addrDelta: Int, hasStartDelta: Boolean, hasEndDelta: Boolean, hasPoint: Boolean): Int = { + def header(addrDelta: Int, hasStartDelta: Boolean, hasEndDelta: Boolean, hasPoint: Boolean): Int = def toInt(b: Boolean) = if (b) 1 else 0 (addrDelta << 3) | (toInt(hasStartDelta) << 2) | (toInt(hasEndDelta) << 1) | toInt(hasPoint) - } - def picklePositions(source: SourceFile, roots: List[Tree], warnings: mutable.ListBuffer[String]): Unit = { + def picklePositions( + pickler: TastyPickler, + addrOfTree: TreeToAddr, + treeAnnots: untpd.MemberDef => List[tpd.Tree], + relativePathReference: String, + source: SourceFile, + roots: List[Tree], + warnings: mutable.ListBuffer[Message], + buf: TastyBuffer = new TastyBuffer(5000), + pickledIndices: mutable.BitSet = new mutable.BitSet) = + + pickler.newSection(PositionsSection, buf) + /** Pickle the number of lines followed by the length of each line */ def pickleLineOffsets(): Unit = { val content = source.content() @@ -79,7 +87,7 @@ class PositionPickler( def alwaysNeedsPos(x: Positioned) = x match { case // initialSpan is inaccurate for trees with lazy field - _: WithLazyField[?] + _: WithLazyFields // A symbol is created before the corresponding tree is unpickled, // and its position cannot be changed afterwards. @@ -128,10 +136,6 @@ class PositionPickler( } for (root <- roots) traverse(root, NoSource) - } -} -object PositionPickler: - // Note: This could be just TreeToAddr => Addr if functions are specialized to value classes. 
- // We use a SAM type to avoid boxing of Addr - @FunctionalInterface trait TreeToAddr: - def apply(x: untpd.Tree): Addr + end picklePositions +end PositionPickler + diff --git a/compiler/src/dotty/tools/dotc/core/tasty/ScratchData.scala b/compiler/src/dotty/tools/dotc/core/tasty/ScratchData.scala new file mode 100644 index 000000000000..b36c78a77ac6 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/core/tasty/ScratchData.scala @@ -0,0 +1,20 @@ +package dotty.tools.dotc.core.tasty +import dotty.tools.tasty.TastyBuffer +import collection.mutable +import java.util.Arrays + +class ScratchData: + var delta, delta1 = new Array[Int](0) + + val positionBuffer = new TastyBuffer(5000) + val pickledIndices = new mutable.BitSet + + val commentBuffer = new TastyBuffer(5000) + + def reset() = + assert(delta ne delta1) + assert(delta.length == delta1.length) + positionBuffer.reset() + pickledIndices.clear() + commentBuffer.reset() + diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala index aa657c393815..4f1e84ac9184 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala @@ -38,8 +38,9 @@ class TastyPickler(val rootCls: ClassSymbol) { nameBuffer.assemble() sections.foreach(_._2.assemble()) - val nameBufferHash = TastyHash.pjwHash64(nameBuffer.bytes) - val treeSectionHash +: otherSectionHashes = sections.map(x => TastyHash.pjwHash64(x._2.bytes)): @unchecked + val nameBufferHash = TastyHash.pjwHash64(nameBuffer.bytes, nameBuffer.length) + val treeSectionHash +: otherSectionHashes = + sections.map(x => TastyHash.pjwHash64(x._2.bytes, x._2.length)): @unchecked // Hash of name table and tree val uuidLow: Long = nameBufferHash ^ treeSectionHash diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeBuffer.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeBuffer.scala index a3dedaaec685..d0f08379c114 100644 --- 
a/compiler/src/dotty/tools/dotc/core/tasty/TreeBuffer.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeBuffer.scala @@ -10,6 +10,7 @@ import TastyBuffer.{Addr, NoAddr, AddrWidth} import util.Util.bestFit import config.Printers.pickling import ast.untpd.Tree +import java.util.Arrays class TreeBuffer extends TastyBuffer(50000) { @@ -17,7 +18,6 @@ class TreeBuffer extends TastyBuffer(50000) { private val initialOffsetSize = bytes.length / (AddrWidth * ItemsOverOffsets) private var offsets = new Array[Int](initialOffsetSize) private var isRelative = new Array[Boolean](initialOffsetSize) - private var delta: Array[Int] = _ private var numOffsets = 0 /** A map from trees to the address at which a tree is pickled. */ @@ -68,109 +68,119 @@ class TreeBuffer extends TastyBuffer(50000) { } /** The amount by which the bytes at the given address are shifted under compression */ - def deltaAt(at: Addr): Int = { + def deltaAt(at: Addr, scratch: ScratchData): Int = { val idx = bestFit(offsets, numOffsets, at.index - 1) - if (idx < 0) 0 else delta(idx) + if (idx < 0) 0 else scratch.delta(idx) } /** The address to which `x` is translated under compression */ - def adjusted(x: Addr): Addr = x - deltaAt(x) + def adjusted(x: Addr, scratch: ScratchData): Addr = x - deltaAt(x, scratch) - /** Compute all shift-deltas */ - private def computeDeltas() = { - delta = new Array[Int](numOffsets) - var lastDelta = 0 - var i = 0 - while (i < numOffsets) { - val off = offset(i) - val skippedOff = skipZeroes(off) - val skippedCount = skippedOff.index - off.index - assert(skippedCount < AddrWidth, s"unset field at position $off") - lastDelta += skippedCount - delta(i) = lastDelta - i += 1 - } - } + /** Final assembly, involving the following steps: + * - compute deltas + * - adjust deltas until additional savings are < 1% of total + * - adjust offsets according to the adjusted deltas + * - shrink buffer, skipping zeroes. 
+ */ + def compactify(scratch: ScratchData): Unit = - /** The absolute or relative adjusted address at index `i` of `offsets` array*/ - private def adjustedOffset(i: Int): Addr = { - val at = offset(i) - val original = getAddr(at) - if (isRelative(i)) { - val start = skipNat(at) - val len1 = original + delta(i) - deltaAt(original + start.index) - val len2 = adjusted(original + start.index) - adjusted(start).index - assert(len1 == len2, - s"adjusting offset #$i: $at, original = $original, len1 = $len1, len2 = $len2") - len1 + def reserve(arr: Array[Int]) = + if arr.length < numOffsets then + new Array[Int](numOffsets) + else + Arrays.fill(arr, 0, numOffsets, 0) + arr + + /** Compute all shift-deltas */ + def computeDeltas() = { + scratch.delta = reserve(scratch.delta) + var lastDelta = 0 + var i = 0 + while (i < numOffsets) { + val off = offset(i) + val skippedOff = skipZeroes(off) + val skippedCount = skippedOff.index - off.index + assert(skippedCount < AddrWidth, s"unset field at position $off") + lastDelta += skippedCount + scratch.delta(i) = lastDelta + i += 1 + } } - else adjusted(original) - } - /** Adjust all offsets according to previously computed deltas */ - private def adjustOffsets(): Unit = - for (i <- 0 until numOffsets) { - val corrected = adjustedOffset(i) - fillAddr(offset(i), corrected) + /** The absolute or relative adjusted address at index `i` of `offsets` array*/ + def adjustedOffset(i: Int): Addr = { + val at = offset(i) + val original = getAddr(at) + if (isRelative(i)) { + val start = skipNat(at) + val len1 = original + scratch.delta(i) - deltaAt(original + start.index, scratch) + val len2 = adjusted(original + start.index, scratch) - adjusted(start, scratch).index + assert(len1 == len2, + s"adjusting offset #$i: $at, original = $original, len1 = $len1, len2 = $len2") + len1 + } + else adjusted(original, scratch) } - /** Adjust deltas to also take account references that will shrink (and thereby - * generate additional zeroes that can be 
skipped) due to previously - * computed adjustments. - */ - private def adjustDeltas(): Int = { - val delta1 = new Array[Int](delta.length) - var lastDelta = 0 - var i = 0 - while (i < numOffsets) { - val corrected = adjustedOffset(i) - lastDelta += AddrWidth - TastyBuffer.natSize(corrected.index) - delta1(i) = lastDelta - i += 1 + /** Adjust all offsets according to previously computed deltas */ + def adjustOffsets(): Unit = + var i = 0 + while i < numOffsets do + val corrected = adjustedOffset(i) + fillAddr(offset(i), corrected) + i += 1 + + /** Adjust deltas to also take account references that will shrink (and thereby + * generate additional zeroes that can be skipped) due to previously + * computed adjustments. + */ + def adjustDeltas(): Int = { + scratch.delta1 = reserve(scratch.delta1) + var lastDelta = 0 + var i = 0 + while i < numOffsets do + val corrected = adjustedOffset(i) + lastDelta += AddrWidth - TastyBuffer.natSize(corrected.index) + scratch.delta1(i) = lastDelta + i += 1 + val saved = + if (numOffsets == 0) 0 + else scratch.delta1(numOffsets - 1) - scratch.delta(numOffsets - 1) + val tmp = scratch.delta + scratch.delta = scratch.delta1 + scratch.delta1 = tmp + saved } - val saved = - if (numOffsets == 0) 0 - else delta1(numOffsets - 1) - delta(numOffsets - 1) - delta = delta1 - saved - } - /** Compress pickle buffer, shifting bytes to close all skipped zeroes. */ - private def compress(): Int = { - var lastDelta = 0 - var start = 0 - var i = 0 - var wasted = 0 - def shift(end: Int) = - System.arraycopy(bytes, start, bytes, start - lastDelta, end - start) - while (i < numOffsets) { - val next = offsets(i) - shift(next) - start = next + delta(i) - lastDelta - val pastZeroes = skipZeroes(Addr(next)).index - assert(pastZeroes >= start, s"something's wrong: eliminated non-zero") - wasted += (pastZeroes - start) - lastDelta = delta(i) - i += 1 + /** Compress pickle buffer, shifting bytes to close all skipped zeroes. 
*/ + def compress(): Int = { + var lastDelta = 0 + var start = 0 + var i = 0 + var wasted = 0 + def shift(end: Int) = + System.arraycopy(bytes, start, bytes, start - lastDelta, end - start) + while (i < numOffsets) { + val next = offsets(i) + shift(next) + start = next + scratch.delta(i) - lastDelta + val pastZeroes = skipZeroes(Addr(next)).index + assert(pastZeroes >= start, s"something's wrong: eliminated non-zero") + wasted += (pastZeroes - start) + lastDelta = scratch.delta(i) + i += 1 + } + shift(length) + length -= lastDelta + wasted } - shift(length) - length -= lastDelta - wasted - } - def adjustTreeAddrs(): Unit = - var i = 0 - while i < treeAddrs.size do - treeAddrs.setValue(i, adjusted(Addr(treeAddrs.value(i))).index) - i += 1 + def adjustTreeAddrs(): Unit = + var i = 0 + while i < treeAddrs.size do + treeAddrs.setValue(i, adjusted(Addr(treeAddrs.value(i)), scratch).index) + i += 1 - /** Final assembly, involving the following steps: - * - compute deltas - * - adjust deltas until additional savings are < 1% of total - * - adjust offsets according to the adjusted deltas - * - shrink buffer, skipping zeroes. - */ - def compactify(): Unit = { val origLength = length computeDeltas() //println(s"offsets: ${offsets.take(numOffsets).deep}") @@ -185,5 +195,5 @@ class TreeBuffer extends TastyBuffer(50000) { adjustTreeAddrs() val wasted = compress() pickling.println(s"original length: $origLength, compressed to: $length, wasted: $wasted") // DEBUG, for now. 
- } + end compactify } diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala index 34c22439a932..645c6f81e539 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -20,6 +20,8 @@ import collection.mutable import reporting.{Profile, NoProfile} import dotty.tools.tasty.TastyFormat.ASTsSection +object TreePickler: + class StackSizeExceeded(val mdef: tpd.MemberDef) extends Exception class TreePickler(pickler: TastyPickler) { val buf: TreeBuffer = new TreeBuffer @@ -27,6 +29,7 @@ class TreePickler(pickler: TastyPickler) { import buf._ import pickler.nameBuffer.nameIndex import tpd._ + import TreePickler.* private val symRefs = Symbols.MutableSymbolMap[Addr](256) private val forwardSymRefs = Symbols.MutableSymbolMap[List[Addr]]() @@ -53,7 +56,7 @@ class TreePickler(pickler: TastyPickler) { def docString(tree: untpd.MemberDef): Option[Comment] = Option(docStrings.lookup(tree)) - private def withLength(op: => Unit) = { + private inline def withLength(inline op: Unit) = { val lengthAddr = reserveRef(relative = true) op fillRef(lengthAddr, currentAddr, relative = true) @@ -68,15 +71,12 @@ class TreePickler(pickler: TastyPickler) { case _ => } - def registerDef(sym: Symbol): Unit = { + def registerDef(sym: Symbol): Unit = symRefs(sym) = currentAddr - forwardSymRefs.get(sym) match { - case Some(refs) => - refs.foreach(fillRef(_, currentAddr, relative = false)) - forwardSymRefs -= sym - case None => - } - } + val refs = forwardSymRefs.lookup(sym) + if refs != null then + refs.foreach(fillRef(_, currentAddr, relative = false)) + forwardSymRefs -= sym def pickleName(name: Name): Unit = writeNat(nameIndex(name).index) @@ -85,17 +85,19 @@ class TreePickler(pickler: TastyPickler) { if (sig eq Signature.NotAMethod) name else SignedName(name.toTermName, sig, target.asTermName)) - private def pickleSymRef(sym: Symbol)(using 
Context) = symRefs.get(sym) match { - case Some(label) => - if (label != NoAddr) writeRef(label) else pickleForwardSymRef(sym) - case None => + private def pickleSymRef(sym: Symbol)(using Context) = + val label: Addr | Null = symRefs.lookup(sym) + if label == null then // See pos/t1957.scala for an example where this can happen. // I believe it's a bug in typer: the type of an implicit argument refers // to a closure parameter outside the closure itself. TODO: track this down, so that we // can eliminate this case. report.log(i"pickling reference to as yet undefined $sym in ${sym.owner}", sym.srcPos) pickleForwardSymRef(sym) - } + else if label == NoAddr then + pickleForwardSymRef(sym) + else + writeRef(label.uncheckedNN) // !!! Dotty problem: Not clear why nn or uncheckedNN is needed here private def pickleForwardSymRef(sym: Symbol)(using Context) = { val ref = reserveRef(relative = false) @@ -207,7 +209,7 @@ class TreePickler(pickler: TastyPickler) { else if (tpe.prefix == NoPrefix) { writeByte(if (tpe.isType) TYPEREFdirect else TERMREFdirect) if Config.checkLevelsOnConstraints && !symRefs.contains(sym) && !sym.isPatternBound && !sym.hasAnnotation(defn.QuotedRuntimePatterns_patternTypeAnnot) then - report.error(i"pickling reference to as yet undefined $tpe with symbol ${sym}", sym.srcPos) + report.error(em"pickling reference to as yet undefined $tpe with symbol ${sym}", sym.srcPos) pickleSymRef(sym) } else tpe.designator match { @@ -285,7 +287,6 @@ class TreePickler(pickler: TastyPickler) { var mods = EmptyFlags if tpe.isContextualMethod then mods |= Given else if tpe.isImplicitMethod then mods |= Implicit - if tpe.isErasedMethod then mods |= Erased pickleMethodic(METHODtype, tpe, mods) case tpe: ParamRef => assert(pickleParamRef(tpe), s"orphan parameter reference: $tpe") @@ -328,23 +329,30 @@ class TreePickler(pickler: TastyPickler) { registerDef(sym) writeByte(tag) val addr = currentAddr - withLength { - pickleName(sym.name) - pickleParams - tpt match { - case 
_: Template | _: Hole => pickleTree(tpt) - case _ if tpt.isType => pickleTpt(tpt) + try + withLength { + pickleName(sym.name) + pickleParams + tpt match { + case _: Template | _: Hole => pickleTree(tpt) + case _ if tpt.isType => pickleTpt(tpt) + } + pickleTreeUnlessEmpty(rhs) + pickleModifiers(sym, mdef) } - pickleTreeUnlessEmpty(rhs) - pickleModifiers(sym, mdef) - } + catch + case ex: Throwable => + if !ctx.settings.YnoDecodeStacktraces.value + && handleRecursive.underlyingStackOverflowOrNull(ex) != null then + throw StackSizeExceeded(mdef) + else + throw ex if sym.is(Method) && sym.owner.isClass then profile.recordMethodSize(sym, currentAddr.index - addr.index, mdef.span) - for - docCtx <- ctx.docCtx - comment <- docCtx.docstring(sym) - do - docStrings(mdef) = comment + for docCtx <- ctx.docCtx do + val comment = docCtx.docstrings.lookup(sym) + if comment != null then + docStrings(mdef) = comment } def pickleParam(tree: Tree)(using Context): Unit = { @@ -426,6 +434,13 @@ class TreePickler(pickler: TastyPickler) { writeByte(THROW) pickleTree(args.head) } + else if fun.symbol.originalSignaturePolymorphic.exists then + writeByte(APPLYsigpoly) + withLength { + pickleTree(fun) + pickleType(fun.tpe.widenTermRefExpr, richTypes = true) // this widens to a MethodType, so need richTypes + args.foreach(pickleTree) + } else { writeByte(APPLY) withLength { @@ -451,7 +466,7 @@ class TreePickler(pickler: TastyPickler) { withLength { pickleTree(qual); if (!mix.isEmpty) { - // mixinType being a TypeRef when mix is non-empty is enforced by TreeChecker#checkSuper + // mixinType being a TypeRef when mix is non-empty is enforced by TreeChecker#checkSuper val SuperType(_, mixinType: TypeRef) = tree.tpe: @unchecked pickleTree(mix.withType(mixinType)) } @@ -650,11 +665,31 @@ class TreePickler(pickler: TastyPickler) { pickleTree(hi) pickleTree(alias) } - case Hole(_, idx, args, _, tpt) => + case tree @ Quote(body, Nil) => + // TODO: Add QUOTE tag to TASTy + assert(body.isTerm, + """Quote 
with type should not be pickled. + |Quote with type should only exists after staging phase at staging level 0.""".stripMargin) + pickleTree( + // scala.quoted.runtime.Expr.quoted[]() + ref(defn.QuotedRuntime_exprQuote) + .appliedToType(tree.bodyType) + .appliedTo(body) + .withSpan(tree.span) + ) + case Splice(expr) => + pickleTree( // TODO: Add SPLICE tag to TASTy + // scala.quoted.runtime.Expr.splice[]() + ref(defn.QuotedRuntime_exprSplice) + .appliedToType(tree.tpe) + .appliedTo(expr) + .withSpan(tree.span) + ) + case Hole(_, idx, args, _) => writeByte(HOLE) withLength { writeNat(idx) - pickleType(tpt.tpe, richTypes = true) + pickleType(tree.tpe, richTypes = true) args.foreach(pickleTree) } } @@ -777,18 +812,39 @@ class TreePickler(pickler: TastyPickler) { def pickle(trees: List[Tree])(using Context): Unit = { profile = Profile.current - trees.foreach(tree => if (!tree.isEmpty) pickleTree(tree)) + for tree <- trees do + try + if !tree.isEmpty then pickleTree(tree) + catch case ex: StackSizeExceeded => + report.error( + em"""Recursion limit exceeded while pickling ${ex.mdef} + |in ${ex.mdef.symbol.showLocated}. + |You could try to increase the stacksize using the -Xss JVM option. 
+ |For the unprocessed stack trace, compile with -Yno-decode-stacktraces.""", + ex.mdef.srcPos) + def missing = forwardSymRefs.keysIterator .map(sym => i"${sym.showLocated} (line ${sym.srcPos.line}) #${sym.id}") .toList assert(forwardSymRefs.isEmpty, i"unresolved symbols: $missing%, % when pickling ${ctx.source}") } - def compactify(): Unit = { - buf.compactify() + def compactify(scratch: ScratchData = new ScratchData): Unit = { + buf.compactify(scratch) def updateMapWithDeltas(mp: MutableSymbolMap[Addr]) = - for (key <- mp.keysIterator.toBuffer[Symbol]) mp(key) = adjusted(mp(key)) + val keys = new Array[Symbol](mp.size) + val it = mp.keysIterator + var i = 0 + while i < keys.length do + keys(i) = it.next + i += 1 + assert(!it.hasNext) + i = 0 + while i < keys.length do + val key = keys(i) + mp(key) = adjusted(mp(key), scratch) + i += 1 updateMapWithDeltas(symRefs) } diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala index 617a2c55a7ad..98bd7152ff37 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala @@ -74,6 +74,9 @@ class TreeUnpickler(reader: TastyReader, */ private val typeAtAddr = new mutable.HashMap[Addr, Type] + /** If this is a pickled quote, the owner of the quote, otherwise NoSymbol. */ + private var rootOwner: Symbol = NoSymbol + /** The root symbol denotation which are defined by the Tasty file associated with this * TreeUnpickler. Set by `enterTopLevel`. 
*/ @@ -106,11 +109,12 @@ class TreeUnpickler(reader: TastyReader, /** The unpickled trees */ def unpickle(mode: UnpickleMode)(using Context): List[Tree] = { + if mode != UnpickleMode.TopLevel then rootOwner = ctx.owner assert(roots != null, "unpickle without previous enterTopLevel") val rdr = new TreeReader(reader) mode match { case UnpickleMode.TopLevel => rdr.readTopLevel() - case UnpickleMode.Term => rdr.readTerm() :: Nil + case UnpickleMode.Term => rdr.readTree() :: Nil case UnpickleMode.TypeTree => rdr.readTpt() :: Nil } } @@ -245,7 +249,6 @@ class TreeUnpickler(reader: TastyReader, while currentAddr != end do // avoid boxing the mods readByte() match case IMPLICIT => mods |= Implicit - case ERASED => mods |= Erased case GIVEN => mods |= Given (names, mods) @@ -386,7 +389,7 @@ class TreeUnpickler(reader: TastyReader, val hi = readVariances(readType()) createNullableTypeBounds(lo, hi) case ANNOTATEDtype => - AnnotatedType(readType(), Annotation(readTerm())) + AnnotatedType(readType(), Annotation(readTree())) case ANDtype => AndType(readType(), readType()) case ORtype => @@ -402,9 +405,7 @@ class TreeUnpickler(reader: TastyReader, case METHODtype => def methodTypeCompanion(mods: FlagSet): MethodTypeCompanion = if mods.is(Implicit) then ImplicitMethodType - else if mods.isAllOf(Erased | Given) then ErasedContextualMethodType else if mods.is(Given) then ContextualMethodType - else if mods.is(Erased) then ErasedMethodType else MethodType readMethodic(methodTypeCompanion, _.toTermName) case TYPELAMBDAtype => @@ -487,7 +488,7 @@ class TreeUnpickler(reader: TastyReader, def readTypeRef(): Type = typeAtAddr(readAddr()) - def readTermRef()(using Context): TermRef = + def readTreeRef()(using Context): TermRef = readType().asInstanceOf[TermRef] /** Under pureFunctions, map all function types to impure function types, @@ -625,7 +626,9 @@ class TreeUnpickler(reader: TastyReader, else newSymbol(ctx.owner, name, flags, completer, privateWithin, coord) } - val annots = 
annotFns.map(_(sym.owner)) + val annotOwner = + if sym.owner.isClass then newLocalDummy(sym.owner) else sym.owner + val annots = annotFns.map(_(annotOwner)) sym.annotations = annots if sym.isOpaqueAlias then sym.setFlag(Deferred) val isScala2MacroDefinedInScala3 = flags.is(Macro, butNot = Inline) && flags.is(Erased) @@ -653,7 +656,7 @@ class TreeUnpickler(reader: TastyReader, val ctx1 = localContext(sym)(using ctx0).addMode(Mode.ReadPositions) inContext(sourceChangeContext(Addr(0))(using ctx1)) { // avoids space leaks by not capturing the current context - forkAt(rhsStart).readTerm() + forkAt(rhsStart).readTree() } }) goto(start) @@ -736,7 +739,7 @@ class TreeUnpickler(reader: TastyReader, readByte() val end = readEnd() val tp = readType() - val lazyAnnotTree = readLaterWithOwner(end, _.readTerm()) + val lazyAnnotTree = readLaterWithOwner(end, _.readTree()) owner => new DeferredSymAndTree(tp.typeSymbol, lazyAnnotTree(owner).complete): // Only force computation of symbol if it has the right name. This added @@ -787,7 +790,7 @@ class TreeUnpickler(reader: TastyReader, if (sctx `ne` ctx) return processPackage(op)(using sctx) readByte() val end = readEnd() - val pid = ref(readTermRef()).asInstanceOf[RefTree] + val pid = ref(readTreeRef()).asInstanceOf[RefTree] op(pid, end)(using localContext(pid.symbol.moduleClass)) } @@ -855,7 +858,7 @@ class TreeUnpickler(reader: TastyReader, def complete(using Context) = inlines.Inlines.bodyToInline(sym) } else - readLater(end, _.readTerm()) + readLater(end, _.readTree()) def ValDef(tpt: Tree) = ta.assignType(untpd.ValDef(sym.name.asTermName, tpt, readRhs(using localCtx)), sym) @@ -957,6 +960,51 @@ class TreeUnpickler(reader: TastyReader, tree.setDefTree } + /** Read enough of parent to determine its type, without reading arguments + * of applications. This is necessary to make TreeUnpickler as lazy as Namer + * in this regard. See i16673 for a test case. 
+ */ + private def readParentType()(using Context): Type = + readByte() match + case TYPEAPPLY => + val end = readEnd() + val tycon = readParentType() + if tycon.typeParams.isEmpty then + goto(end) + tycon + else + val args = until(end)(readTpt()) + val cls = tycon.classSymbol + assert(cls.typeParams.hasSameLengthAs(args)) + cls.typeRef.appliedTo(args.tpes) + case APPLY | BLOCK => + val end = readEnd() + try readParentType() + finally goto(end) + case SELECTin => + val end = readEnd() + readName() + readTree() match + case nu: New => + try nu.tpe + finally goto(end) + case SHAREDterm => + forkAt(readAddr()).readParentType() + + /** Read template parents + * @param withArgs if false, only read enough of parent trees to determine their type + * but skip constructor arguments. Return any trees that were partially + * parsed in this way as InferredTypeTrees. + */ + def readParents(withArgs: Boolean)(using Context): List[Tree] = + collectWhile(nextByte != SELFDEF && nextByte != DEFDEF) { + nextUnsharedTag match + case APPLY | TYPEAPPLY | BLOCK => + if withArgs then readTree() + else InferredTypeTree().withType(readParentType()) + case _ => readTpt() + } + private def readTemplate(using Context): Template = { val start = currentAddr assert(sourcePathAt(start).isEmpty) @@ -979,12 +1027,8 @@ class TreeUnpickler(reader: TastyReader, while (bodyIndexer.reader.nextByte != DEFDEF) bodyIndexer.skipTree() bodyIndexer.indexStats(end) } - val parents = collectWhile(nextByte != SELFDEF && nextByte != DEFDEF) { - nextUnsharedTag match { - case APPLY | TYPEAPPLY | BLOCK => readTerm()(using parentCtx) - case _ => readTpt()(using parentCtx) - } - } + val parentReader = fork + val parents = readParents(withArgs = false)(using parentCtx) val parentTypes = parents.map(_.tpe.dealias) val self = if (nextByte == SELFDEF) { @@ -998,7 +1042,13 @@ class TreeUnpickler(reader: TastyReader, selfInfo = if (self.isEmpty) NoType else self.tpt.tpe) .integrateOpaqueMembers val constr = 
readIndexedDef().asInstanceOf[DefDef] - val mappedParents = parents.map(_.changeOwner(localDummy, constr.symbol)) + val mappedParents: LazyTreeList = + if parents.exists(_.isInstanceOf[InferredTypeTree]) then + // parents were not read fully, will need to be read again later on demand + new LazyReader(parentReader, localDummy, ctx.mode, ctx.source, + _.readParents(withArgs = true) + .map(_.changeOwner(localDummy, constr.symbol))) + else parents val lazyStats = readLater(end, rdr => { val stats = rdr.readIndexedStats(localDummy, end) @@ -1007,7 +1057,7 @@ class TreeUnpickler(reader: TastyReader, defn.patchStdLibClass(cls) NamerOps.addConstructorProxies(cls) setSpan(start, - untpd.Template(constr, mappedParents, Nil, self, lazyStats) + untpd.Template(constr, mappedParents, self, lazyStats) .withType(localDummy.termRef)) } @@ -1045,7 +1095,7 @@ class TreeUnpickler(reader: TastyReader, setSpan(start, PackageDef(pid, readIndexedStats(exprOwner, end))) } case _ => - readTerm()(using ctx.withOwner(exprOwner)) + readTree()(using ctx.withOwner(exprOwner)) } inline def readImportOrExport(inline mkTree: @@ -1054,7 +1104,7 @@ class TreeUnpickler(reader: TastyReader, assert(sourcePathAt(start).isEmpty) readByte() readEnd() - val expr = readTerm() + val expr = readTree() setSpan(start, mkTree(expr, readSelectors())) } @@ -1112,14 +1162,14 @@ class TreeUnpickler(reader: TastyReader, // ------ Reading trees ----------------------------------------------------- - def readTerm()(using Context): Tree = { // TODO: rename to readTree + def readTree()(using Context): Tree = { val sctx = sourceChangeContext() - if (sctx `ne` ctx) return readTerm()(using sctx) + if (sctx `ne` ctx) return readTree()(using sctx) val start = currentAddr val tag = readByte() pickling.println(s"reading term ${astTagToString(tag)} at $start, ${ctx.source}") - def readPathTerm(): Tree = { + def readPathTree(): Tree = { goto(start) readType() match { case path: TypeRef => TypeTree(path) @@ -1139,12 +1189,12 @@ 
class TreeUnpickler(reader: TastyReader, ConstFold.Select(untpd.Select(qual, name).withType(tpe)) def completeSelect(name: Name, sig: Signature, target: Name): Select = - val qual = readTerm() + val qual = readTree() val denot = accessibleDenot(qual.tpe.widenIfUnstable, name, sig, target) makeSelect(qual, name, denot) def readQualId(): (untpd.Ident, TypeRef) = - val qual = readTerm().asInstanceOf[untpd.Ident] + val qual = readTree().asInstanceOf[untpd.Ident] (untpd.Ident(qual.name).withSpan(qual.span), qual.tpe.asInstanceOf[TypeRef]) def accessibleDenot(qualType: Type, name: Name, sig: Signature, target: Name) = { @@ -1154,9 +1204,9 @@ class TreeUnpickler(reader: TastyReader, else qualType.findMember(name, pre, excluded = Private).atSignature(sig, target) } - def readSimpleTerm(): Tree = tag match { + def readSimpleTree(): Tree = tag match { case SHAREDterm => - forkAt(readAddr()).readTerm() + forkAt(readAddr()).readTree() case IDENT => untpd.Ident(readName()).withType(readType()) case IDENTtpt => @@ -1175,16 +1225,16 @@ class TreeUnpickler(reader: TastyReader, case NEW => New(readTpt()) case THROW => - Throw(readTerm()) + Throw(readTree()) case SINGLETONtpt => - SingletonTypeTree(readTerm()) + SingletonTypeTree(readTree()) case BYNAMEtpt => val arg = readTpt() ByNameTypeTree(if knowsPureFuns then arg else arg.adaptByNameArgUnderPureFuns) case NAMEDARG => - NamedArg(readName(), readTerm()) + NamedArg(readName(), readTree()) case _ => - readPathTerm() + readPathTree() } /** Adapt constructor calls where class has only using clauses from old to new scheme. 
@@ -1217,56 +1267,80 @@ class TreeUnpickler(reader: TastyReader, res.withAttachment(SuppressedApplyToNone, ()) else res + def quotedExpr(fn: Tree, args: List[Tree]): Tree = + val TypeApply(_, targs) = fn: @unchecked + untpd.Quote(args.head, Nil).withBodyType(targs.head.tpe) + + def splicedExpr(fn: Tree, args: List[Tree]): Tree = + val TypeApply(_, targs) = fn: @unchecked + Splice(args.head, targs.head.tpe) + + def nestedSpliceExpr(fn: Tree, args: List[Tree]): Tree = + fn match + case Apply(TypeApply(_, targs), _ :: Nil) => // nestedSplice[T](quotes)(expr) + Splice(args.head, targs.head.tpe) + case _ => // nestedSplice[T](quotes) + tpd.Apply(fn, args) + def simplifyLub(tree: Tree): Tree = tree.overwriteType(tree.tpe.simplified) tree - def readLengthTerm(): Tree = { + def readLengthTree(): Tree = { val end = readEnd() val result = (tag: @switch) match { case SUPER => - val qual = readTerm() + val qual = readTree() val (mixId, mixTpe) = ifBefore(end)(readQualId(), (untpd.EmptyTypeIdent, NoType)) tpd.Super(qual, mixId, mixTpe.typeSymbol) case APPLY => - val fn = readTerm() - val args = until(end)(readTerm()) + val fn = readTree() + val args = until(end)(readTree()) if fn.symbol.isConstructor then constructorApply(fn, args) + else if fn.symbol == defn.QuotedRuntime_exprQuote then quotedExpr(fn, args) + else if fn.symbol == defn.QuotedRuntime_exprSplice then splicedExpr(fn, args) + else if fn.symbol == defn.QuotedRuntime_exprNestedSplice then nestedSpliceExpr(fn, args) else tpd.Apply(fn, args) case TYPEAPPLY => - tpd.TypeApply(readTerm(), until(end)(readTpt())) + tpd.TypeApply(readTree(), until(end)(readTpt())) + case APPLYsigpoly => + val fn = readTree() + val methType = readType() + val args = until(end)(readTree()) + val fun2 = typer.Applications.retypeSignaturePolymorphicFn(fn, methType) + tpd.Apply(fun2, args) case TYPED => - val expr = readTerm() + val expr = readTree() val tpt = readTpt() Typed(expr, tpt) case ASSIGN => - Assign(readTerm(), readTerm()) + 
Assign(readTree(), readTree()) case BLOCK => val exprReader = fork skipTree() readStats(ctx.owner, end, - (stats, ctx) => Block(stats, exprReader.readTerm()(using ctx))) + (stats, ctx) => Block(stats, exprReader.readTree()(using ctx))) case INLINED => val exprReader = fork skipTree() def maybeCall = nextUnsharedTag match { case VALDEF | DEFDEF => EmptyTree - case _ => readTerm() + case _ => readTree() } val call = ifBefore(end)(maybeCall, EmptyTree) val bindings = readStats(ctx.owner, end).asInstanceOf[List[ValOrDefDef]] - val expansion = exprReader.readTerm() // need bindings in scope, so needs to be read before + val expansion = exprReader.readTree() // need bindings in scope, so needs to be read before Inlined(call, bindings, expansion) case IF => if (nextByte == INLINE) { readByte() - InlineIf(readTerm(), readTerm(), readTerm()) + InlineIf(readTree(), readTree(), readTree()) } else - If(readTerm(), readTerm(), readTerm()) + If(readTree(), readTree(), readTree()) case LAMBDA => - val meth = readTerm() + val meth = readTree() val tpt = ifBefore(end)(readTpt(), EmptyTree) Closure(Nil, meth, tpt) case MATCH => @@ -1277,24 +1351,24 @@ class TreeUnpickler(reader: TastyReader, } else if (nextByte == INLINE) { readByte() - InlineMatch(readTerm(), readCases(end)) + InlineMatch(readTree(), readCases(end)) } - else Match(readTerm(), readCases(end))) + else Match(readTree(), readCases(end))) case RETURN => val from = readSymRef() - val expr = ifBefore(end)(readTerm(), EmptyTree) + val expr = ifBefore(end)(readTree(), EmptyTree) Return(expr, Ident(from.termRef)) case WHILE => - WhileDo(readTerm(), readTerm()) + WhileDo(readTree(), readTree()) case TRY => simplifyLub( - Try(readTerm(), readCases(end), ifBefore(end)(readTerm(), EmptyTree))) + Try(readTree(), readCases(end), ifBefore(end)(readTree(), EmptyTree))) case SELECTouter => val levels = readNat() - readTerm().outerSelect(levels, SkolemType(readType())) + readTree().outerSelect(levels, SkolemType(readType())) case 
SELECTin => var sname = readName() - val qual = readTerm() + val qual = readTree() val ownerTpe = readType() val owner = ownerTpe.typeSymbol val SignedName(name, sig, target) = sname: @unchecked // only methods with params use SELECTin @@ -1325,26 +1399,26 @@ class TreeUnpickler(reader: TastyReader, makeSelect(qual, name, denot) case REPEATED => val elemtpt = readTpt() - SeqLiteral(until(end)(readTerm()), elemtpt) + SeqLiteral(until(end)(readTree()), elemtpt) case BIND => val sym = symAtAddr.getOrElse(start, forkAt(start).createSymbol()) readName() readType() - val body = readTerm() + val body = readTree() val (givenFlags, _, _) = readModifiers(end) sym.setFlag(givenFlags) Bind(sym, body) case ALTERNATIVE => - Alternative(until(end)(readTerm())) + Alternative(until(end)(readTree())) case UNAPPLY => - val fn = readTerm() + val fn = readTree() val implicitArgs = collectWhile(nextByte == IMPLICITarg) { readByte() - readTerm() + readTree() } val patType = readType() - val argPats = until(end)(readTerm()) + val argPats = until(end)(readTree()) UnApply(fn, implicitArgs, argPats, patType) case REFINEDtpt => val refineCls = symAtAddr.getOrElse(start, @@ -1364,7 +1438,7 @@ class TreeUnpickler(reader: TastyReader, val ownType = ctx.typeAssigner.processAppliedType(tree, tycon.tpe.safeAppliedTo(args.tpes)) tree.withType(postProcessFunction(ownType)) case ANNOTATEDtpt => - Annotated(readTpt(), readTerm()) + Annotated(readTpt(), readTree()) case LAMBDAtpt => val tparams = readParams[TypeDef](TYPEPARAM) val body = readTpt() @@ -1382,16 +1456,16 @@ class TreeUnpickler(reader: TastyReader, case HOLE => val idx = readNat() val tpe = readType() - val args = until(end)(readTerm()) - Hole(true, idx, args, EmptyTree, TypeTree(tpe)).withType(tpe) + val args = until(end)(readTree()) + Hole(true, idx, args, EmptyTree, tpe) case _ => - readPathTerm() + readPathTree() } assert(currentAddr == end, s"$start $currentAddr $end ${astTagToString(tag)}") result } - val tree = if (tag < 
firstLengthTreeTag) readSimpleTerm() else readLengthTerm() + val tree = if (tag < firstLengthTreeTag) readSimpleTree() else readLengthTree() setSpan(start, tree) } @@ -1416,10 +1490,10 @@ class TreeUnpickler(reader: TastyReader, val end = readEnd() val idx = readNat() val tpe = readType() - val args = until(end)(readTerm()) - Hole(false, idx, args, EmptyTree, TypeTree(tpe)).withType(tpe) + val args = until(end)(readTree()) + Hole(false, idx, args, EmptyTree, tpe) case _ => - if (isTypeTreeTag(nextByte)) readTerm() + if (isTypeTreeTag(nextByte)) readTree() else { val start = currentAddr val tp = readType() @@ -1444,9 +1518,9 @@ class TreeUnpickler(reader: TastyReader, val start = currentAddr assert(readByte() == CASEDEF) val end = readEnd() - val pat = readTerm() - val rhs = readTerm() - val guard = ifBefore(end)(readTerm(), EmptyTree) + val pat = readTree() + val rhs = readTree() + val guard = ifBefore(end)(readTree(), EmptyTree) setSpan(start, CaseDef(pat, guard, rhs)) } @@ -1580,7 +1654,7 @@ class TreeUnpickler(reader: TastyReader, pickling.println(i"no owner for $addr among $cs%, %") throw ex } - try search(children, NoSymbol) + try search(children, rootOwner) catch { case ex: TreeWithoutOwner => pickling.println(s"ownerTree = $ownerTree") diff --git a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Erasure.scala b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Erasure.scala index f2d25d0f34b5..cc2d7dd7ee56 100644 --- a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Erasure.scala +++ b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Erasure.scala @@ -39,9 +39,9 @@ object Scala2Erasure: case RefinedType(parent, _, _) => checkSupported(parent) case AnnotatedType(parent, _) if parent.dealias.isInstanceOf[Scala2RefinedType] => - throw new TypeError(i"Unsupported Scala 2 type: Component $parent of intersection is annotated.") + throw TypeError(em"Unsupported Scala 2 type: Component $parent of intersection is annotated.") case tp @ 
TypeRef(prefix, _) if !tp.symbol.exists && prefix.dealias.isInstanceOf[Scala2RefinedType] => - throw new TypeError(i"Unsupported Scala 2 type: Prefix $prefix of intersection component is an intersection or refinement.") + throw TypeError(em"Unsupported Scala 2 type: Prefix $prefix of intersection component is an intersection or refinement.") case _ => /** A type that would be represented as a RefinedType in Scala 2. diff --git a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala index 561b1eac2391..deb022d3c261 100644 --- a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala @@ -89,7 +89,11 @@ object Scala2Unpickler { val sourceModule = denot.sourceModule.orElse { // For non-toplevel modules, `sourceModule` won't be set when completing // the module class, we need to go find it ourselves. - NamerOps.findModuleBuddy(cls.name.sourceModuleName, denot.owner.info.decls) + val modName = cls.name.sourceModuleName + val alternate = + if cls.privateWithin.exists && cls.owner.is(Trait) then modName.expandedName(cls.owner) + else EmptyTermName + NamerOps.findModuleBuddy(modName, denot.owner.info.decls, alternate) } denot.owner.thisType.select(sourceModule) else selfInfo @@ -129,9 +133,8 @@ object Scala2Unpickler { /** Unpickle symbol table information descending from a class and/or module root * from an array of bytes. 
* @param bytes bytearray from which we unpickle - * @param classroot the top-level class which is unpickled, or NoSymbol if inapplicable - * @param moduleroot the top-level module class which is unpickled, or NoSymbol if inapplicable - * @param filename filename associated with bytearray, only used for error messages + * @param classRoot the top-level class which is unpickled, or NoSymbol if inapplicable + * @param moduleClassRoot the top-level module class which is unpickled, or NoSymbol if inapplicable */ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClassRoot: ClassDenotation)(ictx: Context) extends PickleBuffer(bytes, 0, -1) with ClassfileParser.Embedded { diff --git a/compiler/src/dotty/tools/dotc/coverage/Location.scala b/compiler/src/dotty/tools/dotc/coverage/Location.scala index c565c2bb1116..aa7a586d4b57 100644 --- a/compiler/src/dotty/tools/dotc/coverage/Location.scala +++ b/compiler/src/dotty/tools/dotc/coverage/Location.scala @@ -13,7 +13,7 @@ import dotty.tools.dotc.util.SourceFile * @param className name of the closest enclosing class * @param fullClassName fully qualified name of the closest enclosing class * @param classType "type" of the closest enclosing class: Class, Trait or Object - * @param method name of the closest enclosing method + * @param methodName name of the closest enclosing method * @param sourcePath absolute path of the source file */ final case class Location( diff --git a/compiler/src/dotty/tools/dotc/fromtasty/ReadTasty.scala b/compiler/src/dotty/tools/dotc/fromtasty/ReadTasty.scala index 864f5277bff3..86ae99b3e0f9 100644 --- a/compiler/src/dotty/tools/dotc/fromtasty/ReadTasty.scala +++ b/compiler/src/dotty/tools/dotc/fromtasty/ReadTasty.scala @@ -29,7 +29,7 @@ class ReadTasty extends Phase { val className = unit.className.toTypeName def cannotUnpickle(reason: String): None.type = { - report.error(s"class $className cannot be unpickled because $reason") + report.error(em"class $className cannot be 
unpickled because $reason") None } diff --git a/compiler/src/dotty/tools/dotc/fromtasty/TASTYRun.scala b/compiler/src/dotty/tools/dotc/fromtasty/TASTYRun.scala index 04c65a3d3882..fb0abe3332ed 100644 --- a/compiler/src/dotty/tools/dotc/fromtasty/TASTYRun.scala +++ b/compiler/src/dotty/tools/dotc/fromtasty/TASTYRun.scala @@ -6,6 +6,7 @@ import scala.language.unsafeNulls import io.{JarArchive, AbstractFile, Path} import core.Contexts._ +import core.Decorators.em import java.io.File class TASTYRun(comp: Compiler, ictx: Context) extends Run(comp, ictx) { @@ -27,7 +28,7 @@ class TASTYRun(comp: Compiler, ictx: Context) extends Run(comp, ictx) { .toList case "tasty" => TastyFileUtil.getClassName(file) case _ => - report.error(s"File extension is not `tasty` or `jar`: ${file.path}") + report.error(em"File extension is not `tasty` or `jar`: ${file.path}") Nil } classNames.map(new TASTYCompilationUnit(_)) diff --git a/compiler/src/dotty/tools/dotc/inlines/InlineReducer.scala b/compiler/src/dotty/tools/dotc/inlines/InlineReducer.scala index debf51872d5a..ebb76e9e9bf9 100644 --- a/compiler/src/dotty/tools/dotc/inlines/InlineReducer.scala +++ b/compiler/src/dotty/tools/dotc/inlines/InlineReducer.scala @@ -148,47 +148,6 @@ class InlineReducer(inliner: Inliner)(using Context): binding1.withSpan(call.span) } - /** Rewrite an application - * - * ((x1, ..., xn) => b)(e1, ..., en) - * - * to - * - * val/def x1 = e1; ...; val/def xn = en; b - * - * where `def` is used for call-by-name parameters. However, we shortcut any NoPrefix - * refs among the ei's directly without creating an intermediate binding. - */ - def betaReduce(tree: Tree)(using Context): Tree = tree match { - case Apply(Select(cl @ closureDef(ddef), nme.apply), args) if defn.isFunctionType(cl.tpe) => - // closureDef also returns a result for closures wrapped in Inlined nodes. - // These need to be preserved. 
- def recur(cl: Tree): Tree = cl match - case Inlined(call, bindings, expr) => - cpy.Inlined(cl)(call, bindings, recur(expr)) - case _ => ddef.tpe.widen match - case mt: MethodType if ddef.paramss.head.length == args.length => - val bindingsBuf = new DefBuffer - val argSyms = mt.paramNames.lazyZip(mt.paramInfos).lazyZip(args).map { (name, paramtp, arg) => - arg.tpe.dealias match { - case ref @ TermRef(NoPrefix, _) => ref.symbol - case _ => - paramBindingDef(name, paramtp, arg, bindingsBuf)( - using ctx.withSource(cl.source) - ).symbol - } - } - val expander = new TreeTypeMap( - oldOwners = ddef.symbol :: Nil, - newOwners = ctx.owner :: Nil, - substFrom = ddef.paramss.head.map(_.symbol), - substTo = argSyms) - Block(bindingsBuf.toList, expander.transform(ddef.rhs)).withSpan(tree.span) - case _ => tree - recur(cl) - case _ => tree - } - /** The result type of reducing a match. It consists optionally of a list of bindings * for the pattern-bound variables and the RHS of the selected case. * Returns `None` if no case was selected. @@ -269,12 +228,21 @@ class InlineReducer(inliner: Inliner)(using Context): } } - // Extractors contain Bind nodes in type parameter lists, the tree looks like this: + // Extractors can contain Bind nodes in type parameter lists, + // for that case tree looks like this: // UnApply[t @ t](pats)(implicits): T[t] // Test case is pos/inline-caseclass.scala. 
+ // Alternatively, for explicitly specified type binds in type annotations like in + // case A(B): A[t] + // the tree will look like this: + // Unapply[t](pats)(implicits) : T[t @ t] + // and the binds will be found in the type tree instead + // Test case is pos-macros/i15971 + val tptBinds = getBinds(Set.empty[TypeSymbol], tpt) val binds: Set[TypeSymbol] = pat match { - case UnApply(TypeApply(_, tpts), _, _) => getBinds(Set.empty[TypeSymbol], tpts) - case _ => getBinds(Set.empty[TypeSymbol], tpt) + case UnApply(TypeApply(_, tpts), _, _) => + getBinds(Set.empty[TypeSymbol], tpts) ++ tptBinds + case _ => tptBinds } val extractBindVariance = new TypeAccumulator[TypeBindsMap] { @@ -303,11 +271,11 @@ class InlineReducer(inliner: Inliner)(using Context): def addTypeBindings(typeBinds: TypeBindsMap)(using Context): Unit = typeBinds.foreachBinding { case (sym, shouldBeMinimized) => newTypeBinding(sym, - ctx.gadt.approximation(sym, fromBelow = shouldBeMinimized, maxLevel = Int.MaxValue)) + ctx.gadtState.approximation(sym, fromBelow = shouldBeMinimized, maxLevel = Int.MaxValue)) } def registerAsGadtSyms(typeBinds: TypeBindsMap)(using Context): Unit = - if (typeBinds.size > 0) ctx.gadt.addToConstraint(typeBinds.keys) + if (typeBinds.size > 0) ctx.gadtState.addToConstraint(typeBinds.keys) pat match { case Typed(pat1, tpt) => diff --git a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala index bea42e82ce6f..73fa2a2871a2 100644 --- a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala +++ b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala @@ -21,7 +21,9 @@ import collection.mutable import reporting.trace import util.Spans.Span import dotty.tools.dotc.transform.Splicer +import dotty.tools.dotc.transform.BetaReduce import quoted.QuoteUtils +import staging.StagingLevel.{level, spliceContext} import scala.annotation.constructorOnly /** General support for inlining */ @@ -199,17 +201,24 @@ class Inliner(val call: 
tpd.Tree)(using Context): * to `buf`. * @param name the name of the parameter * @param formal the type of the parameter - * @param arg the argument corresponding to the parameter + * @param arg0 the argument corresponding to the parameter * @param buf the buffer to which the definition should be appended */ private[inlines] def paramBindingDef(name: Name, formal: Type, arg0: Tree, buf: DefBuffer)(using Context): ValOrDefDef = { val isByName = formal.dealias.isInstanceOf[ExprType] - val arg = arg0 match { - case Typed(arg1, tpt) if tpt.tpe.isRepeatedParam && arg1.tpe.derivesFrom(defn.ArrayClass) => - wrapArray(arg1, arg0.tpe.elemType) - case _ => arg0 - } + val arg = + def dropNameArg(arg: Tree): Tree = arg match + case NamedArg(_, arg1) => arg1 + case SeqLiteral(elems, tpt) => + cpy.SeqLiteral(arg)(elems.mapConserve(dropNameArg), tpt) + case _ => arg + arg0 match + case Typed(seq, tpt) if tpt.tpe.isRepeatedParam => + if seq.tpe.derivesFrom(defn.ArrayClass) then wrapArray(dropNameArg(seq), arg0.tpe.elemType) + else cpy.Typed(arg0)(dropNameArg(seq), tpt) + case arg0 => + dropNameArg(arg0) val argtpe = arg.tpe.dealiasKeepAnnots.translateFromRepeated(toArray = false) val argIsBottom = argtpe.isBottomTypeAfterErasure val bindingType = @@ -227,7 +236,7 @@ class Inliner(val call: tpd.Tree)(using Context): val binding = { var newArg = arg.changeOwner(ctx.owner, boundSym) if bindingFlags.is(Inline) && argIsBottom then - newArg = Typed(newArg, TypeTree(formal)) // type ascribe RHS to avoid type errors in expansion. See i8612.scala + newArg = Typed(newArg, TypeTree(formal.widenExpr)) // type ascribe RHS to avoid type errors in expansion. 
See i8612.scala if isByName then DefDef(boundSym, newArg) else ValDef(boundSym, newArg) }.withSpan(boundSym.span) @@ -253,7 +262,7 @@ class Inliner(val call: tpd.Tree)(using Context): computeParamBindings(tp.resultType, targs.drop(tp.paramNames.length), argss, formalss, buf) case tp: MethodType => if argss.isEmpty then - report.error(i"missing arguments for inline method $inlinedMethod", call.srcPos) + report.error(em"missing arguments for inline method $inlinedMethod", call.srcPos) false else tp.paramNames.lazyZip(formalss.head).lazyZip(argss.head).foreach { (name, formal, arg) => @@ -477,6 +486,7 @@ class Inliner(val call: tpd.Tree)(using Context): /** Register type of leaf node */ private def registerLeaf(tree: Tree): Unit = tree match case _: This | _: Ident | _: TypeTree => registerTypes.traverse(tree.typeOpt) + case tree: Quote => registerTypes.traverse(tree.bodyType) case _ => /** Make `tree` part of inlined expansion. This means its owner has to be changed @@ -616,8 +626,8 @@ class Inliner(val call: tpd.Tree)(using Context): def issueError() = callValueArgss match { case (msgArg :: Nil) :: Nil => val message = msgArg.tpe match { - case ConstantType(Constant(msg: String)) => msg - case _ => s"A literal string is expected as an argument to `compiletime.error`. Got ${msgArg.show}" + case ConstantType(Constant(msg: String)) => msg.toMessage + case _ => em"A literal string is expected as an argument to `compiletime.error`. Got $msgArg" } // Usually `error` is called from within a rewrite method. 
In this // case we need to report the error at the point of the outermost enclosing inline @@ -749,9 +759,9 @@ class Inliner(val call: tpd.Tree)(using Context): ctx override def typedIdent(tree: untpd.Ident, pt: Type)(using Context): Tree = - val tree1 = inlineIfNeeded( - tryInlineArg(tree.asInstanceOf[tpd.Tree]) `orElse` super.typedIdent(tree, pt) - ) + val locked = ctx.typerState.ownedVars + val tree0 = tryInlineArg(tree.asInstanceOf[tpd.Tree]) `orElse` super.typedIdent(tree, pt) + val tree1 = inlineIfNeeded(tree0, pt, locked) tree1 match case id: Ident if tpd.needsSelect(id.tpe) => inlining.println(i"expanding $id to selection") @@ -760,6 +770,7 @@ class Inliner(val call: tpd.Tree)(using Context): tree1 override def typedSelect(tree: untpd.Select, pt: Type)(using Context): Tree = { + val locked = ctx.typerState.ownedVars val qual1 = typed(tree.qualifier, shallowSelectionProto(tree.name, pt, this)) val resNoReduce = untpd.cpy.Select(tree)(qual1, tree.name).withType(tree.typeOpt) val reducedProjection = reducer.reduceProjection(resNoReduce) @@ -771,7 +782,7 @@ class Inliner(val call: tpd.Tree)(using Context): if resNoReduce ne res then typed(res, pt) // redo typecheck if reduction changed something else if res.symbol.isInlineMethod then - inlineIfNeeded(res) + inlineIfNeeded(res, pt, locked) else ensureAccessible(res.tpe, tree.qualifier.isInstanceOf[untpd.Super], tree.srcPos) res @@ -805,27 +816,31 @@ class Inliner(val call: tpd.Tree)(using Context): super.typedValDef(vdef1, sym) override def typedApply(tree: untpd.Apply, pt: Type)(using Context): Tree = - def cancelQuotes(tree: Tree): Tree = - tree match - case Quoted(Spliced(inner)) => inner - case _ => tree - val res = cancelQuotes(constToLiteral(betaReduce(super.typedApply(tree, pt)))) match { - case res: Apply if res.symbol == defn.QuotedRuntime_exprSplice - && StagingContext.level == 0 - && !hasInliningErrors => - val expanded = expandMacro(res.args.head, tree.srcPos) - typedExpr(expanded) // Inline calls 
and constant fold code generated by the macro - case res => - specializeEq(inlineIfNeeded(res)) - } - res + val locked = ctx.typerState.ownedVars + specializeEq(inlineIfNeeded(constToLiteral(BetaReduce(super.typedApply(tree, pt))), pt, locked)) override def typedTypeApply(tree: untpd.TypeApply, pt: Type)(using Context): Tree = - val tree1 = inlineIfNeeded(constToLiteral(betaReduce(super.typedTypeApply(tree, pt)))) - if tree1.symbol.isQuote then + val locked = ctx.typerState.ownedVars + val tree1 = inlineIfNeeded(constToLiteral(BetaReduce(super.typedTypeApply(tree, pt))), pt, locked) + if tree1.symbol == defn.QuotedTypeModule_of then ctx.compilationUnit.needsStaging = true tree1 + override def typedQuote(tree: untpd.Quote, pt: Type)(using Context): Tree = + super.typedQuote(tree, pt) match + case Quote(Splice(inner), _) => inner + case tree1 => + ctx.compilationUnit.needsStaging = true + tree1 + + override def typedSplice(tree: untpd.Splice, pt: Type)(using Context): Tree = + super.typedSplice(tree, pt) match + case tree1 @ Splice(expr) if level == 0 && !hasInliningErrors => + val expanded = expandMacro(expr, tree1.srcPos) + transform.TreeChecker.checkMacroGeneratedTree(tree1, expanded) + typedExpr(expanded) // Inline calls and constant fold code generated by the macro + case tree1 => tree1 + override def typedMatch(tree: untpd.Match, pt: Type)(using Context): Tree = val tree1 = if tree.isInline then @@ -889,11 +904,11 @@ class Inliner(val call: tpd.Tree)(using Context): /** True if this inline typer has already issued errors */ override def hasInliningErrors(using Context) = ctx.reporter.errorCount > initialErrorCount - private def inlineIfNeeded(tree: Tree)(using Context): Tree = + private def inlineIfNeeded(tree: Tree, pt: Type, locked: TypeVars)(using Context): Tree = val meth = tree.symbol if meth.isAllOf(DeferredInline) then - errorTree(tree, i"Deferred inline ${meth.showLocated} cannot be invoked") - else if Inlines.needsInlining(tree) then 
Inlines.inlineCall(tree) + errorTree(tree, em"Deferred inline ${meth.showLocated} cannot be invoked") + else if Inlines.needsInlining(tree) then Inlines.inlineCall(simplify(tree, pt, locked)) else tree override def typedUnadapted(tree: untpd.Tree, pt: Type, locked: TypeVars)(using Context): Tree = @@ -957,29 +972,24 @@ class Inliner(val call: tpd.Tree)(using Context): bindingOfSym(binding.symbol) = binding } - val countRefs = new TreeTraverser { - override def traverse(t: Tree)(using Context) = { - def updateRefCount(sym: Symbol, inc: Int) = - for (x <- refCount.get(sym)) refCount(sym) = x + inc - def updateTermRefCounts(t: Tree) = - t.typeOpt.foreachPart { - case ref: TermRef => updateRefCount(ref.symbol, 2) // can't be inlined, so make sure refCount is at least 2 - case _ => - } - - t match { - case t: RefTree => - updateRefCount(t.symbol, 1) - updateTermRefCounts(t) - case _: New | _: TypeTree => - updateTermRefCounts(t) - case _ => - } - traverseChildren(t) + def updateRefCount(sym: Symbol, inc: Int) = + for (x <- refCount.get(sym)) refCount(sym) = x + inc + def updateTermRefCounts(tree: Tree) = + tree.typeOpt.foreachPart { + case ref: TermRef => updateRefCount(ref.symbol, 2) // can't be inlined, so make sure refCount is at least 2 + case _ => } - } - countRefs.traverse(tree) - for (binding <- bindings) countRefs.traverse(binding) + def countRefs(tree: Tree) = + tree.foreachSubTree { + case t: RefTree => + updateRefCount(t.symbol, 1) + updateTermRefCounts(t) + case t @ (_: New | _: TypeTree) => + updateTermRefCounts(t) + case _ => + } + countRefs(tree) + for (binding <- bindings) countRefs(binding) def retain(boundSym: Symbol) = { refCount.get(boundSym) match { @@ -1002,7 +1012,7 @@ class Inliner(val call: tpd.Tree)(using Context): super.transform(t1) case t: Apply => val t1 = super.transform(t) - if (t1 `eq` t) t else reducer.betaReduce(t1) + if (t1 `eq` t) t else BetaReduce(t1) case Block(Nil, expr) => super.transform(expr) case _ => @@ -1021,9 +1031,9 @@ 
class Inliner(val call: tpd.Tree)(using Context): } private def expandMacro(body: Tree, splicePos: SrcPos)(using Context) = { - assert(StagingContext.level == 0) + assert(level == 0) val inlinedFrom = enclosingInlineds.last - val dependencies = macroDependencies(body) + val dependencies = macroDependencies(body)(using spliceContext) val suspendable = ctx.compilationUnit.isSuspendable if dependencies.nonEmpty && !ctx.reporter.errorsReported then for sym <- dependencies do @@ -1053,28 +1063,12 @@ class Inliner(val call: tpd.Tree)(using Context): */ private def macroDependencies(tree: Tree)(using Context) = new TreeAccumulator[List[Symbol]] { - private var level = -1 override def apply(syms: List[Symbol], tree: tpd.Tree)(using Context): List[Symbol] = - if level != -1 then foldOver(syms, tree) - else tree match { - case tree: RefTree if tree.isTerm && tree.symbol.isDefinedInCurrentRun && !tree.symbol.isLocal => + tree match { + case tree: RefTree if tree.isTerm && level == -1 && tree.symbol.isDefinedInCurrentRun && !tree.symbol.isLocal => foldOver(tree.symbol :: syms, tree) - case Quoted(body) => - level += 1 - try apply(syms, body) - finally level -= 1 - case Spliced(body) => - level -= 1 - try apply(syms, body) - finally level += 1 - case SplicedType(body) => - level -= 1 - try apply(syms, body) - finally level += 1 - case _: TypTree => - syms - case _ => - foldOver(syms, tree) + case _: TypTree => syms + case _ => foldOver(syms, tree) } }.apply(Nil, tree) end Inliner diff --git a/compiler/src/dotty/tools/dotc/inlines/Inlines.scala b/compiler/src/dotty/tools/dotc/inlines/Inlines.scala index 8be23b932e98..36dc8a642afc 100644 --- a/compiler/src/dotty/tools/dotc/inlines/Inlines.scala +++ b/compiler/src/dotty/tools/dotc/inlines/Inlines.scala @@ -14,6 +14,7 @@ import ErrorReporting.errorTree import dotty.tools.dotc.util.{SourceFile, SourcePosition, SrcPos} import parsing.Parsers.Parser import transform.{PostTyper, Inlining, CrossVersionChecks} +import 
staging.StagingLevel import collection.mutable import reporting.trace @@ -56,7 +57,7 @@ object Inlines: case _ => isInlineable(tree.symbol) && !tree.tpe.widenTermRefExpr.isInstanceOf[MethodOrPoly] - && StagingContext.level == 0 + && StagingLevel.level == 0 && ( ctx.phase == Phases.inliningPhase || (ctx.phase == Phases.typerPhase && needsTransparentInlining(tree)) @@ -85,7 +86,10 @@ object Inlines: if (tree.symbol == defn.CompiletimeTesting_typeChecks) return Intrinsics.typeChecks(tree) if (tree.symbol == defn.CompiletimeTesting_typeCheckErrors) return Intrinsics.typeCheckErrors(tree) - CrossVersionChecks.checkExperimentalRef(tree.symbol, tree.srcPos) + if ctx.isAfterTyper then + // During typer we wait with cross version checks until PostTyper, in order + // not to provoke cyclic references. See i16116 for a test case. + CrossVersionChecks.checkExperimentalRef(tree.symbol, tree.srcPos) if tree.symbol.isConstructor then return tree // error already reported for the inline constructor definition @@ -153,9 +157,9 @@ object Inlines: else ("successive inlines", ctx.settings.XmaxInlines) errorTree( tree, - i"""|Maximal number of $reason (${setting.value}) exceeded, - |Maybe this is caused by a recursive inline method? - |You can use ${setting.name} to change the limit.""".toMessage, + em"""|Maximal number of $reason (${setting.value}) exceeded, + |Maybe this is caused by a recursive inline method? + |You can use ${setting.name} to change the limit.""", (tree :: enclosingInlineds).last.srcPos ) if ctx.base.stopInlining && enclosingInlineds.isEmpty then @@ -178,37 +182,28 @@ object Inlines: // as its right hand side. The call to the wrapper unapply serves as the signpost for pattern matching. // After pattern matching, the anonymous class is removed in phase InlinePatterns with a beta reduction step. 
// - // An inline unapply `P.unapply` in a plattern `P(x1,x2,...)` is transformed into - // `{ class $anon { def unapply(t0: T0)(using t1: T1, t2: T2, ...): R = P.unapply(t0)(using t1, t2, ...) }; new $anon }.unapply` - // and the call `P.unapply(x1, x2, ...)` is inlined. + // An inline unapply `P.unapply` in a pattern `P[...](using ...)(x1,x2,...)(using t1: T1, t2: T2, ...)` is transformed into + // `{ class $anon { def unapply(s: S)(using t1: T1, t2: T2, ...): R = P.unapply[...](using ...)(s)(using t1, t2, ...) }; new $anon }.unapply(using y1,y2,...)` + // and the call `P.unapply[...](using ...)(x1, x2, ...)(using t1, t2, ...)` is inlined. // This serves as a placeholder for the inlined body until the `patternMatcher` phase. After pattern matcher // transforms the patterns into terms, the `inlinePatterns` phase removes this anonymous class by β-reducing // the call to the `unapply`. - object SplitFunAndGivenArgs: - def unapply(tree: Tree): (Tree, List[List[Tree]]) = tree match - case Apply(SplitFunAndGivenArgs(fn, argss), args) => (fn, argss :+ args) - case _ => (tree, Nil) - val UnApply(SplitFunAndGivenArgs(fun, leadingImplicits), trailingImplicits, patterns) = unapp - if leadingImplicits.flatten.nonEmpty then - // To support them see https://github.com/lampepfl/dotty/pull/13158 - report.error("inline unapply methods with given parameters before the scrutinee are not supported", fun) + val UnApply(fun, trailingImplicits, patterns) = unapp val sym = unapp.symbol var unapplySym1: Symbol = NoSymbol // created from within AnonClass() and used afterwards val newUnapply = AnonClass(ctx.owner, List(defn.ObjectType), sym.coord) { cls => - val targs = fun match - case TypeApply(_, targs) => targs - case _ => Nil - val unapplyInfo = sym.info match - case info: PolyType => info.instantiate(targs.map(_.tpe)) - case info => info - - val unapplySym = newSymbol(cls, sym.name.toTermName, Synthetic | Method, unapplyInfo, coord = sym.coord).entered + // `fun` is a partially 
applied method that contains all type applications of the method. + // The methodic type `fun.tpe.widen` is the type of the function starting from the scrutinee argument + // and its type parameters are instantiated. + val unapplySym = newSymbol(cls, sym.name.toTermName, Synthetic | Method, fun.tpe.widen, coord = sym.coord).entered val unapply = DefDef(unapplySym.asTerm, argss => - inlineCall(fun.appliedToArgss(argss).withSpan(unapp.span))(using ctx.withOwner(unapplySym)) + val body = fun.appliedToArgss(argss).withSpan(unapp.span) + if body.symbol.is(Transparent) then inlineCall(body)(using ctx.withOwner(unapplySym)) + else body ) unapplySym1 = unapplySym List(unapply) @@ -235,8 +230,8 @@ object Inlines: val retainer = meth.copy( name = BodyRetainerName(meth.name), - flags = meth.flags &~ (Inline | Macro | Override) | Private, - coord = mdef.rhs.span.startPos).asTerm + flags = (meth.flags &~ (Inline | Macro | Override | AbsOverride)) | Private, + coord = mdef.rhs.span.startPos).asTerm.entered retainer.deriveTargetNameAnnotation(meth, name => BodyRetainerName(name.asTermName)) DefDef(retainer, prefss => inlineCall( @@ -386,8 +381,7 @@ object Inlines: /** Expand call to scala.compiletime.codeOf */ def codeOf(arg: Tree, pos: SrcPos)(using Context): Tree = - val ctx1 = ctx.fresh.setSetting(ctx.settings.color, "never") - Literal(Constant(arg.show(using ctx1))).withSpan(pos.span) + Literal(Constant(arg.show(using ctx.withoutColors))).withSpan(pos.span) end Intrinsics /** Produces an inlined version of `call` via its `inlined` method. 
@@ -439,8 +433,7 @@ object Inlines: val evidence = evTyper.inferImplicitArg(tpt.tpe, tpt.span) evidence.tpe match case fail: Implicits.SearchFailureType => - val msg = evTyper.missingArgMsg(evidence, tpt.tpe, "") - errorTree(call, em"$msg") + errorTree(call, evTyper.missingArgMsg(evidence, tpt.tpe, "")) case _ => evidence } diff --git a/compiler/src/dotty/tools/dotc/inlines/PrepareInlineable.scala b/compiler/src/dotty/tools/dotc/inlines/PrepareInlineable.scala index 7e47bbfdfa8a..060c8d21f390 100644 --- a/compiler/src/dotty/tools/dotc/inlines/PrepareInlineable.scala +++ b/compiler/src/dotty/tools/dotc/inlines/PrepareInlineable.scala @@ -17,11 +17,12 @@ import NameKinds.{InlineAccessorName, UniqueInlineName} import inlines.Inlines import NameOps._ import Annotations._ -import transform.{AccessProxies, PCPCheckAndHeal, Splicer} +import transform.{AccessProxies, Splicer} +import staging.CrossStageSafety import transform.SymUtils.* import config.Printers.inlining import util.Property -import dotty.tools.dotc.transform.TreeMapWithStages._ +import staging.StagingLevel object PrepareInlineable { import tpd._ @@ -73,7 +74,7 @@ object PrepareInlineable { !sym.isContainedIn(inlineSym) && !(sym.isStableMember && sym.info.widenTermRefExpr.isInstanceOf[ConstantType]) && !sym.isInlineMethod && - (Inlines.inInlineMethod || StagingContext.level > 0) + (Inlines.inInlineMethod || StagingLevel.level > 0) def preTransform(tree: Tree)(using Context): Tree @@ -85,14 +86,7 @@ object PrepareInlineable { } override def transform(tree: Tree)(using Context): Tree = - inContext(stagingContext(tree)) { - postTransform(super.transform(preTransform(tree))) - } - - private def stagingContext(tree: Tree)(using Context): Context = tree match - case tree: Apply if tree.symbol.isQuote => StagingContext.quoteContext - case tree: Apply if tree.symbol.isExprSplice => StagingContext.spliceContext - case _ => ctx + postTransform(super.transform(preTransform(tree))) } /** Direct approach: place the 
accessor with the accessed symbol. This has the @@ -153,7 +147,7 @@ object PrepareInlineable { val qual = qualifier(refPart) inlining.println(i"adding receiver passing inline accessor for $tree/$refPart -> (${qual.tpe}, $refPart: ${refPart.getClass}, $argss%, %") - // Need to dealias in order to cagtch all possible references to abstracted over types in + // Need to dealias in order to catch all possible references to abstracted over types in // substitutions val dealiasMap = new TypeMap { def apply(t: Type) = mapOver(t.dealias) @@ -255,7 +249,7 @@ object PrepareInlineable { /** Register inline info for given inlineable method `sym`. * - * @param sym The symbol denotation of the inlineable method for which info is registered + * @param inlined The symbol denotation of the inlineable method for which info is registered * @param treeExpr A function that computes the tree to be inlined, given a context * This tree may still refer to non-public members. * @param ctx The context to use for evaluating `treeExpr`. 
It needs @@ -284,16 +278,16 @@ object PrepareInlineable { private def checkInlineMethod(inlined: Symbol, body: Tree)(using Context): body.type = { if Inlines.inInlineMethod(using ctx.outer) then - report.error(ex"Implementation restriction: nested inline methods are not supported", inlined.srcPos) + report.error(em"Implementation restriction: nested inline methods are not supported", inlined.srcPos) if (inlined.is(Macro) && !ctx.isAfterTyper) { def checkMacro(tree: Tree): Unit = tree match { - case Spliced(code) => + case Splice(code) => if (code.symbol.flags.is(Inline)) report.error("Macro cannot be implemented with an `inline` method", code.srcPos) Splicer.checkValidMacroBody(code) - new PCPCheckAndHeal(freshStagingContext).transform(body) // Ignore output, only check PCP + (new CrossStageSafety).transform(body) // Ignore output, only check cross-stage safety case Block(List(stat), Literal(Constants.Constant(()))) => checkMacro(stat) case Block(Nil, expr) => checkMacro(expr) case Typed(expr, _) => checkMacro(expr) diff --git a/compiler/src/dotty/tools/dotc/interactive/Completion.scala b/compiler/src/dotty/tools/dotc/interactive/Completion.scala index 6af34dc88362..e4d0cce9f6f9 100644 --- a/compiler/src/dotty/tools/dotc/interactive/Completion.scala +++ b/compiler/src/dotty/tools/dotc/interactive/Completion.scala @@ -17,6 +17,7 @@ import dotty.tools.dotc.core.Symbols.{NoSymbol, Symbol, defn, newSymbol} import dotty.tools.dotc.core.StdNames.nme import dotty.tools.dotc.core.SymDenotations.SymDenotation import dotty.tools.dotc.core.TypeError +import dotty.tools.dotc.core.Phases import dotty.tools.dotc.core.Types.{AppliedType, ExprType, MethodOrPoly, NameFilter, NoType, RefinedType, TermRef, Type, TypeProxy} import dotty.tools.dotc.parsing.Tokens import dotty.tools.dotc.util.Chars diff --git a/compiler/src/dotty/tools/dotc/interactive/Interactive.scala b/compiler/src/dotty/tools/dotc/interactive/Interactive.scala index 6b2237a09b3f..fd6d426f39bb 100644 --- 
a/compiler/src/dotty/tools/dotc/interactive/Interactive.scala +++ b/compiler/src/dotty/tools/dotc/interactive/Interactive.scala @@ -313,8 +313,8 @@ object Interactive { case _ => } localCtx - case tree @ Template(constr, parents, self, _) => - if ((constr :: self :: parents).contains(nested)) outer + case tree @ Template(constr, _, self, _) => + if ((constr :: self :: tree.parentsOrDerived).contains(nested)) outer else contextOfStat(tree.body, nested, tree.symbol, outer.inClassContext(self.symbol)) case _ => outer diff --git a/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala b/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala index 4611554a01a3..6ec896dcb200 100644 --- a/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala @@ -20,7 +20,8 @@ import StdNames._ import reporting._ import dotty.tools.dotc.util.SourceFile import util.Spans._ -import scala.collection.mutable.ListBuffer + +import scala.collection.mutable.{ListBuffer, LinkedHashMap} object JavaParsers { @@ -71,10 +72,10 @@ object JavaParsers { } } - def syntaxError(msg: String, skipIt: Boolean): Unit = + def syntaxError(msg: Message, skipIt: Boolean): Unit = syntaxError(in.offset, msg, skipIt) - def syntaxError(offset: Int, msg: String, skipIt: Boolean): Unit = { + def syntaxError(offset: Int, msg: Message, skipIt: Boolean): Unit = { if (offset > lastErrorOffset) { syntaxError(msg, offset) // no more errors on this token. 
@@ -96,8 +97,12 @@ object JavaParsers { def javaLangDot(name: Name): Tree = Select(javaDot(nme.lang), name) + /** Tree representing `java.lang.Object` */ def javaLangObject(): Tree = javaLangDot(tpnme.Object) + /** Tree representing `java.lang.Record` */ + def javaLangRecord(): Tree = javaLangDot(tpnme.Record) + def arrayOf(tpt: Tree): AppliedTypeTree = AppliedTypeTree(scalaDot(tpnme.Array), List(tpt)) @@ -178,9 +183,7 @@ object JavaParsers { if (in.token != token) { val offsetToReport = in.offset val msg = - tokenString(token) + " expected but " + - tokenString(in.token) + " found." - + em"${tokenString(token)} expected but ${tokenString(in.token)} found." syntaxError(offsetToReport, msg, skipIt = true) } if (in.token == token) in.nextToken() @@ -271,7 +274,7 @@ object JavaParsers { case FLOAT => in.nextToken(); TypeTree(FloatType) case DOUBLE => in.nextToken(); TypeTree(DoubleType) case BOOLEAN => in.nextToken(); TypeTree(BooleanType) - case _ => syntaxError("illegal start of type", skipIt = true); errorTypeTree + case _ => syntaxError(em"illegal start of type", skipIt = true); errorTypeTree } } @@ -557,6 +560,14 @@ object JavaParsers { def definesInterface(token: Int): Boolean = token == INTERFACE || token == AT + /** If the next token is the identifier "record", convert it into the RECORD token. + * This makes it easier to handle records in various parts of the code, + * in particular when a `parentToken` is passed to some functions. 
+ */ + def adaptRecordIdentifier(): Unit = + if in.token == IDENTIFIER && in.name == jnme.RECORDid then + in.token = RECORD + def termDecl(start: Offset, mods: Modifiers, parentToken: Int, parentTParams: List[TypeDef]): List[Tree] = { val inInterface = definesInterface(parentToken) val tparams = if (in.token == LT) typeParams(Flags.JavaDefined | Flags.Param) else List() @@ -583,6 +594,16 @@ object JavaParsers { TypeTree(), methodBody()).withMods(mods) } } + } else if (in.token == LBRACE && rtptName != nme.EMPTY && parentToken == RECORD) { + /* + record RecordName(T param1, ...) { + RecordName { // <- here + // methodBody + } + } + */ + methodBody() + Nil } else { var mods1 = mods @@ -719,12 +740,11 @@ object JavaParsers { ValDef(name, tpt2, if (mods.is(Flags.Param)) EmptyTree else unimplementedExpr).withMods(mods1) } - def memberDecl(start: Offset, mods: Modifiers, parentToken: Int, parentTParams: List[TypeDef]): List[Tree] = in.token match { - case CLASS | ENUM | INTERFACE | AT => - typeDecl(start, if (definesInterface(parentToken)) mods | Flags.JavaStatic else mods) + def memberDecl(start: Offset, mods: Modifiers, parentToken: Int, parentTParams: List[TypeDef]): List[Tree] = in.token match + case CLASS | ENUM | RECORD | INTERFACE | AT => + typeDecl(start, if definesInterface(parentToken) then mods | Flags.JavaStatic else mods) case _ => termDecl(start, mods, parentToken, parentTParams) - } def makeCompanionObject(cdef: TypeDef, statics: List[Tree]): Tree = atSpan(cdef.span) { @@ -762,7 +782,7 @@ object JavaParsers { accept(SEMI) val names = buf.toList if (names.length < 2) { - syntaxError(start, "illegal import", skipIt = false) + syntaxError(start, em"illegal import", skipIt = false) List() } else { @@ -806,6 +826,51 @@ object JavaParsers { addCompanionObject(statics, cls) } + def recordDecl(start: Offset, mods: Modifiers): List[Tree] = + accept(RECORD) + val nameOffset = in.offset + val name = identForType() + val tparams = typeParams() + val header = 
formalParams() + val superclass = javaLangRecord() // records always extend java.lang.Record + val interfaces = interfacesOpt() // records may implement interfaces + val (statics, body) = typeBody(RECORD, name, tparams) + + // We need to generate accessors for every param, if no method with the same name is already defined + + var fieldsByName = header.map(v => (v.name, (v.tpt, v.mods.annotations))).to(LinkedHashMap) + + for case DefDef(name, paramss, _, _) <- body + if paramss.isEmpty && fieldsByName.contains(name) + do + fieldsByName -= name + end for + + val accessors = + (for (name, (tpt, annots)) <- fieldsByName yield + DefDef(name, Nil, tpt, unimplementedExpr) + .withMods(Modifiers(Flags.JavaDefined | Flags.Method | Flags.Synthetic)) + ).toList + + // generate the canonical constructor + val canonicalConstructor = + DefDef(nme.CONSTRUCTOR, joinParams(tparams, List(header)), TypeTree(), EmptyTree) + .withMods(Modifiers(Flags.JavaDefined | Flags.Synthetic, mods.privateWithin)) + + // return the trees + val recordTypeDef = atSpan(start, nameOffset) { + TypeDef(name, + makeTemplate( + parents = superclass :: interfaces, + stats = canonicalConstructor :: accessors ::: body, + tparams = tparams, + true + ) + ).withMods(mods) + } + addCompanionObject(statics, recordTypeDef) + end recordDecl + def interfaceDecl(start: Offset, mods: Modifiers): List[Tree] = { accept(INTERFACE) val nameOffset = in.offset @@ -822,7 +887,7 @@ object JavaParsers { val iface = atSpan(start, nameOffset) { TypeDef( name, - makeTemplate(parents, body, tparams, false)).withMods(mods | Flags.Trait | Flags.JavaInterface | Flags.Abstract) + makeTemplate(parents, body, tparams, false)).withMods(mods | Flags.JavaInterface) } addCompanionObject(statics, iface) } @@ -848,7 +913,8 @@ object JavaParsers { else if (in.token == SEMI) in.nextToken() else { - if (in.token == ENUM || definesInterface(in.token)) mods |= Flags.JavaStatic + adaptRecordIdentifier() + if (in.token == ENUM || in.token == RECORD 
|| definesInterface(in.token)) mods |= Flags.JavaStatic val decls = memberDecl(start, mods, parentToken, parentTParams) (if (mods.is(Flags.JavaStatic) || inInterface && !(decls exists (_.isInstanceOf[DefDef]))) statics @@ -858,10 +924,9 @@ object JavaParsers { } (statics.toList, members.toList) } - def annotationParents: List[Select] = List( - scalaAnnotationDot(tpnme.Annotation), - Select(javaLangDot(nme.annotation), tpnme.Annotation), - scalaAnnotationDot(tpnme.ClassfileAnnotation) + def annotationParents: List[Tree] = List( + javaLangObject(), + Select(javaLangDot(nme.annotation), tpnme.Annotation) ) def annotationDecl(start: Offset, mods: Modifiers): List[Tree] = { accept(AT) @@ -877,7 +942,7 @@ object JavaParsers { List(constructorParams), TypeTree(), EmptyTree).withMods(Modifiers(Flags.JavaDefined)) val templ = makeTemplate(annotationParents, constr :: body, List(), true) val annot = atSpan(start, nameOffset) { - TypeDef(name, templ).withMods(mods | Flags.Abstract) + TypeDef(name, templ).withMods(mods | Flags.JavaInterface | Flags.JavaAnnotation) } addCompanionObject(statics, annot) } @@ -950,13 +1015,13 @@ object JavaParsers { } } - def typeDecl(start: Offset, mods: Modifiers): List[Tree] = in.token match { + def typeDecl(start: Offset, mods: Modifiers): List[Tree] = in.token match case ENUM => enumDecl(start, mods) case INTERFACE => interfaceDecl(start, mods) case AT => annotationDecl(start, mods) case CLASS => classDecl(start, mods) - case _ => in.nextToken(); syntaxError("illegal start of type declaration", skipIt = true); List(errorTypeTree) - } + case RECORD => recordDecl(start, mods) + case _ => in.nextToken(); syntaxError(em"illegal start of type declaration", skipIt = true); List(errorTypeTree) def tryConstant: Option[Constant] = { val negate = in.token match { @@ -1007,6 +1072,7 @@ object JavaParsers { if (in.token != EOF) { val start = in.offset val mods = modifiers(inInterface = false) + adaptRecordIdentifier() // needed for typeDecl buf ++= 
typeDecl(start, mods) } } diff --git a/compiler/src/dotty/tools/dotc/parsing/JavaScanners.scala b/compiler/src/dotty/tools/dotc/parsing/JavaScanners.scala index 1be8bdae6bd1..d21d4b85b5df 100644 --- a/compiler/src/dotty/tools/dotc/parsing/JavaScanners.scala +++ b/compiler/src/dotty/tools/dotc/parsing/JavaScanners.scala @@ -10,6 +10,7 @@ import JavaTokens._ import scala.annotation.{switch, tailrec} import util.Chars._ import PartialFunction.cond +import core.Decorators.em object JavaScanners { @@ -108,7 +109,7 @@ object JavaScanners { setStrVal() nextChar() else - error("unclosed string literal") + error(em"unclosed string literal") else nextChar() if ch != '\"' then // "" empty string literal @@ -127,7 +128,7 @@ object JavaScanners { setStrVal() } else - error("unclosed character literal") + error(em"unclosed character literal") case '=' => token = EQUALS @@ -298,7 +299,7 @@ object JavaScanners { nextChar() token = DOTDOTDOT } - else error("`.` character expected") + else error(em"`.` character expected") } case ';' => @@ -336,7 +337,7 @@ object JavaScanners { case SU => if (isAtEnd) token = EOF else { - error("illegal character") + error(em"illegal character") nextChar() } @@ -347,7 +348,7 @@ object JavaScanners { getIdentRest() } else { - error("illegal character: " + ch.toInt) + error(em"illegal character: ${ch.toInt}") nextChar() } } @@ -360,7 +361,7 @@ object JavaScanners { case _ => nextChar(); skipLineComment() } @tailrec def skipJavaComment(): Unit = ch match { - case SU => incompleteInputError("unclosed comment") + case SU => incompleteInputError(em"unclosed comment") case '*' => nextChar(); if (ch == '/') nextChar() else skipJavaComment() case _ => nextChar(); skipJavaComment() } @@ -480,7 +481,7 @@ object JavaScanners { nextChar() } if (ch != LF && ch != CR) { // CR-LF is already normalized into LF by `JavaCharArrayReader` - error("illegal text block open delimiter sequence, missing line terminator") + error(em"illegal text block open delimiter sequence, 
missing line terminator") return } nextChar() @@ -529,7 +530,7 @@ object JavaScanners { // Bail out if the block never did have an end if (!blockClosed) { - error("unclosed text block") + error(em"unclosed text block") return } @@ -642,14 +643,14 @@ object JavaScanners { while (i < len) { val d = digit2int(strVal.charAt(i), base) if (d < 0) { - error("malformed integer number") + error(em"malformed integer number") return 0 } if (value < 0 || limit / (base / divider) < value || limit - (d / divider) < value * (base / divider) && !(negated && limit == value * base - 1 + d)) { - error("integer number too large") + error(em"integer number too large") return 0 } value = value * base + d @@ -666,11 +667,11 @@ object JavaScanners { try { val value: Double = java.lang.Double.valueOf(strVal.toString).nn.doubleValue() if (value > limit) - error("floating point number too large") + error(em"floating point number too large") if (negated) -value else value } catch { case _: NumberFormatException => - error("malformed floating point number") + error(em"malformed floating point number") 0.0 } } diff --git a/compiler/src/dotty/tools/dotc/parsing/JavaTokens.scala b/compiler/src/dotty/tools/dotc/parsing/JavaTokens.scala index 3e73b6d95adb..2b7882173e00 100644 --- a/compiler/src/dotty/tools/dotc/parsing/JavaTokens.scala +++ b/compiler/src/dotty/tools/dotc/parsing/JavaTokens.scala @@ -41,6 +41,9 @@ object JavaTokens extends TokensCommon { inline val SWITCH = 133; enter(SWITCH, "switch") inline val ASSERT = 134; enter(ASSERT, "assert") + /** contextual keywords (turned into keywords in certain conditions, see JLS 3.9 of Java 9+) */ + inline val RECORD = 135; enter(RECORD, "record") + /** special symbols */ inline val EQEQ = 140 inline val BANGEQ = 141 diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index e108e2d9cbeb..7a29ac3f7a38 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ 
b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -143,21 +143,12 @@ object Parsers { syntaxError(msg, Span(offset, offset + length)) lastErrorOffset = in.offset - def syntaxError(msg: => String, offset: Int): Unit = - syntaxError(msg.toMessage, offset) - - def syntaxError(msg: => String): Unit = - syntaxError(msg, in.offset) - /** Unconditionally issue an error at given span, without * updating lastErrorOffset. */ def syntaxError(msg: Message, span: Span): Unit = report.error(msg, source.atSpan(span)) - def syntaxError(msg: => String, span: Span): Unit = - syntaxError(msg.toMessage, span) - def unimplementedExpr(using Context): Select = Select(scalaDot(nme.Predef), nme.???) } @@ -199,6 +190,8 @@ object Parsers { def isPureArrow(name: Name): Boolean = isIdent(name) && Feature.pureFunsEnabled def isPureArrow: Boolean = isPureArrow(nme.PUREARROW) || isPureArrow(nme.PURECTXARROW) def isErased = isIdent(nme.erased) && in.erasedEnabled + // Are we seeing an `erased` soft keyword that will not be an identifier? 
+ def isErasedKw = isErased && in.isSoftModifierInParamModifierPosition def isSimpleLiteral = simpleLiteralTokens.contains(in.token) || isIdent(nme.raw.MINUS) && numericLitTokens.contains(in.lookahead.token) @@ -288,9 +281,6 @@ object Parsers { syntaxError(msg, offset) skip() - def syntaxErrorOrIncomplete(msg: => String): Unit = - syntaxErrorOrIncomplete(msg.toMessage, in.offset) - def syntaxErrorOrIncomplete(msg: Message, span: Span): Unit = if in.token == EOF then incompleteInputError(msg) @@ -346,7 +336,7 @@ object Parsers { in.nextToken() recur(true, endSeen) else if in.token == END then - if endSeen then syntaxError("duplicate end marker") + if endSeen then syntaxError(em"duplicate end marker") checkEndMarker(stats) recur(sepSeen, endSeen = true) else if isStatSeqEnd || in.token == altEnd then @@ -358,7 +348,7 @@ object Parsers { val statFollows = mustStartStatTokens.contains(found) syntaxError( if noPrevStat then IllegalStartOfStatement(what, isModifier, statFollows) - else i"end of $what expected but ${showToken(found)} found".toMessage) + else em"end of $what expected but ${showToken(found)} found") if mustStartStatTokens.contains(found) then false // it's a statement that might be legal in an outer context else @@ -460,7 +450,7 @@ object Parsers { */ def convertToParam(tree: Tree, mods: Modifiers): ValDef = def fail() = - syntaxError(s"not a legal formal parameter for a function literal", tree.span) + syntaxError(em"not a legal formal parameter for a function literal", tree.span) makeParameter(nme.ERROR, tree, mods) tree match case param: ValDef => @@ -475,6 +465,15 @@ object Parsers { case _ => fail() + /** Checks that tuples don't contain a parameter. 
*/ + def checkNonParamTuple(t: Tree) = t match + case Tuple(ts) => ts.collectFirst { + case param: ValDef => + syntaxError(em"invalid parameter definition syntax in tuple value", param.span) + } + case _ => + + /** Convert (qual)ident to type identifier */ def convertToTypeId(tree: Tree): Tree = tree match { @@ -618,11 +617,11 @@ object Parsers { if in.isNewLine && !(nextIndentWidth < startIndentWidth) then warning( if startIndentWidth <= nextIndentWidth then - i"""Line is indented too far to the right, or a `{` is missing before: - | - |${t.tryToShow}""".toMessage + em"""Line is indented too far to the right, or a `{` is missing before: + | + |${t.tryToShow}""" else - in.spaceTabMismatchMsg(startIndentWidth, nextIndentWidth).toMessage, + in.spaceTabMismatchMsg(startIndentWidth, nextIndentWidth), in.next.offset ) t @@ -635,7 +634,7 @@ object Parsers { if in.isNewLine then val nextIndentWidth = in.indentWidth(in.next.offset) if in.currentRegion.indentWidth < nextIndentWidth then - warning(i"Line is indented too far to the right, or a `{` or `:` is missing".toMessage, in.next.offset) + warning(em"Line is indented too far to the right, or a `{` or `:` is missing", in.next.offset) /* -------- REWRITES ----------------------------------------------------------- */ @@ -716,7 +715,11 @@ object Parsers { val t = enclosed(INDENT, body) if needsBraces(t) then patch(source, Span(startOpening, endOpening), " {") - patch(source, Span(closingOffset(source.nextLine(in.lastOffset))), indentWidth.toPrefix ++ "}\n") + val next = in.next + def closedByEndMarker = + next.token == END && (next.offset - next.lineOffset) == indentWidth.toPrefix.size + if closedByEndMarker then patch(source, Span(next.offset), "} // ") + else patch(source, Span(closingOffset(source.nextLine(in.lastOffset))), indentWidth.toPrefix ++ "}\n") t end indentedToBraces @@ -778,7 +781,7 @@ object Parsers { } }) canRewrite &= (in.isAfterLineEnd || statCtdTokens.contains(in.token)) // test (5) - if (canRewrite && 
(!underColonSyntax || in.fewerBracesEnabled)) { + if canRewrite && (!underColonSyntax || Feature.fewerBracesEnabled) then val openingPatchStr = if !colonRequired then "" else if testChar(startOpening - 1, Chars.isOperatorPart(_)) then " :" @@ -786,7 +789,6 @@ object Parsers { val (startClosing, endClosing) = closingElimRegion() patch(source, Span(startOpening, endOpening), openingPatchStr) patch(source, Span(startClosing, endClosing), "") - } t } @@ -957,7 +959,7 @@ object Parsers { lookahead.isArrow && { lookahead.nextToken() - lookahead.token == INDENT + lookahead.token == INDENT || lookahead.token == EOF } lookahead.nextToken() if lookahead.isIdent || lookahead.token == USCORE then @@ -968,29 +970,6 @@ object Parsers { isArrowIndent() else false - /** Under captureChecking language import: is the following token sequence a - * capture set `{ref1, ..., refN}` followed by a token that can start a type? - */ - def followingIsCaptureSet(): Boolean = - Feature.ccEnabled && { - val lookahead = in.LookaheadScanner() - def followingIsTypeStart() = - lookahead.nextToken() - canStartInfixTypeTokens.contains(lookahead.token) - || lookahead.token == LBRACKET - def recur(): Boolean = - (lookahead.isIdent || lookahead.token == THIS) && { - lookahead.nextToken() - if lookahead.token == COMMA then - lookahead.nextToken() - recur() - else - lookahead.token == RBRACE && followingIsTypeStart() - } - lookahead.nextToken() - if lookahead.token == RBRACE then followingIsTypeStart() else recur() - } - /* --------- OPERAND/OPERATOR STACK --------------------------------------- */ var opStack: List[OpInfo] = Nil @@ -1025,7 +1004,7 @@ object Parsers { * body */ def isColonLambda = - in.fewerBracesEnabled && in.token == COLONfollow && followingIsLambdaAfterColon() + Feature.fewerBracesEnabled && in.token == COLONfollow && followingIsLambdaAfterColon() /** operand { infixop operand | MatchClause } [postfixop], * @@ -1082,7 +1061,7 @@ object Parsers { val name = in.name if name == 
nme.CONSTRUCTOR || name == nme.STATIC_CONSTRUCTOR then report.error( - i"""Illegal backquoted identifier: `` and `` are forbidden""", + em"""Illegal backquoted identifier: `` and `` are forbidden""", in.sourcePos()) in.nextToken() name @@ -1235,7 +1214,7 @@ object Parsers { null } catch { - case ex: FromDigitsException => syntaxErrorOrIncomplete(ex.getMessage) + case ex: FromDigitsException => syntaxErrorOrIncomplete(ex.getMessage.toMessage) } Literal(Constant(value)) } @@ -1264,7 +1243,7 @@ object Parsers { } } in.nextToken() - Quote(t) + Quote(t, Nil) } else if !in.featureEnabled(Feature.symbolLiterals) then @@ -1353,11 +1332,16 @@ object Parsers { // note: next is defined here because current == NEWLINE if (in.token == NEWLINE && p(in.next.token)) newLineOpt() - def colonAtEOLOpt(): Unit = { + def acceptIndent() = + if in.token != INDENT then + syntaxErrorOrIncomplete(em"indented definitions expected, ${in} found") + + def colonAtEOLOpt(): Unit = possibleColonOffset = in.lastOffset in.observeColonEOL(inTemplate = false) - if in.token == COLONeol then in.nextToken() - } + if in.token == COLONeol then + in.nextToken() + acceptIndent() def argumentStart(): Unit = colonAtEOLOpt() @@ -1365,9 +1349,9 @@ object Parsers { in.nextToken() if in.indentWidth(in.offset) == in.currentRegion.indentWidth then report.errorOrMigrationWarning( - i"""This opening brace will start a new statement in Scala 3. - |It needs to be indented to the right to keep being treated as - |an argument to the previous expression.${rewriteNotice()}""", + em"""This opening brace will start a new statement in Scala 3. 
+ |It needs to be indented to the right to keep being treated as + |an argument to the previous expression.${rewriteNotice()}""", in.sourcePos(), from = `3.0`) patch(source, Span(in.offset), " ") @@ -1377,8 +1361,7 @@ object Parsers { if in.lookahead.token == END then in.token = NEWLINE else in.nextToken() - if in.token != INDENT && in.token != LBRACE then - syntaxErrorOrIncomplete(i"indented definitions expected, ${in} found") + if in.token != LBRACE then acceptIndent() else newLineOptWhenFollowedBy(LBRACE) @@ -1419,10 +1402,7 @@ object Parsers { if in.token == END then val start = in.skipToken() if stats.isEmpty || !matchesAndSetEnd(stats.last) then - syntaxError("misaligned end marker", Span(start, in.lastCharOffset)) - else if overlapsPatch(source, Span(start, start)) then - patch(source, Span(start, start), "") - patch(source, Span(start, in.lastCharOffset), s"} // end $endName") + syntaxError(em"misaligned end marker", Span(start, in.lastCharOffset)) in.token = IDENTIFIER // Leaving it as the original token can confuse newline insertion in.nextToken() end checkEndMarker @@ -1434,13 +1414,30 @@ object Parsers { */ def toplevelTyp(): Tree = rejectWildcardType(typ()) - private def isFunction(tree: Tree): Boolean = tree match { - case Parens(tree1) => isFunction(tree1) - case Block(Nil, tree1) => isFunction(tree1) - case _: Function => true - case _ => false + private def getFunction(tree: Tree): Option[Function] = tree match { + case Parens(tree1) => getFunction(tree1) + case Block(Nil, tree1) => getFunction(tree1) + case t: Function => Some(t) + case _ => None } + private def checkFunctionNotErased(f: Function, context: String) = + def fail(span: Span) = + syntaxError(em"Implementation restriction: erased parameters are not supported in $context", span) + // erased parameter in type + val hasErasedParam = f match + case f: FunctionWithMods => f.hasErasedParams + case _ => false + if hasErasedParam then + fail(f.span) + // erased parameter in term + val 
hasErasedMods = f.args.collectFirst { + case v: ValDef if v.mods.is(Flags.Erased) => v + } + hasErasedMods match + case Some(param) => fail(param.span) + case _ => + /** CaptureRef ::= ident | `this` */ def captureRef(): Tree = @@ -1455,17 +1452,21 @@ object Parsers { if in.token == RBRACE then Nil else commaSeparated(captureRef) } + def capturesAndResult(core: () => Tree): Tree = + if Feature.ccEnabled && in.token == LBRACE && in.offset == in.lastOffset + then CapturesAndResult(captureSet(), core()) + else core() + /** Type ::= FunType * | HkTypeParamClause ‘=>>’ Type * | FunParamClause ‘=>>’ Type * | MatchType * | InfixType - * | CaptureSet Type -- under captureChecking * FunType ::= (MonoFunType | PolyFunType) * MonoFunType ::= FunTypeArgs (‘=>’ | ‘?=>’) Type - * | (‘->’ | ‘?->’ ) Type -- under pureFunctions + * | (‘->’ | ‘?->’ ) [CaptureSet] Type -- under pureFunctions * PolyFunType ::= HKTypeParamClause '=>' Type - * | HKTypeParamClause ‘->’ Type -- under pureFunctions + * | HKTypeParamClause ‘->’ [CaptureSet] Type -- under pureFunctions * FunTypeArgs ::= InfixType * | `(' [ [ ‘[using]’ ‘['erased'] FunArgType {`,' FunArgType } ] `)' * | '(' [ ‘[using]’ ‘['erased'] TypedFunParam {',' TypedFunParam } ')' @@ -1473,13 +1474,17 @@ object Parsers { def typ(): Tree = val start = in.offset var imods = Modifiers() + var erasedArgs: ListBuffer[Boolean] = ListBuffer() def functionRest(params: List[Tree]): Tree = val paramSpan = Span(start, in.lastOffset) atSpan(start, in.offset) { var token = in.token + var isPure = false if isPureArrow(nme.PUREARROW) then + isPure = true token = ARROW else if isPureArrow(nme.PURECTXARROW) then + isPure = true token = CTXARROW else if token == TLARROW then if !imods.flags.isEmpty || params.isEmpty then @@ -1498,16 +1503,16 @@ object Parsers { else accept(ARROW) - val resultType = typ() + val resultType = if isPure then capturesAndResult(typ) else typ() if token == TLARROW then for case ValDef(_, tpt, _) <- params do if isByNameType(tpt) 
then syntaxError(em"parameter of type lambda may not be call-by-name", tpt.span) TermLambdaTypeTree(params.asInstanceOf[List[ValDef]], resultType) - else if imods.isOneOf(Given | Erased | Impure) then + else if imods.isOneOf(Given | Impure) || erasedArgs.contains(true) then if imods.is(Given) && params.isEmpty then - syntaxError("context function types require at least one parameter", paramSpan) - FunctionWithMods(params, resultType, imods) + syntaxError(em"context function types require at least one parameter", paramSpan) + FunctionWithMods(params, resultType, imods, erasedArgs.toList) else if !ctx.settings.YkindProjector.isDefault then val (newParams :+ newResultType, tparams) = replaceKindProjectorPlaceholders(params :+ resultType): @unchecked lambdaAbstract(tparams, Function(newParams, newResultType)) @@ -1525,17 +1530,30 @@ object Parsers { functionRest(Nil) } else { - if isErased then imods = addModifier(imods) val paramStart = in.offset + def addErased() = + erasedArgs.addOne(isErasedKw) + if isErasedKw then { in.skipToken(); } + addErased() val ts = in.currentRegion.withCommasExpected { funArgType() match case Ident(name) if name != tpnme.WILDCARD && in.isColon => isValParamList = true + def funParam(start: Offset, mods: Modifiers) = { + atSpan(start) { + addErased() + typedFunParam(in.offset, ident(), imods) + } + } commaSeparatedRest( typedFunParam(paramStart, name.toTermName, imods), - () => typedFunParam(in.offset, ident(), imods)) + () => funParam(in.offset, imods)) case t => - commaSeparatedRest(t, funArgType) + def funParam() = { + addErased() + funArgType() + } + commaSeparatedRest(t, funParam) } accept(RPAREN) if isValParamList || in.isArrow || isPureArrow then @@ -1566,30 +1584,33 @@ object Parsers { val arrowOffset = in.skipToken() val body = toplevelTyp() atSpan(start, arrowOffset) { - if (isFunction(body)) - PolyFunction(tparams, body) - else { - syntaxError("Implementation restriction: polymorphic function types must have a value parameter", 
arrowOffset) - Ident(nme.ERROR.toTypeName) + getFunction(body) match { + case Some(f) => + checkFunctionNotErased(f, "poly function") + PolyFunction(tparams, body) + case None => + syntaxError(em"Implementation restriction: polymorphic function types must have a value parameter", arrowOffset) + Ident(nme.ERROR.toTypeName) } } } else { accept(TLARROW); typ() } } - else if in.token == LBRACE && followingIsCaptureSet() then - CapturingTypeTree(captureSet(), typ()) else if (in.token == INDENT) enclosed(INDENT, typ()) else infixType() in.token match - case ARROW | CTXARROW => functionRest(t :: Nil) + case ARROW | CTXARROW => + erasedArgs.addOne(false) + functionRest(t :: Nil) case MATCH => matchType(t) case FORSOME => syntaxError(ExistentialTypesNoLongerSupported()); t case _ => if isPureArrow then + erasedArgs.addOne(false) functionRest(t :: Nil) else - if (imods.is(Erased) && !t.isInstanceOf[FunctionWithMods]) + if (erasedArgs.contains(true) && !t.isInstanceOf[FunctionWithMods]) syntaxError(ErasedTypesCanOnlyBeFunctionTypes(), implicitKwPos(start)) t end typ @@ -1644,6 +1665,7 @@ object Parsers { if in.token == LPAREN then funParamClause() :: funParamClauses() else Nil /** InfixType ::= RefinedType {id [nl] RefinedType} + * | RefinedType `^` */ def infixType(): Tree = infixTypeRest(refinedType()) @@ -1651,19 +1673,41 @@ object Parsers { infixOps(t, canStartInfixTypeTokens, refinedTypeFn, Location.ElseWhere, ParseKind.Type, isOperator = !followingIsVararg() && !isPureArrow) - /** RefinedType ::= WithType {[nl] Refinement} + /** RefinedType ::= WithType {[nl] Refinement} [`^` CaptureSet] */ val refinedTypeFn: Location => Tree = _ => refinedType() def refinedType() = refinedTypeRest(withType()) + /** Disambiguation: a `^` is treated as a postfix operator meaning `^{cap}` + * if followed by `{`, `->`, or `?->`, + * or followed by a new line (significant or not), + * or followed by a token that cannot start an infix type. + * Otherwise it is treated as an infix operator. 
+ */ + private def isCaptureUpArrow = + val ahead = in.lookahead + ahead.token == LBRACE + || ahead.isIdent(nme.PUREARROW) + || ahead.isIdent(nme.PURECTXARROW) + || !canStartInfixTypeTokens.contains(ahead.token) + || ahead.lineOffset > 0 + def refinedTypeRest(t: Tree): Tree = { argumentStart() - if (in.isNestedStart) + if in.isNestedStart then refinedTypeRest(atSpan(startOffset(t)) { RefinedTypeTree(rejectWildcardType(t), refinement(indentOK = true)) }) - else t + else if Feature.ccEnabled && in.isIdent(nme.UPARROW) && isCaptureUpArrow then + val upArrowStart = in.offset + in.nextToken() + def cs = + if in.token == LBRACE then captureSet() + else atSpan(upArrowStart)(captureRoot) :: Nil + makeRetaining(t, cs, tpnme.retains) + else + t } /** WithType ::= AnnotType {`with' AnnotType} (deprecated) @@ -1706,10 +1750,10 @@ object Parsers { def splice(isType: Boolean): Tree = val start = in.offset atSpan(in.offset) { + val inPattern = (staged & StageKind.QuotedPattern) != 0 val expr = if (in.name.length == 1) { in.nextToken() - val inPattern = (staged & StageKind.QuotedPattern) != 0 withinStaged(StageKind.Spliced)(if (inPattern) inBraces(pattern()) else stagedBlock()) } else atSpan(in.offset + 1) { @@ -1723,8 +1767,10 @@ object Parsers { val hint = if inPattern then "Use lower cased variable name without the `$` instead" else "To use a given Type[T] in a quote just write T directly" - syntaxError(s"$msg\n\nHint: $hint", Span(start, in.lastOffset)) + syntaxError(em"$msg\n\nHint: $hint", Span(start, in.lastOffset)) Ident(nme.ERROR.toTypeName) + else if inPattern then + SplicePattern(expr, Nil) else Splice(expr) } @@ -1744,7 +1790,7 @@ object Parsers { Ident(tpnme.USCOREkw).withSpan(Span(start, in.lastOffset, start)) else if sourceVersion.isAtLeast(future) then - deprecationWarning(em"`_` is deprecated for wildcard arguments of types: use `?` instead".toMessage) + deprecationWarning(em"`_` is deprecated for wildcard arguments of types: use `?` instead") patch(source, 
Span(in.offset, in.offset + 1), "?") val start = in.skipToken() typeBounds().withSpan(Span(start, in.lastOffset, start)) @@ -1805,7 +1851,7 @@ object Parsers { if (!ctx.settings.YkindProjector.isDefault) { def fail(): Tree = { syntaxError( - "λ requires a single argument of the form X => ... or (X, Y) => ...", + em"λ requires a single argument of the form X => ... or (X, Y) => ...", Span(startOffset(t), in.lastOffset) ) AppliedTypeTree(applied, args) @@ -1890,50 +1936,36 @@ object Parsers { def paramTypeOf(core: () => Tree): Tree = if in.token == ARROW || isPureArrow(nme.PUREARROW) then val isImpure = in.token == ARROW - val tp = atSpan(in.skipToken()) { ByNameTypeTree(core()) } - if isImpure && Feature.pureFunsEnabled then ImpureByNameTypeTree(tp) else tp - else if in.token == LBRACE && followingIsCaptureSet() then - val start = in.offset - val cs = captureSet() - val endCsOffset = in.lastOffset - val startTpOffset = in.offset - val tp = paramTypeOf(core) - val tp1 = tp match - case ImpureByNameTypeTree(tp1) => - syntaxError("explicit captureSet is superfluous for impure call-by-name type", start) - tp1 - case CapturingTypeTree(_, tp1: ByNameTypeTree) => - syntaxError("only one captureSet is allowed here", start) - tp1 - case _: ByNameTypeTree if startTpOffset > endCsOffset => - report.warning( - i"""Style: by-name `->` should immediately follow closing `}` of capture set - |to avoid confusion with function type. 
- |That is, `{c}-> T` instead of `{c} -> T`.""", - source.atSpan(Span(startTpOffset, startTpOffset))) - tp - case _ => - tp - CapturingTypeTree(cs, tp1) + atSpan(in.skipToken()): + val tp = if isImpure then core() else capturesAndResult(core) + if isImpure && Feature.pureFunsEnabled then ImpureByNameTypeTree(tp) + else ByNameTypeTree(tp) else core() + private def maybeInto(tp: () => Tree) = + if in.isIdent(nme.into) + && in.featureEnabled(Feature.into) + && canStartTypeTokens.contains(in.lookahead.token) + then atSpan(in.skipToken()) { Into(tp()) } + else tp() + /** FunArgType ::= Type * | `=>' Type - * | [CaptureSet] `->' Type + * | `->' [CaptureSet] Type */ val funArgType: () => Tree = () => paramTypeOf(typ) /** ParamType ::= ParamValueType * | `=>' ParamValueType - * | [CaptureSet] `->' ParamValueType + * | `->' [CaptureSet] ParamValueType */ def paramType(): Tree = paramTypeOf(paramValueType) - /** ParamValueType ::= Type [`*'] + /** ParamValueType ::= [`into`] Type [`*'] */ def paramValueType(): Tree = { - val t = toplevelTyp() + val t = maybeInto(toplevelTyp) if (isIdent(nme.raw.STAR)) { in.nextToken() atSpan(startOffset(t)) { PostfixOp(t, Ident(tpnme.raw.STAR)) } @@ -1979,7 +2011,7 @@ object Parsers { } :: contextBounds(pname) else if in.token == VIEWBOUND then report.errorOrMigrationWarning( - "view bounds `<%' are no longer supported, use a context bound `:' instead", + em"view bounds `<%' are no longer supported, use a context bound `:' instead", in.sourcePos(), from = `3.0`) atSpan(in.skipToken()) { Function(Ident(pname) :: Nil, toplevelTyp()) @@ -1994,8 +2026,6 @@ object Parsers { def typeDependingOn(location: Location): Tree = if location.inParens then typ() else if location.inPattern then rejectWildcardType(refinedType()) - else if in.token == LBRACE && followingIsCaptureSet() then - CapturingTypeTree(captureSet(), infixType()) else infixType() /* ----------- EXPRESSIONS ------------------------------------------------ */ @@ -2067,7 +2097,7 @@ object 
Parsers { * | ‘inline’ InfixExpr MatchClause * Bindings ::= `(' [Binding {`,' Binding}] `)' * Binding ::= (id | `_') [`:' Type] - * Ascription ::= `:' [CaptureSet] InfixType + * Ascription ::= `:' InfixType * | `:' Annotation {Annotation} * | `:' `_' `*' * Catches ::= ‘catch’ (Expr | ExprCaseClause) @@ -2080,24 +2110,22 @@ object Parsers { def expr(location: Location): Tree = { val start = in.offset - def isSpecialClosureStart = in.lookahead.isIdent(nme.erased) && in.erasedEnabled in.token match case IMPLICIT => closure(start, location, modifiers(BitSet(IMPLICIT))) - case LPAREN if isSpecialClosureStart => - closure(start, location, Modifiers()) case LBRACKET => val start = in.offset val tparams = typeParamClause(ParamOwner.TypeParam) val arrowOffset = accept(ARROW) val body = expr(location) atSpan(start, arrowOffset) { - if (isFunction(body)) - PolyFunction(tparams, body) - else { - syntaxError("Implementation restriction: polymorphic function literals must have a value parameter", arrowOffset) - errorTermTree(arrowOffset) - } + getFunction(body) match + case Some(f) => + checkFunctionNotErased(f, "poly function") + PolyFunction(tparams, f) + case None => + syntaxError(em"Implementation restriction: polymorphic function literals must have a value parameter", arrowOffset) + errorTermTree(arrowOffset) } case _ => val saved = placeholderParams @@ -2115,7 +2143,9 @@ object Parsers { else if isWildcard(t) then placeholderParams = placeholderParams ::: saved t - else wrapPlaceholders(t) + else + checkNonParamTuple(t) + wrapPlaceholders(t) } def expr1(location: Location = Location.ElseWhere): Tree = in.token match @@ -2130,8 +2160,8 @@ object Parsers { } case DO => report.errorOrMigrationWarning( - i"""`do while ` is no longer supported, - |use `while ; do ()` instead.${rewriteNotice()}""", + em"""`do while ` is no longer supported, + |use `while ; do ()` instead.${rewriteNotice()}""", in.sourcePos(), from = `3.0`) val start = in.skipToken() atSpan(start) { @@ -2307,10 
+2337,8 @@ object Parsers { if in.token == RPAREN then Nil else - var mods1 = mods - if isErased then mods1 = addModifier(mods1) try - commaSeparated(() => binding(mods1)) + commaSeparated(() => binding(mods)) finally accept(RPAREN) else { @@ -2319,7 +2347,7 @@ object Parsers { val t = if ((in.token == COLONop || in.token == COLONfollow) && location == Location.InBlock) { report.errorOrMigrationWarning( - s"This syntax is no longer supported; parameter needs to be enclosed in (...)${rewriteNotice(`future-migration`)}", + em"This syntax is no longer supported; parameter needs to be enclosed in (...)${rewriteNotice(`future-migration`)}", source.atSpan(Span(start, in.lastOffset)), from = future) in.nextToken() @@ -2334,10 +2362,13 @@ object Parsers { (atSpan(start) { makeParameter(name, t, mods) }) :: Nil } - /** Binding ::= (id | `_') [`:' Type] + /** Binding ::= [`erased`] (id | `_') [`:' Type] */ def binding(mods: Modifiers): Tree = - atSpan(in.offset) { makeParameter(bindingName(), typedOpt(), mods) } + atSpan(in.offset) { + val mods1 = if isErasedKw then addModifier(mods) else mods + makeParameter(bindingName(), typedOpt(), mods1) + } def bindingName(): TermName = if (in.token == USCORE) { @@ -2356,7 +2387,7 @@ object Parsers { atSpan(start, in.offset) { if in.token == CTXARROW then if params.isEmpty then - syntaxError("context function literals require at least one formal parameter", Span(start, in.lastOffset)) + syntaxError(em"context function literals require at least one formal parameter", Span(start, in.lastOffset)) in.nextToken() else accept(ARROW) @@ -2370,7 +2401,7 @@ object Parsers { /** PostfixExpr ::= InfixExpr [id [nl]] * InfixExpr ::= PrefixExpr * | InfixExpr id [nl] InfixExpr - * | InfixExpr id `:` IndentedExpr + * | InfixExpr id ColonArgument * | InfixExpr MatchClause */ def postfixExpr(location: Location = Location.ElseWhere): Tree = @@ -2414,10 +2445,11 @@ object Parsers { * | SimpleExpr `.` MatchClause * | SimpleExpr (TypeArgs | NamedTypeArgs) * 
| SimpleExpr1 ArgumentExprs - * | SimpleExpr1 `:` ColonArgument -- under language.experimental.fewerBraces - * ColonArgument ::= indent (CaseClauses | Block) outdent - * | FunParams (‘=>’ | ‘?=>’) ColonArgBody - * | HkTypeParamClause ‘=>’ ColonArgBody + * | SimpleExpr1 ColonArgument + * ColonArgument ::= colon [LambdaStart] + * indent (CaseClauses | Block) outdent + * LambdaStart ::= FunParams (‘=>’ | ‘?=>’) + * | HkTypeParamClause ‘=>’ * ColonArgBody ::= indent (CaseClauses | Block) outdent * Quoted ::= ‘'’ ‘{’ Block ‘}’ * | ‘'’ ‘[’ Type ‘]’ @@ -2447,10 +2479,10 @@ object Parsers { case QUOTE => atSpan(in.skipToken()) { withinStaged(StageKind.Quoted | (if (location.inPattern) StageKind.QuotedPattern else 0)) { - Quote { + val body = if (in.token == LBRACKET) inBrackets(typ()) else stagedBlock() - } + Quote(body, Nil) } } case NEW => @@ -2533,6 +2565,7 @@ object Parsers { else in.currentRegion.withCommasExpected { var isFormalParams = false def exprOrBinding() = + if isErasedKw then isFormalParams = true if isFormalParams then binding(Modifiers()) else val t = exprInParens() @@ -2778,10 +2811,10 @@ object Parsers { CaseDef(pat, grd, atSpan(accept(ARROW)) { if exprOnly then if in.indentSyntax && in.isAfterLineEnd && in.token != INDENT then - warning(i"""Misleading indentation: this expression forms part of the preceding catch case. - |If this is intended, it should be indented for clarity. - |Otherwise, if the handler is intended to be empty, use a multi-line catch with - |an indented case.""".toMessage) + warning(em"""Misleading indentation: this expression forms part of the preceding catch case. + |If this is intended, it should be indented for clarity. 
+ |Otherwise, if the handler is intended to be empty, use a multi-line catch with + |an indented case.""") expr() else block() }) @@ -2822,11 +2855,25 @@ object Parsers { if (isIdent(nme.raw.BAR)) { in.nextToken(); pattern1(location) :: patternAlts(location) } else Nil - /** Pattern1 ::= Pattern2 [Ascription] + /** Pattern1 ::= PatVar Ascription + * | [‘-’] integerLiteral Ascription + * | [‘-’] floatingPointLiteral Ascription + * | Pattern2 */ def pattern1(location: Location = Location.InPattern): Tree = val p = pattern2() if in.isColon then + val isVariableOrNumber = isVarPattern(p) || p.isInstanceOf[Number] + if !isVariableOrNumber then + report.gradualErrorOrMigrationWarning( + em"""Type ascriptions after patterns other than: + | * variable pattern, e.g. `case x: String =>` + | * number literal pattern, e.g. `case 10.5: Double =>` + |are no longer supported. Remove the type ascription or move it to a separate variable pattern.""", + in.sourcePos(), + warnFrom = `3.3`, + errorFrom = future + ) in.nextToken() ascription(p, location) else p @@ -3003,7 +3050,7 @@ object Parsers { if in.token == THIS then if sourceVersion.isAtLeast(future) then deprecationWarning( - "The [this] qualifier will be deprecated in the future; it should be dropped.".toMessage) + em"The [this] qualifier will be deprecated in the future; it should be dropped.") in.nextToken() mods | Local else mods.withPrivateWithin(ident().toTypeName) @@ -3077,12 +3124,50 @@ object Parsers { /* -------- PARAMETERS ------------------------------------------- */ + /** DefParamClauses ::= DefParamClause { DefParamClause } -- and two DefTypeParamClause cannot be adjacent + * DefParamClause ::= DefTypeParamClause + * | DefTermParamClause + * | UsingParamClause + */ + def typeOrTermParamClauses( + ownerKind: ParamOwner, + numLeadParams: Int = 0 + ): List[List[TypeDef] | List[ValDef]] = + + def recur(firstClause: Boolean, numLeadParams: Int, prevIsTypeClause: Boolean): List[List[TypeDef] | List[ValDef]] = + 
newLineOptWhenFollowedBy(LPAREN) + newLineOptWhenFollowedBy(LBRACKET) + if in.token == LPAREN then + val paramsStart = in.offset + val params = termParamClause( + numLeadParams, + firstClause = firstClause) + val lastClause = params.nonEmpty && params.head.mods.flags.is(Implicit) + params :: ( + if lastClause then Nil + else recur(firstClause = false, numLeadParams + params.length, prevIsTypeClause = false)) + else if in.token == LBRACKET then + if prevIsTypeClause then + syntaxError( + em"Type parameter lists must be separated by a term or using parameter list", + in.offset + ) + typeParamClause(ownerKind) :: recur(firstClause, numLeadParams, prevIsTypeClause = true) + else Nil + end recur + + recur(firstClause = true, numLeadParams = numLeadParams, prevIsTypeClause = false) + end typeOrTermParamClauses + + /** ClsTypeParamClause::= ‘[’ ClsTypeParam {‘,’ ClsTypeParam} ‘]’ * ClsTypeParam ::= {Annotation} [‘+’ | ‘-’] * id [HkTypeParamClause] TypeParamBounds * * DefTypeParamClause::= ‘[’ DefTypeParam {‘,’ DefTypeParam} ‘]’ - * DefTypeParam ::= {Annotation} id [HkTypeParamClause] TypeParamBounds + * DefTypeParam ::= {Annotation} + * [`sealed`] -- under captureChecking + * id [HkTypeParamClause] TypeParamBounds * * TypTypeParamClause::= ‘[’ TypTypeParam {‘,’ TypTypeParam} ‘]’ * TypTypeParam ::= {Annotation} id [HkTypePamClause] TypeBounds @@ -3092,24 +3177,25 @@ object Parsers { */ def typeParamClause(ownerKind: ParamOwner): List[TypeDef] = inBrackets { - def variance(vflag: FlagSet): FlagSet = - if ownerKind == ParamOwner.Def || ownerKind == ParamOwner.TypeParam then - syntaxError(i"no `+/-` variance annotation allowed here") - in.nextToken() - EmptyFlags - else - in.nextToken() - vflag + def checkVarianceOK(): Boolean = + val ok = ownerKind != ParamOwner.Def && ownerKind != ParamOwner.TypeParam + if !ok then syntaxError(em"no `+/-` variance annotation allowed here") + in.nextToken() + ok def typeParam(): TypeDef = { val isAbstractOwner = ownerKind == ParamOwner.Type 
|| ownerKind == ParamOwner.TypeParam val start = in.offset - val mods = - annotsAsMods() - | (if (ownerKind == ParamOwner.Class) Param | PrivateLocal else Param) - | (if isIdent(nme.raw.PLUS) then variance(Covariant) - else if isIdent(nme.raw.MINUS) then variance(Contravariant) - else EmptyFlags) + var mods = annotsAsMods() | Param + if ownerKind == ParamOwner.Class then mods |= PrivateLocal + if Feature.ccEnabled && in.token == SEALED then + if ownerKind == ParamOwner.Def then mods |= Sealed + else syntaxError(em"`sealed` modifier only allowed for method type parameters") + in.nextToken() + if isIdent(nme.raw.PLUS) && checkVarianceOK() then + mods |= Covariant + else if isIdent(nme.raw.MINUS) && checkVarianceOK() then + mods |= Contravariant atSpan(start, nameStart) { val name = if (isAbstractOwner && in.token == USCORE) { @@ -3130,34 +3216,39 @@ object Parsers { /** ContextTypes ::= FunArgType {‘,’ FunArgType} */ - def contextTypes(ofClass: Boolean, nparams: Int, impliedMods: Modifiers): List[ValDef] = + def contextTypes(ofClass: Boolean, numLeadParams: Int, impliedMods: Modifiers): List[ValDef] = val tps = commaSeparated(funArgType) - var counter = nparams + var counter = numLeadParams def nextIdx = { counter += 1; counter } val paramFlags = if ofClass then LocalParamAccessor else Param tps.map(makeSyntheticParameter(nextIdx, _, paramFlags | Synthetic | impliedMods.flags)) - /** ClsParamClause ::= ‘(’ [‘erased’] ClsParams ‘)’ | UsingClsParamClause - * UsingClsParamClause::= ‘(’ ‘using’ [‘erased’] (ClsParams | ContextTypes) ‘)’ + /** ClsTermParamClause ::= ‘(’ ClsParams ‘)’ | UsingClsTermParamClause + * UsingClsTermParamClause::= ‘(’ ‘using’ [‘erased’] (ClsParams | ContextTypes) ‘)’ * ClsParams ::= ClsParam {‘,’ ClsParam} * ClsParam ::= {Annotation} * - * DefParamClause ::= ‘(’ [‘erased’] DefParams ‘)’ | UsingParamClause - * UsingParamClause ::= ‘(’ ‘using’ [‘erased’] (DefParams | ContextTypes) ‘)’ - * DefParams ::= DefParam {‘,’ DefParam} - * DefParam ::= 
{Annotation} [‘inline’] Param + * TypelessClause ::= DefTermParamClause + * | UsingParamClause + * + * DefTermParamClause::= [nl] ‘(’ [DefTermParams] ‘)’ + * UsingParamClause ::= ‘(’ ‘using’ (DefTermParams | ContextTypes) ‘)’ + * DefImplicitClause ::= [nl] ‘(’ ‘implicit’ DefTermParams ‘)’ + * DefTermParams ::= DefTermParam {‘,’ DefTermParam} + * DefTermParam ::= {Annotation} [‘erased’] [‘inline’] Param * * Param ::= id `:' ParamType [`=' Expr] * * @return the list of parameter definitions */ - def paramClause(nparams: Int, // number of parameters preceding this clause - ofClass: Boolean = false, // owner is a class - ofCaseClass: Boolean = false, // owner is a case class - prefix: Boolean = false, // clause precedes name of an extension method - givenOnly: Boolean = false, // only given parameters allowed - firstClause: Boolean = false // clause is the first in regular list of clauses - ): List[ValDef] = { + def termParamClause( + numLeadParams: Int, // number of parameters preceding this clause + ofClass: Boolean = false, // owner is a class + ofCaseClass: Boolean = false, // owner is a case class + prefix: Boolean = false, // clause precedes name of an extension method + givenOnly: Boolean = false, // only given parameters allowed + firstClause: Boolean = false // clause is the first in regular list of clauses + ): List[ValDef] = { var impliedMods: Modifiers = EmptyModifiers def addParamMod(mod: () => Mod) = impliedMods = addMod(impliedMods, atSpan(in.skipToken()) { mod() }) @@ -3168,12 +3259,12 @@ object Parsers { else if isIdent(nme.using) then addParamMod(() => Mod.Given()) - if isErased then - addParamMod(() => Mod.Erased()) def param(): ValDef = { val start = in.offset var mods = impliedMods.withAnnotations(annotations()) + if isErasedKw then + mods = addModifier(mods) if (ofClass) { mods = addFlag(modifiers(start = mods), ParamAccessor) mods = @@ -3184,8 +3275,8 @@ object Parsers { val mod = atSpan(in.skipToken()) { Mod.Var() } addMod(mods, mod) else - if 
(!(mods.flags &~ (ParamAccessor | Inline | impliedMods.flags)).isEmpty) - syntaxError("`val` or `var` expected") + if (!(mods.flags &~ (ParamAccessor | Inline | Erased | impliedMods.flags)).isEmpty) + syntaxError(em"`val` or `var` expected") if (firstClause && ofCaseClass) mods else mods | PrivateLocal } @@ -3222,7 +3313,7 @@ object Parsers { checkVarArgsRules(rest) } - // begin paramClause + // begin termParamClause inParens { if in.token == RPAREN && !prefix && !impliedMods.is(Given) then Nil else @@ -3231,34 +3322,46 @@ object Parsers { else paramMods() if givenOnly && !impliedMods.is(Given) then - syntaxError("`using` expected") - val isParams = - !impliedMods.is(Given) - || startParamTokens.contains(in.token) - || isIdent && (in.name == nme.inline || in.lookahead.isColon) - if isParams then commaSeparated(() => param()) - else contextTypes(ofClass, nparams, impliedMods) + syntaxError(em"`using` expected") + val (firstParamMod, isParams) = + var mods = EmptyModifiers + if in.lookahead.isColon then + (mods, true) + else + if isErased then mods = addModifier(mods) + val isParams = + !impliedMods.is(Given) + || startParamTokens.contains(in.token) + || isIdent && (in.name == nme.inline || in.lookahead.isColon) + (mods, isParams) + (if isParams then commaSeparated(() => param()) + else contextTypes(ofClass, numLeadParams, impliedMods)) match { + case Nil => Nil + case (h :: t) => h.withAddedFlags(firstParamMod.flags) :: t + } checkVarArgsRules(clause) clause } } - /** ClsParamClauses ::= {ClsParamClause} [[nl] ‘(’ [‘implicit’] ClsParams ‘)’] - * DefParamClauses ::= {DefParamClause} [[nl] ‘(’ [‘implicit’] DefParams ‘)’] + /** ClsTermParamClauses ::= {ClsTermParamClause} [[nl] ‘(’ [‘implicit’] ClsParams ‘)’] + * TypelessClauses ::= TypelessClause {TypelessClause} * * @return The parameter definitions */ - def paramClauses(ofClass: Boolean = false, - ofCaseClass: Boolean = false, - givenOnly: Boolean = false, - numLeadParams: Int = 0): List[List[ValDef]] = + def 
termParamClauses( + ofClass: Boolean = false, + ofCaseClass: Boolean = false, + givenOnly: Boolean = false, + numLeadParams: Int = 0 + ): List[List[ValDef]] = - def recur(firstClause: Boolean, nparams: Int): List[List[ValDef]] = + def recur(firstClause: Boolean, numLeadParams: Int): List[List[ValDef]] = newLineOptWhenFollowedBy(LPAREN) if in.token == LPAREN then val paramsStart = in.offset - val params = paramClause( - nparams, + val params = termParamClause( + numLeadParams, ofClass = ofClass, ofCaseClass = ofCaseClass, givenOnly = givenOnly, @@ -3266,12 +3369,12 @@ object Parsers { val lastClause = params.nonEmpty && params.head.mods.flags.is(Implicit) params :: ( if lastClause then Nil - else recur(firstClause = false, nparams + params.length)) + else recur(firstClause = false, numLeadParams + params.length)) else Nil end recur recur(firstClause = true, numLeadParams) - end paramClauses + end termParamClauses /* -------- DEFS ------------------------------------------- */ @@ -3310,19 +3413,19 @@ object Parsers { in.languageImportContext = in.languageImportContext.importContext(imp, NoSymbol) for case ImportSelector(id @ Ident(imported), EmptyTree, _) <- selectors do if Feature.handleGlobalLanguageImport(prefix, imported) && !outermost then - syntaxError(i"this language import is only allowed at the toplevel", id.span) + syntaxError(em"this language import is only allowed at the toplevel", id.span) if allSourceVersionNames.contains(imported) && prefix.isEmpty then if !outermost then - syntaxError(i"source version import is only allowed at the toplevel", id.span) + syntaxError(em"source version import is only allowed at the toplevel", id.span) else if ctx.compilationUnit.sourceVersion.isDefined then - syntaxError(i"duplicate source version import", id.span) + syntaxError(em"duplicate source version import", id.span) else if illegalSourceVersionNames.contains(imported) then val candidate = val nonMigration = imported.toString.replace("-migration", "") 
validSourceVersionNames.find(_.show == nonMigration) - val baseMsg = i"`$imported` is not a valid source version" + val baseMsg = em"`$imported` is not a valid source version" val msg = candidate match - case Some(member) => i"$baseMsg, did you mean language.`$member`?" + case Some(member) => baseMsg.append(i", did you mean language.`$member`?") case _ => baseMsg syntaxError(msg, id.span) else @@ -3385,7 +3488,7 @@ object Parsers { case _ => if isIdent(nme.raw.STAR) then wildcardSelector() else - if !idOK then syntaxError(i"named imports cannot follow wildcard imports") + if !idOK then syntaxError(em"named imports cannot follow wildcard imports") namedSelector(termIdent()) } @@ -3485,7 +3588,7 @@ object Parsers { if sourceVersion.isAtLeast(future) then deprecationWarning( em"""`= _` has been deprecated; use `= uninitialized` instead. - |`uninitialized` can be imported with `scala.compiletime.uninitialized`.""".toMessage, + |`uninitialized` can be imported with `scala.compiletime.uninitialized`.""", rhsOffset) placeholderParams = placeholderParams.tail atSpan(rhs0.span) { Ident(nme.WILDCARD) } @@ -3513,10 +3616,12 @@ object Parsers { } /** DefDef ::= DefSig [‘:’ Type] ‘=’ Expr - * | this ParamClause ParamClauses `=' ConstrExpr + * | this TypelessClauses [DefImplicitClause] `=' ConstrExpr * DefDcl ::= DefSig `:' Type - * DefSig ::= id [DefTypeParamClause] DefParamClauses - * | ExtParamClause [nl] [‘.’] id DefParamClauses + * DefSig ::= id [DefTypeParamClause] DefTermParamClauses + * + * if clauseInterleaving is enabled: + * DefSig ::= id [DefParamClauses] [DefImplicitClause] */ def defDefOrDcl(start: Offset, mods: Modifiers, numLeadParams: Int = 0): DefDef = atSpan(start, nameStart) { @@ -3526,7 +3631,7 @@ object Parsers { else ": Unit " // trailing space ensures that `def f()def g()` works. 
if migrateTo3 then report.errorOrMigrationWarning( - s"Procedure syntax no longer supported; `$toInsert` should be inserted here", + em"Procedure syntax no longer supported; `$toInsert` should be inserted here", in.sourcePos(), from = `3.0`) patch(source, Span(in.lastOffset), toInsert) true @@ -3535,10 +3640,10 @@ object Parsers { if (in.token == THIS) { in.nextToken() - val vparamss = paramClauses(numLeadParams = numLeadParams) + val vparamss = termParamClauses(numLeadParams = numLeadParams) if (vparamss.isEmpty || vparamss.head.take(1).exists(_.mods.isOneOf(GivenOrImplicit))) in.token match { - case LBRACKET => syntaxError("no type parameters allowed here") + case LBRACKET => syntaxError(em"no type parameters allowed here") case EOF => incompleteInputError(AuxConstructorNeedsNonImplicitParameter()) case _ => syntaxError(AuxConstructorNeedsNonImplicitParameter(), nameStart) } @@ -3553,9 +3658,18 @@ object Parsers { val mods1 = addFlag(mods, Method) val ident = termIdent() var name = ident.name.asTermName - val tparams = typeParamClauseOpt(ParamOwner.Def) - val vparamss = paramClauses(numLeadParams = numLeadParams) + val paramss = + if in.featureEnabled(Feature.clauseInterleaving) then + // If you are making interleaving stable manually, please refer to the PR introducing it instead, section "How to make non-experimental" + typeOrTermParamClauses(ParamOwner.Def, numLeadParams = numLeadParams) + else + val tparams = typeParamClauseOpt(ParamOwner.Def) + val vparamss = termParamClauses(numLeadParams = numLeadParams) + + joinParams(tparams, vparamss) + var tpt = fromWithinReturnType { typedOpt() } + if (migrateTo3) newLineOptWhenFollowedBy(LBRACE) val rhs = if in.token == EQUALS then @@ -3572,7 +3686,7 @@ object Parsers { accept(EQUALS) expr() - val ddef = DefDef(name, joinParams(tparams, vparamss), tpt, rhs) + val ddef = DefDef(name, paramss, tpt, rhs) if (isBackquoted(ident)) ddef.pushAttachment(Backquoted, ()) finalizeDef(ddef, mods1, start) } @@ -3631,13 +3745,13 
@@ object Parsers { case TypeBoundsTree(EmptyTree, upper, _) => rhs = MatchTypeTree(upper, mtt.selector, mtt.cases) case _ => - syntaxError(i"cannot combine lower bound and match type alias", eqOffset) + syntaxError(em"cannot combine lower bound and match type alias", eqOffset) } case _ => if mods.is(Opaque) then rhs = TypeBoundsTree(bounds.lo, bounds.hi, rhs) else - syntaxError(i"cannot combine bound and alias", eqOffset) + syntaxError(em"cannot combine bound and alias", eqOffset) } makeTypeDef(rhs) } @@ -3693,12 +3807,12 @@ object Parsers { val templ = templateOpt(constr) finalizeDef(TypeDef(name, templ), mods, start) - /** ClassConstr ::= [ClsTypeParamClause] [ConstrMods] ClsParamClauses + /** ClassConstr ::= [ClsTypeParamClause] [ConstrMods] ClsTermParamClauses */ def classConstr(isCaseClass: Boolean = false): DefDef = atSpan(in.lastOffset) { val tparams = typeParamClauseOpt(ParamOwner.Class) val cmods = fromWithinClassConstr(constrModsOpt()) - val vparamss = paramClauses(ofClass = true, ofCaseClass = isCaseClass) + val vparamss = termParamClauses(ofClass = true, ofCaseClass = isCaseClass) makeConstructor(tparams, vparamss).withMods(cmods) } @@ -3718,7 +3832,7 @@ object Parsers { private def checkAccessOnly(mods: Modifiers, where: String): Modifiers = val mods1 = mods & (AccessFlags | Enum) if mods1 ne mods then - syntaxError(s"Only access modifiers are allowed on enum $where") + syntaxError(em"Only access modifiers are allowed on enum $where") mods1 /** EnumDef ::= id ClassConstr InheritClauses EnumBody @@ -3774,17 +3888,17 @@ object Parsers { vparamss: List[List[Tree]], stat: Tree): Unit = stat match { case stat: DefDef => if stat.mods.is(ExtensionMethod) && vparamss.nonEmpty then - syntaxError(i"no extension method allowed here since leading parameter was already given", stat.span) + syntaxError(em"no extension method allowed here since leading parameter was already given", stat.span) else if !stat.mods.is(ExtensionMethod) && vparamss.isEmpty then - 
syntaxError(i"an extension method is required here", stat.span) + syntaxError(em"an extension method is required here", stat.span) else if tparams.nonEmpty && stat.leadingTypeParams.nonEmpty then - syntaxError(i"extension method cannot have type parameters since some were already given previously", + syntaxError(em"extension method cannot have type parameters since some were already given previously", stat.leadingTypeParams.head.span) else if stat.rhs.isEmpty then - syntaxError(i"extension method cannot be abstract", stat.span) + syntaxError(em"extension method cannot be abstract", stat.span) case EmptyTree => case stat => - syntaxError(i"extension clause can only define methods", stat.span) + syntaxError(em"extension clause can only define methods", stat.span) } /** GivenDef ::= [GivenSig] (AnnotType [‘=’ Expr] | StructuralInstance) @@ -3800,14 +3914,14 @@ object Parsers { newLineOpt() val vparamss = if in.token == LPAREN && in.lookahead.isIdent(nme.using) - then paramClauses(givenOnly = true) + then termParamClauses(givenOnly = true) else Nil newLinesOpt() val noParams = tparams.isEmpty && vparamss.isEmpty if !(name.isEmpty && noParams) then acceptColon() val parents = if isSimpleLiteral then rejectWildcardType(annotType()) :: Nil - else constrApp() :: withConstrApps() + else refinedTypeRest(constrApp()) :: withConstrApps() val parentsIsType = parents.length == 1 && parents.head.isType if in.token == EQUALS && parentsIsType then accept(EQUALS) @@ -3835,33 +3949,33 @@ object Parsers { finalizeDef(gdef, mods1, start) } - /** Extension ::= ‘extension’ [DefTypeParamClause] {UsingParamClause} ‘(’ DefParam ‘)’ + /** Extension ::= ‘extension’ [DefTypeParamClause] {UsingParamClause} ‘(’ DefTermParam ‘)’ * {UsingParamClause} ExtMethods */ def extension(): ExtMethods = val start = in.skipToken() val tparams = typeParamClauseOpt(ParamOwner.Def) val leadParamss = ListBuffer[List[ValDef]]() - def nparams = leadParamss.map(_.length).sum + def numLeadParams = 
leadParamss.map(_.length).sum while - val extParams = paramClause(nparams, prefix = true) + val extParams = termParamClause(numLeadParams, prefix = true) leadParamss += extParams isUsingClause(extParams) do () - leadParamss ++= paramClauses(givenOnly = true, numLeadParams = nparams) + leadParamss ++= termParamClauses(givenOnly = true, numLeadParams = numLeadParams) if in.isColon then - syntaxError("no `:` expected here") + syntaxError(em"no `:` expected here") in.nextToken() val methods: List[Tree] = if in.token == EXPORT then exportClause() else if isDefIntro(modifierTokens) then - extMethod(nparams) :: Nil + extMethod(numLeadParams) :: Nil else in.observeIndented() newLineOptWhenFollowedBy(LBRACE) - if in.isNestedStart then inDefScopeBraces(extMethods(nparams)) - else { syntaxErrorOrIncomplete("Extension without extension methods") ; Nil } + if in.isNestedStart then inDefScopeBraces(extMethods(numLeadParams)) + else { syntaxErrorOrIncomplete(em"Extension without extension methods") ; Nil } val result = atSpan(start)(ExtMethods(joinParams(tparams, leadParamss.toList), methods)) val comment = in.getDocComment(start) if comment.isDefined then @@ -3894,7 +4008,7 @@ object Parsers { meths += defDefOrDcl(start, mods, numLeadParams) in.token != EOF && statSepOrEnd(meths, what = "extension method") do () - if meths.isEmpty then syntaxErrorOrIncomplete("`def` expected") + if meths.isEmpty then syntaxErrorOrIncomplete(em"`def` expected") meths.toList } @@ -3940,7 +4054,7 @@ object Parsers { in.nextToken() if (in.token == LBRACE || in.token == COLONeol) { report.errorOrMigrationWarning( - "`extends` must be followed by at least one parent", + em"`extends` must be followed by at least one parent", in.sourcePos(), from = `3.0`) Nil } @@ -4054,8 +4168,8 @@ object Parsers { stats.toList } - /** SelfType ::= id [‘:’ [CaptureSet] InfixType] ‘=>’ - * | ‘this’ ‘:’ [CaptureSet] InfixType ‘=>’ + /** SelfType ::= id [‘:’ InfixType] ‘=>’ + * | ‘this’ ‘:’ InfixType ‘=>’ */ def 
selfType(): ValDef = if (in.isIdent || in.token == THIS) @@ -4071,10 +4185,7 @@ object Parsers { val selfTpt = if in.isColon then in.nextToken() - if in.token == LBRACE && followingIsCaptureSet() then - CapturingTypeTree(captureSet(), infixType()) - else - infixType() + infixType() else if selfName == nme.WILDCARD then accept(COLONfollow) TypeTree() @@ -4082,7 +4193,7 @@ object Parsers { in.token = SELFARROW // suppresses INDENT insertion after `=>` in.nextToken() else - syntaxError("`=>` expected after self type") + syntaxError(em"`=>` expected after self type") makeSelfDef(selfName, selfTpt) } else EmptyValDef @@ -4129,24 +4240,26 @@ object Parsers { def refineStatSeq(): List[Tree] = { val stats = new ListBuffer[Tree] def checkLegal(tree: Tree): List[Tree] = - val problem = tree match + def ok = tree :: Nil + def fail(msg: Message) = + syntaxError(msg, tree.span) + Nil + tree match case tree: ValDef if tree.mods.is(Mutable) => - i"""refinement cannot be a mutable var. - |You can use an explicit getter ${tree.name} and setter ${tree.name}_= instead""" + fail(em"""refinement cannot be a mutable var. 
+ |You can use an explicit getter ${tree.name} and setter ${tree.name}_= instead""") case tree: MemberDef if !(tree.mods.flags & ModifierFlags).isEmpty => - i"refinement cannot be ${(tree.mods.flags & ModifierFlags).flagStrings().mkString("`", "`, `", "`")}" + fail(em"refinement cannot be ${(tree.mods.flags & ModifierFlags).flagStrings().mkString("`", "`, `", "`")}") case tree: DefDef if tree.termParamss.nestedExists(!_.rhs.isEmpty) => - i"refinement cannot have default arguments" + fail(em"refinement cannot have default arguments") case tree: ValOrDefDef => - if tree.rhs.isEmpty then "" - else "refinement cannot have a right-hand side" + if tree.rhs.isEmpty then ok + else fail(em"refinement cannot have a right-hand side") case tree: TypeDef => - if !tree.isClassDef then "" - else "refinement cannot be a class or trait" + if !tree.isClassDef then ok + else fail(em"refinement cannot be a class or trait") case _ => - "this kind of definition cannot be a refinement" - if problem.isEmpty then tree :: Nil - else { syntaxError(problem, tree.span); Nil } + fail(em"this kind of definition cannot be a refinement") while val dclFound = isDclIntro diff --git a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala index a4eff045b4ac..fac73bfb4992 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala @@ -17,9 +17,11 @@ import scala.collection.mutable import scala.collection.immutable.SortedMap import rewrites.Rewrites.patch import config.Feature -import config.Feature.migrateTo3 +import config.Feature.{migrateTo3, fewerBracesEnabled} import config.SourceVersion.`3.0` -import reporting.{NoProfile, Profile} +import reporting.{NoProfile, Profile, Message} + +import java.util.Objects object Scanners { @@ -100,19 +102,23 @@ object Scanners { */ var errOffset: Offset = NoOffset + /** Implements CharArrayReader's error method */ + protected def error(msg: 
String, off: Offset): Unit = + error(msg.toMessage, off) + /** Generate an error at the given offset */ - def error(msg: String, off: Offset = offset): Unit = { + def error(msg: Message, off: Offset = offset): Unit = { errorButContinue(msg, off) token = ERROR errOffset = off } - def errorButContinue(msg: String, off: Offset = offset): Unit = + def errorButContinue(msg: Message, off: Offset = offset): Unit = report.error(msg, sourcePos(off)) /** signal an error where the input ended in the middle of a token */ - def incompleteInputError(msg: String): Unit = { - report.incompleteInputError(msg.toMessage, sourcePos()) + def incompleteInputError(msg: Message): Unit = { + report.incompleteInputError(msg, sourcePos()) token = EOF errOffset = offset } @@ -122,9 +128,11 @@ object Scanners { // Setting token data ---------------------------------------------------- + protected def initialCharBufferSize = 1024 + /** A character buffer for literals */ - protected val litBuf = CharBuffer() + protected val litBuf = CharBuffer(initialCharBufferSize) /** append Unicode character to "litBuf" buffer */ @@ -159,7 +167,7 @@ object Scanners { // disallow trailing numeric separator char, but continue lexing def checkNoTrailingSeparator(): Unit = if (!litBuf.isEmpty && isNumberSeparator(litBuf.last)) - errorButContinue("trailing separator is not allowed", offset + litBuf.length - 1) + errorButContinue(em"trailing separator is not allowed", offset + litBuf.length - 1) } class Scanner(source: SourceFile, override val startFrom: Offset = 0, profile: Profile = NoProfile, allowIndent: Boolean = true)(using Context) extends ScannerCommon(source) { @@ -192,7 +200,7 @@ object Scanners { val rewriteTargets = List(s.newSyntax, s.oldSyntax, s.indent, s.noindent) val enabled = rewriteTargets.filter(_.value) if (enabled.length > 1) - error(s"illegal combination of -rewrite targets: ${enabled(0).name} and ${enabled(1).name}") + error(em"illegal combination of -rewrite targets: ${enabled(0).name} and 
${enabled(1).name}") } private var myLanguageImportContext: Context = ctx @@ -202,25 +210,6 @@ object Scanners { def featureEnabled(name: TermName) = Feature.enabled(name)(using languageImportContext) def erasedEnabled = featureEnabled(Feature.erasedDefinitions) - private inline val fewerBracesByDefault = false - // turn on to study impact on codebase if `fewerBraces` was the default - - private var fewerBracesEnabledCache = false - private var fewerBracesEnabledCtx: Context = NoContext - - def fewerBracesEnabled = - if fewerBracesEnabledCtx ne myLanguageImportContext then - fewerBracesEnabledCache = - featureEnabled(Feature.fewerBraces) - || fewerBracesByDefault && indentSyntax && !migrateTo3 - // ensure that fewer braces is not the default for 3.0-migration since - // { x: T => - // expr - // } - // would be ambiguous - fewerBracesEnabledCtx = myLanguageImportContext - fewerBracesEnabledCache - private var postfixOpsEnabledCache = false private var postfixOpsEnabledCtx: Context = NoContext @@ -257,14 +246,14 @@ object Scanners { def getDocComment(pos: Int): Option[Comment] = docstringMap.get(pos) /** A buffer for comments */ - private val commentBuf = CharBuffer() + private val commentBuf = CharBuffer(initialCharBufferSize) def toToken(identifier: SimpleName): Token = def handleMigration(keyword: Token): Token = if scala3keywords.contains(keyword) && migrateTo3 then val what = tokenString(keyword) report.errorOrMigrationWarning( - i"$what is now a keyword, write `$what` instead of $what to keep it as an identifier", + em"$what is now a keyword, write `$what` instead of $what to keep it as an identifier", sourcePos(), from = `3.0`) patch(source, Span(offset), "`") @@ -534,7 +523,8 @@ object Scanners { * * The following tokens can start an indentation region: * - * : = => <- if then else while do try catch finally for yield match + * : = => <- if then else while do try catch + * finally for yield match throw return with * * Inserting an INDENT starts a new 
indentation region with the indentation of the current * token as indentation width. @@ -566,7 +556,7 @@ object Scanners { // If nextWidth is an indentation level not yet seen by enclosing indentation // region, invoke `handler`. - def handleNewIndentWidth(r: Region, handler: Indented => Unit): Unit = r match + inline def handleNewIndentWidth(r: Region, inline handler: Indented => Unit): Unit = r match case r @ Indented(curWidth, prefix, outer) if curWidth < nextWidth && !r.otherIndentWidths.contains(nextWidth) && nextWidth != lastWidth => handler(r) @@ -584,7 +574,7 @@ object Scanners { * they start with `(`, `[` or `{`, or the last statement ends in a `return`. * The Scala 2 rules apply under source `3.0-migration` or under `-no-indent`. */ - def isContinuing = + inline def isContinuing = lastWidth < nextWidth && (openParensTokens.contains(token) || lastToken == RETURN) && !pastBlankLine @@ -621,10 +611,17 @@ object Scanners { case r: Indented => insert(OUTDENT, offset) handleNewIndentWidth(r.enclosing, ir => - errorButContinue( - i"""The start of this line does not match any of the previous indentation widths. - |Indentation width of current line : $nextWidth - |This falls between previous widths: ${ir.width} and $lastWidth""")) + if next.token == DOT + && !nextWidth.isClose(r.indentWidth) + && !nextWidth.isClose(ir.indentWidth) + then + ir.otherIndentWidths += nextWidth + else + val lw = lastWidth + errorButContinue( + em"""The start of this line does not match any of the previous indentation widths. 
+ |Indentation width of current line : $nextWidth + |This falls between previous widths: ${ir.width} and $lw""")) case r => if skipping then if r.enclosing.isClosedByUndentAt(nextWidth) then @@ -640,16 +637,17 @@ object Scanners { else if lastToken == SELFARROW then currentRegion.knownWidth = nextWidth else if (lastWidth != nextWidth) - errorButContinue(spaceTabMismatchMsg(lastWidth, nextWidth)) + val lw = lastWidth + errorButContinue(spaceTabMismatchMsg(lw, nextWidth)) if token != OUTDENT then handleNewIndentWidth(currentRegion, _.otherIndentWidths += nextWidth) if next.token == EMPTY then profile.recordNewLine() end handleNewLine - def spaceTabMismatchMsg(lastWidth: IndentWidth, nextWidth: IndentWidth) = - i"""Incompatible combinations of tabs and spaces in indentation prefixes. - |Previous indent : $lastWidth + def spaceTabMismatchMsg(lastWidth: IndentWidth, nextWidth: IndentWidth): Message = + em"""Incompatible combinations of tabs and spaces in indentation prefixes. + |Previous indent : $lastWidth |Latest indent : $nextWidth""" def observeColonEOL(inTemplate: Boolean): Unit = @@ -792,22 +790,24 @@ object Scanners { private def isSupplementary(high: Char, test: Int => Boolean, strict: Boolean = true): Boolean = isHighSurrogate(high) && { var res = false - nextChar() - val low = ch + val low = lookaheadChar() if isLowSurrogate(low) then - nextChar() val codepoint = toCodePoint(high, low) - if isValidCodePoint(codepoint) && test(codepoint) then - putChar(high) - putChar(low) - res = true + if isValidCodePoint(codepoint) then + if test(codepoint) then + putChar(high) + putChar(low) + nextChar() + nextChar() + res = true else - error(s"illegal character '${toUnicode(high)}${toUnicode(low)}'") + error(em"illegal character '${toUnicode(high)}${toUnicode(low)}'") else if !strict then putChar(high) + nextChar() res = true else - error(s"illegal character '${toUnicode(high)}' missing low surrogate") + error(em"illegal character '${toUnicode(high)}' missing low 
surrogate") res } private def atSupplementary(ch: Char, f: Int => Boolean): Boolean = @@ -884,7 +884,7 @@ object Scanners { case _ => base = 10 ; putChar('0') } if (base != 10 && !isNumberSeparator(ch) && digit2int(ch, base) < 0) - error("invalid literal number") + error(em"invalid literal number") } fetchLeadingZero() getNumber() @@ -904,7 +904,6 @@ object Scanners { if (ch == '\"') { if (lookaheadChar() == '\"') { nextRawChar() - //offset += 3 // first part is positioned at the quote nextRawChar() stringPart(multiLine = true) } @@ -915,7 +914,6 @@ object Scanners { } } else { - //offset += 1 // first part is positioned at the quote stringPart(multiLine = false) } } @@ -950,13 +948,13 @@ object Scanners { val isEmptyCharLit = (ch == '\'') getLitChar() if ch == '\'' then - if isEmptyCharLit then error("empty character literal (use '\\'' for single quote)") - else if litBuf.length != 1 then error("illegal codepoint in Char constant: " + litBuf.toString.map(toUnicode).mkString("'", "", "'")) + if isEmptyCharLit then error(em"empty character literal (use '\\'' for single quote)") + else if litBuf.length != 1 then error(em"illegal codepoint in Char constant: ${litBuf.toString.map(toUnicode).mkString("'", "", "'")}") else finishCharLit() - else if isEmptyCharLit then error("empty character literal") - else error("unclosed character literal") + else if isEmptyCharLit then error(em"empty character literal") + else error(em"unclosed character literal") case _ => - error("unclosed character literal") + error(em"unclosed character literal") } } fetchSingleQuote() @@ -987,35 +985,34 @@ object Scanners { case SU => if (isAtEnd) token = EOF else { - error("illegal character") + error(em"illegal character") nextChar() } case _ => def fetchOther() = - if (ch == '\u21D2') { + if ch == '\u21D2' then nextChar(); token = ARROW - report.deprecationWarning("The unicode arrow `⇒` is deprecated, use `=>` instead. 
If you still wish to display it as one character, consider using a font with programming ligatures such as Fira Code.", sourcePos(offset)) - } - else if (ch == '\u2190') { + report.deprecationWarning(em"The unicode arrow `⇒` is deprecated, use `=>` instead. If you still wish to display it as one character, consider using a font with programming ligatures such as Fira Code.", sourcePos(offset)) + else if ch == '\u2190' then nextChar(); token = LARROW - report.deprecationWarning("The unicode arrow `←` is deprecated, use `<-` instead. If you still wish to display it as one character, consider using a font with programming ligatures such as Fira Code.", sourcePos(offset)) - } - else if (Character.isUnicodeIdentifierStart(ch)) { + report.deprecationWarning(em"The unicode arrow `←` is deprecated, use `<-` instead. If you still wish to display it as one character, consider using a font with programming ligatures such as Fira Code.", sourcePos(offset)) + else if isUnicodeIdentifierStart(ch) then putChar(ch) nextChar() getIdentRest() - } - else if (isSpecial(ch)) { + if ch == '"' && token == IDENTIFIER then token = INTERPOLATIONID + else if isSpecial(ch) then putChar(ch) nextChar() getOperatorRest() - } else if isSupplementary(ch, isUnicodeIdentifierStart) then getIdentRest() - else { - error(s"illegal character '${toUnicode(ch)}'") + if ch == '"' && token == IDENTIFIER then token = INTERPOLATIONID + else if isSupplementary(ch, isSpecial) then + getOperatorRest() + else + error(em"illegal character '${toUnicode(ch)}'") nextChar() - } fetchOther() } } @@ -1043,7 +1040,7 @@ object Scanners { if (ch == '/') nextChar() else skipComment() } - else if (ch == SU) incompleteInputError("unclosed comment") + else if (ch == SU) incompleteInputError(em"unclosed comment") else { nextChar(); skipComment() } def nestedComment() = { nextChar(); skipComment() } val start = lastCharOffset @@ -1091,6 +1088,7 @@ object Scanners { next class LookaheadScanner(val allowIndent: Boolean = false) 
extends Scanner(source, offset, allowIndent = allowIndent) { + override protected def initialCharBufferSize = 8 override def languageImportContext = Scanner.this.languageImportContext } @@ -1123,14 +1121,14 @@ object Scanners { nextChar() finishNamedToken(BACKQUOTED_IDENT, target = this) if (name.length == 0) - error("empty quoted identifier") + error(em"empty quoted identifier") else if (name == nme.WILDCARD) - error("wildcard invalid as backquoted identifier") + error(em"wildcard invalid as backquoted identifier") } - else error("unclosed quoted identifier") + else error(em"unclosed quoted identifier") } - private def getIdentRest(): Unit = (ch: @switch) match { + @tailrec private def getIdentRest(): Unit = (ch: @switch) match { case 'A' | 'B' | 'C' | 'D' | 'E' | 'F' | 'G' | 'H' | 'I' | 'J' | 'K' | 'L' | 'M' | 'N' | 'O' | @@ -1165,7 +1163,7 @@ object Scanners { finishNamed() } - private def getOperatorRest(): Unit = (ch: @switch) match { + @tailrec private def getOperatorRest(): Unit = (ch: @switch) match { case '~' | '!' | '@' | '#' | '%' | '^' | '*' | '+' | '-' | '<' | '>' | '?' | ':' | '=' | '&' | @@ -1176,23 +1174,13 @@ object Scanners { if nxch == '/' || nxch == '*' then finishNamed() else { putChar(ch); nextChar(); getOperatorRest() } case _ => - if (isSpecial(ch)) { putChar(ch); nextChar(); getOperatorRest() } + if isSpecial(ch) then { putChar(ch); nextChar(); getOperatorRest() } + else if isSupplementary(ch, isSpecial) then getOperatorRest() else finishNamed() } private def getIdentOrOperatorRest(): Unit = - if (isIdentifierPart(ch)) - getIdentRest() - else ch match { - case '~' | '!' | '@' | '#' | '%' | - '^' | '*' | '+' | '-' | '<' | - '>' | '?' 
| ':' | '=' | '&' | - '|' | '\\' | '/' => - getOperatorRest() - case _ => - if (isSpecial(ch)) getOperatorRest() - else finishNamed() - } + if (isIdentifierPart(ch) || isSupplementary(ch, isIdentifierPart)) getIdentRest() else getOperatorRest() def isSoftModifier: Boolean = token == IDENTIFIER @@ -1221,7 +1209,7 @@ object Scanners { nextChar() token = STRINGLIT } - else error("unclosed string literal") + else error(em"unclosed string literal") } private def getRawStringLit(): Unit = @@ -1235,7 +1223,7 @@ object Scanners { getRawStringLit() } else if (ch == SU) - incompleteInputError("unclosed multi-line string literal") + incompleteInputError(em"unclosed multi-line string literal") else { putChar(ch) nextRawChar() @@ -1305,7 +1293,7 @@ object Scanners { else if atSupplementary(ch, isUnicodeIdentifierStart) then getInterpolatedIdentRest(hasSupplement = true) else - error("invalid string interpolation: `$$`, `$\"`, `$`ident or `$`BlockExpr expected", off = charOffset - 2) + error("invalid string interpolation: `$$`, `$\"`, `$`ident or `$`BlockExpr expected".toMessage, off = charOffset - 2) putChar('$') getStringPart(multiLine) } @@ -1313,9 +1301,9 @@ object Scanners { val isUnclosedLiteral = !isUnicodeEscape && (ch == SU || (!multiLine && (ch == CR || ch == LF))) if (isUnclosedLiteral) if (multiLine) - incompleteInputError("unclosed multi-line string literal") + incompleteInputError(em"unclosed multi-line string literal") else - error("unclosed string literal") + error(em"unclosed string literal") else { putChar(ch) nextRawChar() @@ -1467,7 +1455,7 @@ object Scanners { } def checkNoLetter(): Unit = if (isIdentifierPart(ch) && ch >= ' ') - error("Invalid literal number") + error(em"Invalid literal number") /** Read a number into strVal and set base */ @@ -1515,7 +1503,7 @@ object Scanners { if (ch == '\'') finishCharLit() else { token = op - strVal = if (name != null) name.toString else null + strVal = Objects.toString(name) litBuf.clear() } } @@ -1550,7 +1538,7 @@ 
object Scanners { def resume(lastTokenData: TokenData): Unit = { this.copyFrom(lastTokenData) if (next.token != EMPTY && !ctx.reporter.hasErrors) - error("unexpected end of input: possible missing '}' in XML block") + error(em"unexpected end of input: possible missing '}' in XML block") nextToken() } @@ -1684,6 +1672,17 @@ object Scanners { def < (that: IndentWidth): Boolean = this <= that && !(that <= this) + /** Does `this` differ from `that` by not more than a single space? */ + def isClose(that: IndentWidth): Boolean = this match + case Run(ch1, n1) => + that match + case Run(ch2, n2) => ch1 == ch2 && ch1 != '\t' && (n1 - n2).abs <= 1 + case Conc(l, r) => false + case Conc(l1, r1) => + that match + case Conc(l2, r2) => l1 == l2 && r1.isClose(r2) + case _ => false + def toPrefix: String = this match { case Run(ch, n) => ch.toString * n case Conc(l, r) => l.toPrefix ++ r.toPrefix diff --git a/compiler/src/dotty/tools/dotc/parsing/Tokens.scala b/compiler/src/dotty/tools/dotc/parsing/Tokens.scala index 7d27b3ca82b9..dba0ad3fa2ee 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Tokens.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Tokens.scala @@ -231,6 +231,8 @@ object Tokens extends TokensCommon { final val canStartInfixTypeTokens: TokenSet = literalTokens | identifierTokens | BitSet( THIS, SUPER, USCORE, LPAREN, LBRACE, AT) + final val canStartTypeTokens: TokenSet = canStartInfixTypeTokens | BitSet(LBRACE) + final val templateIntroTokens: TokenSet = BitSet(CLASS, TRAIT, OBJECT, ENUM, CASECLASS, CASEOBJECT) final val dclIntroTokens: TokenSet = BitSet(DEF, VAL, VAR, TYPE, GIVEN) @@ -287,7 +289,7 @@ object Tokens extends TokensCommon { final val closingParens = BitSet(RPAREN, RBRACKET, RBRACE) - final val softModifierNames = Set(nme.inline, nme.opaque, nme.open, nme.transparent, nme.infix) + final val softModifierNames = Set(nme.inline, nme.into, nme.opaque, nme.open, nme.transparent, nme.infix) def showTokenDetailed(token: Int): String = debugString(token) 
diff --git a/compiler/src/dotty/tools/dotc/parsing/package.scala b/compiler/src/dotty/tools/dotc/parsing/package.scala index a1f9c8d73ad4..ee3ecda60aee 100644 --- a/compiler/src/dotty/tools/dotc/parsing/package.scala +++ b/compiler/src/dotty/tools/dotc/parsing/package.scala @@ -17,7 +17,7 @@ package object parsing { def precedence(operator: Name): Int = if (operator eq nme.ERROR) -1 else { - val firstCh = operator.firstPart.head + val firstCh = operator.firstCodePoint if (isScalaLetter(firstCh)) 1 else if (operator.isOpAssignmentName) 0 else firstCh match { diff --git a/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParserCommon.scala b/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParserCommon.scala index 2c6c5361e51c..0f7d426fbd28 100644 --- a/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParserCommon.scala +++ b/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParserCommon.scala @@ -11,8 +11,7 @@ package xml import Utility._ import util.Chars.SU - - +import scala.collection.BufferedIterator /** This is not a public trait - it contains common code shared * between the library level XML parser and the compiler's. 
diff --git a/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParsers.scala b/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParsers.scala index 3d9f5fb7ad6d..b3f41fab9eaa 100644 --- a/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParsers.scala @@ -6,6 +6,8 @@ package xml import scala.language.unsafeNulls import scala.collection.mutable +import scala.collection.BufferedIterator +import core.Contexts.Context import mutable.{ Buffer, ArrayBuffer, ListBuffer } import scala.util.control.ControlThrowable import util.Chars.SU @@ -13,7 +15,7 @@ import Parsers._ import util.Spans._ import core._ import Constants._ -import Decorators.toMessage +import Decorators.{em, toMessage} import util.SourceFile import Utility._ @@ -50,7 +52,7 @@ object MarkupParsers { override def getMessage: String = "input ended while parsing XML" } - class MarkupParser(parser: Parser, final val preserveWS: Boolean)(implicit src: SourceFile) extends MarkupParserCommon { + class MarkupParser(parser: Parser, final val preserveWS: Boolean)(using Context) extends MarkupParserCommon { import Tokens.{ LBRACE, RBRACE } @@ -330,9 +332,9 @@ object MarkupParsers { case c @ TruncatedXMLControl => ifTruncated(c.getMessage) case c @ (MissingEndTagControl | ConfusedAboutBracesControl) => - parser.syntaxError(c.getMessage + debugLastElem + ">", debugLastPos) + parser.syntaxError(em"${c.getMessage}$debugLastElem>", debugLastPos) case _: ArrayIndexOutOfBoundsException => - parser.syntaxError("missing end tag in XML literal for <%s>" format debugLastElem, debugLastPos) + parser.syntaxError(em"missing end tag in XML literal for <$debugLastElem>", debugLastPos) } finally parser.in.resume(saved) @@ -396,7 +398,7 @@ object MarkupParsers { tree } }, - msg => parser.syntaxError(msg, curOffset) + msg => parser.syntaxError(msg.toMessage, curOffset) ) def escapeToScala[A](op: => A, kind: String): A = { @@ -422,7 +424,7 @@ object MarkupParsers { */ def 
xScalaPatterns: List[Tree] = escapeToScala(parser.patterns(), "pattern") - def reportSyntaxError(offset: Int, str: String): Unit = parser.syntaxError(str, offset) + def reportSyntaxError(offset: Int, str: String): Unit = parser.syntaxError(str.toMessage, offset) def reportSyntaxError(str: String): Unit = { reportSyntaxError(curOffset, "in XML literal: " + str) nextch() diff --git a/compiler/src/dotty/tools/dotc/plugins/Plugin.scala b/compiler/src/dotty/tools/dotc/plugins/Plugin.scala index 21bb0fa2be54..1baf3a06ad9e 100644 --- a/compiler/src/dotty/tools/dotc/plugins/Plugin.scala +++ b/compiler/src/dotty/tools/dotc/plugins/Plugin.scala @@ -44,7 +44,7 @@ sealed trait Plugin { trait StandardPlugin extends Plugin { /** Non-research plugins should override this method to return the phases * - * @param options: commandline options to the plugin, `-P:plugname:opt1,opt2` becomes List(opt1, opt2) + * @param options commandline options to the plugin. * @return a list of phases to be added to the phase plan */ def init(options: List[String]): List[PluginPhase] @@ -57,8 +57,8 @@ trait StandardPlugin extends Plugin { trait ResearchPlugin extends Plugin { /** Research plugins should override this method to return the new phase plan * - * @param options: commandline options to the plugin, `-P:plugname:opt1,opt2` becomes List(opt1, opt2) - * @param plan: the given phase plan + * @param options commandline options to the plugin, `-P:plugname:opt1,opt2` becomes List(opt1, opt2) + * @param plan the given phase plan * @return the new phase plan */ def init(options: List[String], plan: List[List[Phase]])(using Context): List[List[Phase]] diff --git a/compiler/src/dotty/tools/dotc/plugins/Plugins.scala b/compiler/src/dotty/tools/dotc/plugins/Plugins.scala index 3093a1c0460f..c44fe4cf59b4 100644 --- a/compiler/src/dotty/tools/dotc/plugins/Plugins.scala +++ b/compiler/src/dotty/tools/dotc/plugins/Plugins.scala @@ -5,6 +5,7 @@ import scala.language.unsafeNulls import core._ import 
Contexts._ +import Decorators.em import config.{ PathResolver, Feature } import dotty.tools.io._ import Phases._ @@ -83,14 +84,14 @@ trait Plugins { // Verify required plugins are present. for (req <- ctx.settings.require.value ; if !(plugs exists (_.name == req))) - report.error("Missing required plugin: " + req) + report.error(em"Missing required plugin: $req") // Verify no non-existent plugin given with -P for { opt <- ctx.settings.pluginOptions.value if !(plugs exists (opt startsWith _.name + ":")) } - report.error("bad option: -P:" + opt) + report.error(em"bad option: -P:$opt") plugs } @@ -115,8 +116,6 @@ trait Plugins { /** Add plugin phases to phase plan */ def addPluginPhases(plan: List[List[Phase]])(using Context): List[List[Phase]] = { - // plugin-specific options. - // The user writes `-P:plugname:opt1,opt2`, but the plugin sees `List(opt1, opt2)`. def options(plugin: Plugin): List[String] = { def namec = plugin.name + ":" ctx.settings.pluginOptions.value filter (_ startsWith namec) map (_ stripPrefix namec) diff --git a/compiler/src/dotty/tools/dotc/printing/Formatting.scala b/compiler/src/dotty/tools/dotc/printing/Formatting.scala index f85845517d8c..3f32b29654c9 100644 --- a/compiler/src/dotty/tools/dotc/printing/Formatting.scala +++ b/compiler/src/dotty/tools/dotc/printing/Formatting.scala @@ -71,6 +71,16 @@ object Formatting { given Show[TypeComparer.ApproxState] with def show(x: TypeComparer.ApproxState) = TypeComparer.ApproxState.Repr.show(x) + given Show[ast.TreeInfo.PurityLevel] with + def show(x: ast.TreeInfo.PurityLevel) = x match + case ast.TreeInfo.Path => "PurityLevel.Path" + case ast.TreeInfo.Pure => "PurityLevel.Pure" + case ast.TreeInfo.Idempotent => "PurityLevel.Idempotent" + case ast.TreeInfo.Impure => "PurityLevel.Impure" + case ast.TreeInfo.PurePath => "PurityLevel.PurePath" + case ast.TreeInfo.IdempotentPath => "PurityLevel.IdempotentPath" + case _ => s"PurityLevel(${x.x})" + given Show[Showable] = ShowAny given Show[Shown] = 
ShowAny given Show[Int] = ShowAny @@ -90,6 +100,7 @@ object Formatting { given Show[util.SourceFile] = ShowAny given Show[util.Spans.Span] = ShowAny given Show[tasty.TreeUnpickler#OwnerTree] = ShowAny + given Show[typer.ForceDegree.Value] = ShowAny private def show1[A: Show](x: A)(using Context) = show2(Show[A].show(x).ctxShow) private def show2(x: Shown)(using Context): String = x match @@ -137,236 +148,6 @@ object Formatting { } } - /** The `em` string interpolator works like the `i` string interpolator, but marks nonsensical errors - * using `...` tags. - * Note: Instead of these tags, it would be nicer to return a data structure containing the message string - * and a boolean indicating whether the message is sensical, but then we cannot use string operations - * like concatenation, stripMargin etc on the values returned by em"...", and in the current error - * message composition methods, this is crucial. - */ - def forErrorMessages(op: Context ?=> String)(using Context): String = op(using errorMessageCtx) - - private class ErrorMessagePrinter(_ctx: Context) extends RefinedPrinter(_ctx): - override def toText(tp: Type): Text = wrapNonSensical(tp, super.toText(tp)) - override def toText(sym: Symbol): Text = wrapNonSensical(sym, super.toText(sym)) - - private def wrapNonSensical(arg: Any, text: Text)(using Context): Text = { - import Message._ - def isSensical(arg: Any): Boolean = arg match { - case tpe: Type => - tpe.exists && !tpe.isErroneous - case sym: Symbol if sym.isCompleted => - sym.info match { - case _: ErrorType | TypeAlias(_: ErrorType) | NoType => false - case _ => true - } - case _ => true - } - - if (isSensical(arg)) text - else nonSensicalStartTag ~ text ~ nonSensicalEndTag - } - - private type Recorded = Symbol | ParamRef | SkolemType - - private case class SeenKey(str: String, isType: Boolean) - private class Seen extends mutable.HashMap[SeenKey, List[Recorded]] { - - override def default(key: SeenKey) = Nil - - def record(str: String, isType: 
Boolean, entry: Recorded)(using Context): String = { - - /** If `e1` is an alias of another class of the same name, return the other - * class symbol instead. This normalization avoids recording e.g. scala.List - * and scala.collection.immutable.List as two different types - */ - def followAlias(e1: Recorded): Recorded = e1 match { - case e1: Symbol if e1.isAliasType => - val underlying = e1.typeRef.underlyingClassRef(refinementOK = false).typeSymbol - if (underlying.name == e1.name) underlying else e1 - case _ => e1 - } - val key = SeenKey(str, isType) - val existing = apply(key) - lazy val dealiased = followAlias(entry) - - // alts: The alternatives in `existing` that are equal, or follow (an alias of) `entry` - var alts = existing.dropWhile(alt => dealiased ne followAlias(alt)) - if (alts.isEmpty) { - alts = entry :: existing - update(key, alts) - } - val suffix = alts.length match { - case 1 => "" - case n => n.toString.toCharArray.map { - case '0' => '⁰' - case '1' => '¹' - case '2' => '²' - case '3' => '³' - case '4' => '⁴' - case '5' => '⁵' - case '6' => '⁶' - case '7' => '⁷' - case '8' => '⁸' - case '9' => '⁹' - }.mkString - } - str + suffix - } - } - - private class ExplainingPrinter(seen: Seen)(_ctx: Context) extends ErrorMessagePrinter(_ctx) { - - /** True if printer should a source module instead of its module class */ - private def useSourceModule(sym: Symbol): Boolean = - sym.is(ModuleClass, butNot = Package) && sym.sourceModule.exists && !_ctx.settings.YdebugNames.value - - override def simpleNameString(sym: Symbol): String = - if (useSourceModule(sym)) simpleNameString(sym.sourceModule) - else seen.record(super.simpleNameString(sym), sym.isType, sym) - - override def ParamRefNameString(param: ParamRef): String = - seen.record(super.ParamRefNameString(param), param.isInstanceOf[TypeParamRef], param) - - override def toTextRef(tp: SingletonType): Text = tp match { - case tp: SkolemType => seen.record(tp.repr.toString, isType = true, tp) - case _ => 
super.toTextRef(tp) - } - - override def toText(tp: Type): Text = tp match { - case tp: TypeRef if useSourceModule(tp.symbol) => Str("object ") ~ super.toText(tp) - case _ => super.toText(tp) - } - } - - /** Create explanation for single `Recorded` type or symbol */ - def explanation(entry: AnyRef)(using Context): String = { - def boundStr(bound: Type, default: ClassSymbol, cmp: String) = - if (bound.isRef(default)) "" else i"$cmp $bound" - - def boundsStr(bounds: TypeBounds): String = { - val lo = boundStr(bounds.lo, defn.NothingClass, ">:") - val hi = boundStr(bounds.hi, defn.AnyClass, "<:") - if (lo.isEmpty) hi - else if (hi.isEmpty) lo - else s"$lo and $hi" - } - - def addendum(cat: String, info: Type): String = info match { - case bounds @ TypeBounds(lo, hi) if bounds ne TypeBounds.empty => - if (lo eq hi) i" which is an alias of $lo" - else i" with $cat ${boundsStr(bounds)}" - case _ => - "" - } - - entry match { - case param: TypeParamRef => - s"is a type variable${addendum("constraint", TypeComparer.bounds(param))}" - case param: TermParamRef => - s"is a reference to a value parameter" - case sym: Symbol => - val info = - if (ctx.gadt.contains(sym)) - sym.info & ctx.gadt.fullBounds(sym) - else - sym.info - s"is a ${ctx.printer.kindString(sym)}${sym.showExtendedLocation}${addendum("bounds", info)}" - case tp: SkolemType => - s"is an unknown value of type ${tp.widen.show}" - } - } - - /** Turns a `Seen` into a `String` to produce an explanation for types on the - * form `where: T is...` - * - * @return string disambiguating types - */ - private def explanations(seen: Seen)(using Context): String = { - def needsExplanation(entry: Recorded) = entry match { - case param: TypeParamRef => ctx.typerState.constraint.contains(param) - case param: ParamRef => false - case skolem: SkolemType => true - case sym: Symbol => - ctx.gadt.contains(sym) && ctx.gadt.fullBounds(sym) != TypeBounds.empty - } - - val toExplain: List[(String, Recorded)] = seen.toList.flatMap { kvs 
=> - val res: List[(String, Recorded)] = kvs match { - case (key, entry :: Nil) => - if (needsExplanation(entry)) (key.str, entry) :: Nil else Nil - case (key, entries) => - for (alt <- entries) yield { - val tickedString = seen.record(key.str, key.isType, alt) - (tickedString, alt) - } - } - res // help the inferrencer out - }.sortBy(_._1) - - def columnar(parts: List[(String, String)]): List[String] = { - lazy val maxLen = parts.map(_._1.length).max - parts.map { - case (leader, trailer) => - val variable = hl(leader) - s"""$variable${" " * (maxLen - leader.length)} $trailer""" - } - } - - val explainParts = toExplain.map { case (str, entry) => (str, explanation(entry)) } - val explainLines = columnar(explainParts) - if (explainLines.isEmpty) "" else i"where: $explainLines%\n %\n" - } - - private def errorMessageCtx(using Context): Context = - val ctx1 = ctx.property(MessageLimiter) match - case Some(_: ErrorMessageLimiter) => ctx - case _ => ctx.fresh.setProperty(MessageLimiter, ErrorMessageLimiter()) - ctx1.printer match - case _: ErrorMessagePrinter => ctx1 - case _ => ctx1.fresh.setPrinterFn(ctx => ErrorMessagePrinter(ctx)) - - /** Context with correct printer set for explanations */ - private def explainCtx(seen: Seen)(using Context): Context = - val ectx = errorMessageCtx - ectx.printer match - case dp: ExplainingPrinter => - ectx // re-use outer printer and defer explanation to it - case _ => - ectx.fresh.setPrinterFn(ctx => new ExplainingPrinter(seen)(ctx)) - - /** Entrypoint for explanation string interpolator: - * - * ``` - * ex"disambiguate $tpe1 and $tpe2" - * ``` - */ - def explained(op: Context ?=> String)(using Context): String = { - val seen = new Seen - val msg = op(using explainCtx(seen)) - val addendum = explanations(seen) - if (addendum.isEmpty) msg else msg ++ "\n\n" ++ addendum - } - - /** When getting a type mismatch it is useful to disambiguate placeholders like: - * - * ``` - * found: List[Int] - * required: List[T] - * where: T is a type 
in the initializer of value s which is an alias of - * String - * ``` - * - * @return the `where` section as well as the printing context for the - * placeholders - `("T is a...", printCtx)` - */ - def disambiguateTypes(args: Type*)(using Context): (String, Context) = { - val seen = new Seen - val printCtx = explainCtx(seen) - args.foreach(_.show(using printCtx)) // showing each member will put it into `seen` - (explanations(seen), printCtx) - } - /** This method will produce a colored type diff from the given arguments. * The idea is to do this for known cases that are useful and then fall back * on regular syntax highlighting for the cases which are unhandled. @@ -378,16 +159,13 @@ object Formatting { * @return the (found, expected, changePercentage) with coloring to * highlight the difference */ - def typeDiff(found: Type, expected: Type)(using Context): (String, String) = { - val fnd = wrapNonSensical(found, found.toText(ctx.printer)).show - val exp = wrapNonSensical(expected, expected.toText(ctx.printer)).show - - DiffUtil.mkColoredTypeDiff(fnd, exp) match { - case _ if ctx.settings.color.value == "never" => (fnd, exp) - case (fnd, exp, change) if change < 0.5 => (fnd, exp) + def typeDiff(found: Type, expected: Type)(using Context): (String, String) = + val fnd = found.show + val exp = expected.show + DiffUtil.mkColoredTypeDiff(fnd, exp) match + case (fnd1, exp1, change) + if change < 0.5 && ctx.settings.color.value != "never" => (fnd1, exp1) case _ => (fnd, exp) - } - } /** Explicit syntax highlighting */ def hl(s: String)(using Context): String = diff --git a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala index 3c83d681e716..700b3fbf525f 100644 --- a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -111,13 +111,19 @@ class PlainPrinter(_ctx: Context) extends Printer { protected def refinementNameString(tp: 
RefinedType): String = nameString(tp.refinedName) /** String representation of a refinement */ - protected def toTextRefinement(rt: RefinedType): Closed = - (refinementNameString(rt) ~ toTextRHS(rt.refinedInfo)).close + protected def toTextRefinement(rt: RefinedType): Text = + val keyword = rt.refinedInfo match { + case _: ExprType | _: MethodOrPoly => "def " + case _: TypeBounds => "type " + case _: TypeProxy => "val " + case _ => "" + } + (keyword ~ refinementNameString(rt) ~ toTextRHS(rt.refinedInfo)).close - protected def argText(arg: Type): Text = homogenizeArg(arg) match { + protected def argText(arg: Type, isErased: Boolean = false): Text = keywordText("erased ").provided(isErased) ~ (homogenizeArg(arg) match { case arg: TypeBounds => "?" ~ toText(arg) case arg => toText(arg) - } + }) /** Pretty-print comma-separated type arguments for a constructor to be inserted among parentheses or brackets * (hence with `GlobalPrec` precedence). @@ -143,8 +149,21 @@ class PlainPrinter(_ctx: Context) extends Printer { + defn.ObjectClass + defn.FromJavaObjectSymbol - def toText(cs: CaptureSet): Text = - "{" ~ Text(cs.elems.toList.map(toTextCaptureRef), ", ") ~ "}" + def toTextCaptureSet(cs: CaptureSet): Text = + if printDebug && !cs.isConst then cs.toString + else if ctx.settings.YccDebug.value then cs.show + else if !cs.isConst && cs.elems.isEmpty then "?" + else "{" ~ Text(cs.elems.toList.map(toTextCaptureRef), ", ") ~ "}" + + /** Print capturing type, overridden in RefinedPrinter to account for + * capturing function types. 
+ */ + protected def toTextCapturing(parent: Type, refsText: Text, boxText: Text): Text = + changePrec(InfixPrec): + boxText ~ toTextLocal(parent) ~ "^" + ~ (refsText provided refsText != rootSetText) + + final protected def rootSetText = Str("{cap}") def toText(tp: Type): Text = controlled { homogenize(tp) match { @@ -201,24 +220,13 @@ class PlainPrinter(_ctx: Context) extends Printer { (" <: " ~ toText(bound) provided !bound.isAny) }.close case tp @ EventuallyCapturingType(parent, refs) => - def box = - Str("box ") provided tp.isBoxed //&& ctx.settings.YccDebug.value - def printRegular(refsText: Text) = - changePrec(GlobalPrec)(box ~ refsText ~ " " ~ toText(parent)) - if printDebug && !refs.isConst then - printRegular(refs.toString) - else if ctx.settings.YccDebug.value then - printRegular(refs.show) - else if !refs.isConst && refs.elems.isEmpty then - printRegular("?") - else if Config.printCaptureSetsAsPrefix then - printRegular(toText(refs)) - else - changePrec(InfixPrec)(box ~ toText(parent) ~ " @retains(" ~ toText(refs.elems.toList, ",") ~ ")") + val boxText: Text = Str("box ") provided tp.isBoxed //&& ctx.settings.YccDebug.value + val refsText = if refs.isUniversal then rootSetText else toTextCaptureSet(refs) + toTextCapturing(parent, refsText, boxText) case tp: PreviousErrorType if ctx.settings.XprintTypes.value => "" // do not print previously reported error message because they may try to print this error type again recuresevely case tp: ErrorType => - s"" + s"" case tp: WildcardType => if (tp.optBounds.exists) "" else "" case NoType => @@ -229,21 +237,19 @@ class PlainPrinter(_ctx: Context) extends Printer { changePrec(GlobalPrec) { "(" ~ keywordText("using ").provided(tp.isContextualMethod) - ~ keywordText("erased ").provided(tp.isErasedMethod) ~ keywordText("implicit ").provided(tp.isImplicitMethod && !tp.isContextualMethod) ~ paramsText(tp) ~ ")" ~ (Str(": ") provided !tp.resultType.isInstanceOf[MethodOrPoly]) ~ toText(tp.resultType) } - case 
ExprType(ct @ EventuallyCapturingType(parent, refs)) - if ct.annot.symbol == defn.RetainsByNameAnnot => - if refs.isUniversal then changePrec(GlobalPrec) { "=> " ~ toText(parent) } - else toText(CapturingType(ExprType(parent), refs)) case ExprType(restp) => - changePrec(GlobalPrec) { - (if Feature.pureFunsEnabled then "-> " else "=> ") ~ toText(restp) - } + def arrowText: Text = restp match + case ct @ EventuallyCapturingType(parent, refs) if ct.annot.symbol == defn.RetainsByNameAnnot => + if refs.isUniversal then Str("=>") else Str("->") ~ toTextCaptureSet(refs) + case _ => + if Feature.pureFunsEnabled then "->" else "=>" + changePrec(GlobalPrec)(arrowText ~ " " ~ toText(restp)) case tp: HKTypeLambda => changePrec(GlobalPrec) { "[" ~ paramsText(tp) ~ "]" ~ lambdaHash(tp) ~ Str(" =>> ") ~ toTextGlobal(tp.resultType) @@ -258,8 +264,9 @@ class PlainPrinter(_ctx: Context) extends Printer { if annot.symbol == defn.InlineParamAnnot || annot.symbol == defn.ErasedParamAnnot then toText(tpe) else toTextLocal(tpe) ~ " " ~ toText(annot) case tp: TypeVar => + def toTextCaret(tp: Type) = if printDebug then toTextLocal(tp) ~ Str("^") else toText(tp) if (tp.isInstantiated) - toTextLocal(tp.instanceOpt) ~ (Str("^") provided printDebug) + toTextCaret(tp.instanceOpt) else { val constr = ctx.typerState.constraint val bounds = @@ -267,7 +274,7 @@ class PlainPrinter(_ctx: Context) extends Printer { withMode(Mode.Printing)(TypeComparer.fullBounds(tp.origin)) else TypeBounds.empty - if (bounds.isTypeAlias) toText(bounds.lo) ~ (Str("^") provided printDebug) + if (bounds.isTypeAlias) toTextCaret(bounds.lo) else if (ctx.settings.YshowVarBounds.value) "(" ~ toText(tp.origin) ~ "?" ~ toText(bounds) ~ ")" else toText(tp.origin) } @@ -278,6 +285,8 @@ class PlainPrinter(_ctx: Context) extends Printer { case ex: Throwable => Str("...") } "LazyRef(" ~ refTxt ~ ")" + case Range(lo, hi) => + toText(lo) ~ ".." 
~ toText(hi) case _ => tp.fallbackToText(this) } @@ -287,9 +296,10 @@ class PlainPrinter(_ctx: Context) extends Printer { "(" ~ toTextRef(tp) ~ " : " ~ toTextGlobal(tp.underlying) ~ ")" protected def paramsText(lam: LambdaType): Text = { - def paramText(name: Name, tp: Type) = - toText(name) ~ lambdaHash(lam) ~ toTextRHS(tp, isParameter = true) - Text(lam.paramNames.lazyZip(lam.paramInfos).map(paramText), ", ") + val erasedParams = lam.erasedParams + def paramText(name: Name, tp: Type, erased: Boolean) = + keywordText("erased ").provided(erased) ~ toText(name) ~ lambdaHash(lam) ~ toTextRHS(tp, isParameter = true) + Text(lam.paramNames.lazyZip(lam.paramInfos).lazyZip(erasedParams).map(paramText), ", ") } protected def ParamRefNameString(name: Name): String = nameString(name) @@ -376,7 +386,7 @@ class PlainPrinter(_ctx: Context) extends Printer { def toTextCaptureRef(tp: Type): Text = homogenize(tp) match - case tp: TermRef if tp.symbol == defn.captureRoot => Str("*") + case tp: TermRef if tp.symbol == defn.captureRoot => Str("cap") case tp: SingletonType => toTextRef(tp) case _ => toText(tp) @@ -607,7 +617,7 @@ class PlainPrinter(_ctx: Context) extends Printer { def toText(sc: Scope): Text = ("Scope{" ~ dclsText(sc.toList) ~ "}").close - def toText[T >: Untyped](tree: Tree[T]): Text = { + def toText[T <: Untyped](tree: Tree[T]): Text = { def toTextElem(elem: Any): Text = elem match { case elem: Showable => elem.toText(this) case elem: List[?] 
=> "List(" ~ Text(elem map toTextElem, ",") ~ ")" @@ -630,6 +640,13 @@ class PlainPrinter(_ctx: Context) extends Printer { else if (pos.source.exists) s"${pos.source.file.name}:${pos.line + 1}" else s"(no source file, offset = ${pos.span.point})" + def toText(cand: Candidate): Text = + "Cand(" + ~ toTextRef(cand.ref) + ~ (if cand.isConversion then " conv" else "") + ~ (if cand.isExtension then " ext" else "") + ~ Str(" L" + cand.level) ~ ")" + def toText(result: SearchResult): Text = result match { case result: SearchSuccess => "SearchSuccess: " ~ toText(result.ref) ~ " via " ~ toText(result.tree) @@ -689,8 +706,9 @@ class PlainPrinter(_ctx: Context) extends Printer { Text(ups.map(toText), ", ") Text(deps, "\n") } + val depsText = if Config.showConstraintDeps then c.depsToString else "" //Printer.debugPrintUnique = false - Text.lines(List(uninstVarsText, constrainedText, boundsText, orderingText)) + Text.lines(List(uninstVarsText, constrainedText, boundsText, orderingText, depsText)) finally ctx.typerState.constraint = savedConstraint diff --git a/compiler/src/dotty/tools/dotc/printing/Printer.scala b/compiler/src/dotty/tools/dotc/printing/Printer.scala index f06c70f56905..ab0c867ec31f 100644 --- a/compiler/src/dotty/tools/dotc/printing/Printer.scala +++ b/compiler/src/dotty/tools/dotc/printing/Printer.scala @@ -7,7 +7,7 @@ import Texts._, ast.Trees._ import Types.{Type, SingletonType, LambdaParam}, Symbols.Symbol, Scopes.Scope, Constants.Constant, Names.Name, Denotations._, Annotations.Annotation, Contexts.Context -import typer.Implicits.SearchResult +import typer.Implicits.* import util.SourcePosition import typer.ImportInfo @@ -31,7 +31,7 @@ abstract class Printer { * ### `atPrec` vs `changePrec` * * This is to be used when changing precedence inside some sort of parentheses: - * for instance, to print T[A]` use + * for instance, to print `T[A]` use * `toText(T) ~ '[' ~ atPrec(GlobalPrec) { toText(A) } ~ ']'`. 
* * If the presence of the parentheses depends on precedence, inserting them manually is most certainly a bug. @@ -60,8 +60,7 @@ abstract class Printer { * A op B op' C parses as (A op B) op' C if op and op' are left-associative, and as * A op (B op' C) if they're right-associative, so we need respectively * ```scala - * val isType = ??? // is this a term or type operator? - * val prec = parsing.precedence(op, isType) + * val prec = parsing.precedence(op) * // either: * changePrec(prec) { toText(a) ~ op ~ atPrec(prec + 1) { toText(b) } } // for left-associative op and op' * // or: @@ -149,11 +148,14 @@ abstract class Printer { def toText(sc: Scope): Text /** Textual representation of tree */ - def toText[T >: Untyped](tree: Tree[T]): Text + def toText[T <: Untyped](tree: Tree[T]): Text /** Textual representation of source position */ def toText(pos: SourcePosition): Text + /** Textual representation of implicit candidates. */ + def toText(cand: Candidate): Text + /** Textual representation of implicit search result */ def toText(result: SearchResult): Text @@ -175,15 +177,15 @@ abstract class Printer { atPrec(GlobalPrec) { elem.toText(this) } /** Render elements alternating with `sep` string */ - def toText(elems: Traversable[Showable], sep: String): Text = + def toText(elems: Iterable[Showable], sep: String): Text = Text(elems map (_ toText this), sep) /** Render elements within highest precedence */ - def toTextLocal(elems: Traversable[Showable], sep: String): Text = + def toTextLocal(elems: Iterable[Showable], sep: String): Text = atPrec(DotPrec) { toText(elems, sep) } /** Render elements within lowest precedence */ - def toTextGlobal(elems: Traversable[Showable], sep: String): Text = + def toTextGlobal(elems: Iterable[Showable], sep: String): Text = atPrec(GlobalPrec) { toText(elems, sep) } /** A plain printer without any embellishments */ diff --git a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala 
b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala index 2a87ec9b4bbe..51aaa0932e5e 100644 --- a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -3,6 +3,7 @@ package dotc package printing import core._ +import Constants.* import Texts._ import Types._ import Flags._ @@ -40,7 +41,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { override def printerContext: Context = myCtx - def withEnclosingDef(enclDef: Tree[? >: Untyped])(op: => Text): Text = { + def withEnclosingDef(enclDef: Tree[?])(op: => Text): Text = { val savedCtx = myCtx if (enclDef.hasType && enclDef.symbol.exists) myCtx = ctx.withOwner(enclDef.symbol) @@ -143,46 +144,55 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { private def arrow(isGiven: Boolean, isPure: Boolean): String = (if isGiven then "?" else "") + (if isPure then "->" else "=>") - override def toText(tp: Type): Text = controlled { - def toTextTuple(args: List[Type]): Text = - "(" ~ argsText(args) ~ ")" + private def toTextFunction(tp: AppliedType, refs: Text = Str("")): Text = + val AppliedType(tycon, args) = (tp: @unchecked) + val tsym = tycon.typeSymbol + val isGiven = tsym.name.isContextFunction + val capturesRoot = refs == rootSetText + val isPure = + Feature.pureFunsEnabled && !tsym.name.isImpureFunction && !capturesRoot + changePrec(GlobalPrec) { + val argStr: Text = + if args.length == 2 + && !defn.isTupleNType(args.head) + && !isGiven + then + atPrec(InfixPrec) { argText(args.head) } + else + "(" + ~ argsText(args.init) + ~ ")" + argStr + ~ " " ~ arrow(isGiven, isPure) + ~ (refs provided !capturesRoot) + ~ " " ~ argText(args.last) + } - def toTextFunction(args: List[Type], isGiven: Boolean, isErased: Boolean, isPure: Boolean): Text = + private def toTextMethodAsFunction(info: Type, isPure: Boolean, refs: Text = Str("")): Text = info match + case info: MethodType => + val capturesRoot = refs == rootSetText 
changePrec(GlobalPrec) { - val argStr: Text = - if args.length == 2 - && !defn.isTupleNType(args.head) - && !isGiven && !isErased - then - atPrec(InfixPrec) { argText(args.head) } - else - "(" - ~ keywordText("erased ").provided(isErased) - ~ argsText(args.init) - ~ ")" - argStr ~ " " ~ arrow(isGiven, isPure) ~ " " ~ argText(args.last) + "(" + ~ paramsText(info) + ~ ") " + ~ arrow(info.isImplicitMethod, isPure && !capturesRoot) + ~ (refs provided !capturesRoot) + ~ " " + ~ toTextMethodAsFunction(info.resultType, isPure) + } + case info: PolyType => + changePrec(GlobalPrec) { + "[" + ~ paramsText(info) + ~ "] => " + ~ toTextMethodAsFunction(info.resultType, isPure) } + case _ => + toText(info) - def toTextMethodAsFunction(info: Type, isPure: Boolean): Text = info match - case info: MethodType => - changePrec(GlobalPrec) { - "(" - ~ keywordText("erased ").provided(info.isErasedMethod) - ~ paramsText(info) - ~ ") " - ~ arrow(info.isImplicitMethod, isPure) - ~ " " - ~ toTextMethodAsFunction(info.resultType, isPure) - } - case info: PolyType => - changePrec(GlobalPrec) { - "[" - ~ paramsText(info) - ~ "] => " - ~ toTextMethodAsFunction(info.resultType, isPure) - } - case _ => - toText(info) + override def toText(tp: Type): Text = controlled { + def toTextTuple(args: List[Type]): Text = + "(" ~ argsText(args) ~ ")" def isInfixType(tp: Type): Boolean = tp match case AppliedType(tycon, args) => @@ -223,9 +233,8 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { case _ => val tsym = tycon.typeSymbol if tycon.isRepeatedParam then toTextLocal(args.head) ~ "*" - else if defn.isFunctionSymbol(tsym) then - toTextFunction(args, tsym.name.isContextFunction, tsym.name.isErasedFunction, - isPure = Feature.pureFunsEnabled && !tsym.name.isImpureFunction) + else if tp.isConvertibleParam then "into " ~ toText(args.head) + else if defn.isFunctionSymbol(tsym) then toTextFunction(tp) else if isInfixType(tp) then val l :: r :: Nil = args: @unchecked val opName = 
tyconName(tycon) @@ -285,14 +294,9 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { case tp: ViewProto => toText(tp.argType) ~ " ?=>? " ~ toText(tp.resultType) case tp @ FunProto(args, resultType) => - val argsText = args match { - case dummyTreeOfType(tp) :: Nil if !(tp isRef defn.NullClass) => "null: " ~ toText(tp) - case _ => toTextGlobal(args, ", ") - } "[applied to (" ~ keywordText("using ").provided(tp.isContextualMethod) - ~ keywordText("erased ").provided(tp.isErasedMethod) - ~ argsText + ~ argsTreeText(args) ~ ") returning " ~ toText(resultType) ~ "]" @@ -308,15 +312,19 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { protected def exprToText(tp: ExprType): Text = "=> " ~ toText(tp.resType) - protected def blockToText[T >: Untyped](block: Block[T]): Text = + protected def argsTreeText(args: List[untpd.Tree]): Text = args match + case dummyTreeOfType(tp) :: Nil if !tp.isRef(defn.NullClass) && !homogenizedView => toText(Constant(null)) ~ ": " ~ toText(tp) + case _ => toTextGlobal(args, ", ") + + protected def blockToText[T <: Untyped](block: Block[T]): Text = blockText(block.stats :+ block.expr) - protected def blockText[T >: Untyped](trees: List[Tree[T]]): Text = + protected def blockText[T <: Untyped](trees: List[Tree[T]]): Text = inContextBracket { ("{" ~ toText(trees, "\n") ~ "}").close } - protected def typeApplyText[T >: Untyped](tree: TypeApply[T]): Text = { + protected def typeApplyText[T <: Untyped](tree: TypeApply[T]): Text = { val funText = toTextLocal(tree.fun) tree.fun match { case Select(New(tpt), nme.CONSTRUCTOR) if tpt.typeOpt.dealias.isInstanceOf[AppliedType] => @@ -326,7 +334,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { } } - protected def toTextCore[T >: Untyped](tree: Tree[T]): Text = { + protected def toTextCore[T <: Untyped](tree: Tree[T]): Text = { import untpd._ def isLocalThis(tree: Tree) = tree.typeOpt match { @@ -433,15 +441,11 @@ class RefinedPrinter(_ctx: Context) 
extends PlainPrinter(_ctx) { changePrec (GlobalPrec) { keywordStr("throw ") ~ toText(args.head) } - else if (!printDebug && fun.hasType && fun.symbol == defn.QuotedRuntime_exprQuote) - keywordStr("'{") ~ toTextGlobal(args, ", ") ~ keywordStr("}") - else if (!printDebug && fun.hasType && fun.symbol.isExprSplice) - keywordStr("${") ~ toTextGlobal(args, ", ") ~ keywordStr("}") else toTextLocal(fun) ~ "(" ~ Str("using ").provided(app.applyKind == ApplyKind.Using && !homogenizedView) - ~ toTextGlobal(args, ", ") + ~ argsTreeText(args) ~ ")" case tree: TypeApply => typeApplyText(tree) @@ -523,9 +527,10 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { case SeqLiteral(elems, elemtpt) => "[" ~ toTextGlobal(elems, ",") ~ " : " ~ toText(elemtpt) ~ "]" case tree @ Inlined(call, bindings, body) => - (("/* inlined from " ~ (if (call.isEmpty) "outside" else toText(call)) ~ " */ ") `provided` - !homogenizedView && ctx.settings.XprintInline.value) ~ - (if bindings.isEmpty then toText(body) else blockText(bindings :+ body)) + val bodyText = if bindings.isEmpty then toText(body) else blockText(bindings :+ body) + if homogenizedView || !ctx.settings.XprintInline.value then bodyText + else if call.isEmpty then stringText("{{") ~ stringText("/* inlined from outside */") ~ bodyText ~ stringText("}}") + else keywordText("{{") ~ keywordText("/* inlined from ") ~ toText(call) ~ keywordText(" */") ~ bodyText ~ keywordText("}}") case tpt: untpd.DerivedTypeTree => "" case TypeTree() => @@ -561,14 +566,15 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { toText(sel) ~ keywordStr(" match ") ~ blockText(cases) ~ (" <: " ~ toText(bound) provided !bound.isEmpty) } + case ImpureByNameTypeTree(tpt) => + "=> " ~ toTextLocal(tpt) case ByNameTypeTree(tpt) => - (if Feature.pureFunsEnabled then "-> " else "=> ") - ~ toTextLocal(tpt) + (if Feature.pureFunsEnabled then "-> " else "=> ") ~ toTextLocal(tpt) case TypeBoundsTree(lo, hi, alias) => if (lo eq hi) && 
alias.isEmpty then optText(lo)(" = " ~ _) else optText(lo)(" >: " ~ _) ~ optText(hi)(" <: " ~ _) ~ optText(alias)(" = " ~ _) case bind @ Bind(name, body) => - keywordText("given ").provided(tree.symbol.isOneOf(GivenOrImplicit) && !homogenizedView) ~ // Used for scala.quoted.Type in quote patterns (not pickled) + toTextOwner(bind) ~ keywordText("given ").provided(tree.symbol.isOneOf(GivenOrImplicit) && !homogenizedView) ~ // Used for scala.quoted.Type in quote patterns (not pickled) changePrec(InfixPrec) { nameIdText(bind) ~ " @ " ~ toText(body) } case Alternative(trees) => changePrec(OrPrec) { toText(trees, " | ") } @@ -622,7 +628,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { def toTextAnnot = toTextLocal(arg) ~~ annotText(annot.symbol.enclosingClass, annot) def toTextRetainsAnnot = - try changePrec(GlobalPrec)(toText(captureSet) ~ " " ~ toText(arg)) + try changePrec(GlobalPrec)(toText(arg) ~ "^" ~ toTextCaptureSet(captureSet)) catch case ex: IllegalCaptureRef => toTextAnnot if annot.symbol.maybeOwner == defn.RetainsAnnot && Feature.ccEnabled && Config.printCaptureSetsAsPrefix && !printDebug @@ -647,27 +653,29 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { case str: Literal => strText(str) } toText(id) ~ "\"" ~ Text(segments map segmentText, "") ~ "\"" - case Function(args, body) => + case fn @ Function(args, body) => var implicitSeen: Boolean = false var isGiven: Boolean = false - var isErased: Boolean = false - def argToText(arg: Tree) = arg match { + val erasedParams = fn match { + case fn: FunctionWithMods => fn.erasedParams + case _ => fn.args.map(_ => false) + } + def argToText(arg: Tree, isErased: Boolean) = arg match { case arg @ ValDef(name, tpt, _) => val implicitText = if ((arg.mods.is(Given))) { isGiven = true; "" } - else if ((arg.mods.is(Erased))) { isErased = true; "" } else if ((arg.mods.is(Implicit)) && !implicitSeen) { implicitSeen = true; keywordStr("implicit ") } else "" - implicitText ~ toText(name) 
~ optAscription(tpt) + val erasedText = if isErased then keywordStr("erased ") else "" + implicitText ~ erasedText ~ toText(name) ~ optAscription(tpt) case _ => toText(arg) } val argsText = args match { - case (arg @ ValDef(_, tpt, _)) :: Nil if tpt.isEmpty => argToText(arg) + case (arg @ ValDef(_, tpt, _)) :: Nil if tpt.isEmpty => argToText(arg, erasedParams(0)) case _ => "(" - ~ keywordText("erased ").provided(isErased) - ~ Text(args.map(argToText), ", ") + ~ Text(args.zip(erasedParams).map(argToText), ", ") ~ ")" } val isPure = @@ -714,32 +722,47 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { } case Number(digits, kind) => digits - case Quote(tree) if tree.isTerm => - keywordStr("'{") ~ toTextGlobal(dropBlock(tree)) ~ keywordStr("}") - case Splice(tree) => - keywordStr("${") ~ toTextGlobal(dropBlock(tree)) ~ keywordStr("}") case Thicket(trees) => "Thicket {" ~~ toTextGlobal(trees, "\n") ~~ "}" case MacroTree(call) => keywordStr("macro ") ~ toTextGlobal(call) - case Hole(isTermHole, idx, args, content, tpt) => - val (prefix, postfix) = if isTermHole then ("{{{", "}}}") else ("[[[", "]]]") + case tree @ Quote(body, tags) => + val tagsText = (keywordStr("<") ~ toTextGlobal(tags, ", ") ~ keywordStr(">")).provided(tree.tags.nonEmpty) + val exprTypeText = (keywordStr("[") ~ toTextGlobal(tree.bodyType) ~ keywordStr("]")).provided(printDebug && tree.typeOpt.exists) + val open = if (body.isTerm) keywordStr("{") else keywordStr("[") + val close = if (body.isTerm) keywordStr("}") else keywordStr("]") + keywordStr("'") ~ tagsText ~ exprTypeText ~ open ~ toTextGlobal(body) ~ close + case Splice(expr) => + val spliceTypeText = (keywordStr("[") ~ toTextGlobal(tree.typeOpt) ~ keywordStr("]")).provided(printDebug && tree.typeOpt.exists) + keywordStr("$") ~ spliceTypeText ~ keywordStr("{") ~ toTextGlobal(expr) ~ keywordStr("}") + case SplicePattern(pattern, args) => + val spliceTypeText = (keywordStr("[") ~ toTextGlobal(tree.typeOpt) ~ 
keywordStr("]")).provided(printDebug && tree.typeOpt.exists) + keywordStr("$") ~ spliceTypeText ~ { + if args.isEmpty then keywordStr("{") ~ inPattern(toText(pattern)) ~ keywordStr("}") + else toText(pattern.symbol.name) ~ "(" ~ toTextGlobal(args, ", ") ~ ")" + } + case Hole(isTerm, idx, args, content) => + val (prefix, postfix) = if isTerm then ("{{{", "}}}") else ("[[[", "]]]") val argsText = toTextGlobal(args, ", ") val contentText = toTextGlobal(content) - val tptText = toTextGlobal(tpt) - prefix ~~ idx.toString ~~ "|" ~~ tptText ~~ "|" ~~ argsText ~~ "|" ~~ contentText ~~ postfix - case CapturingTypeTree(refs, parent) => - parent match - case ImpureByNameTypeTree(bntpt) => - "=> " ~ toTextLocal(bntpt) - case _ => - changePrec(GlobalPrec)("{" ~ Text(refs.map(toText), ", ") ~ "} " ~ toText(parent)) + val tpeText = toTextGlobal(tree.typeOpt) + prefix ~~ idx.toString ~~ "|" ~~ tpeText ~~ "|" ~~ argsText ~~ "|" ~~ contentText ~~ postfix + case CapturesAndResult(refs, parent) => + changePrec(GlobalPrec)("^{" ~ Text(refs.map(toText), ", ") ~ "}" ~ toText(parent)) case _ => tree.fallbackToText(this) } } - override def toText[T >: Untyped](tree: Tree[T]): Text = controlled { + override protected def toTextCapturing(tp: Type, refsText: Text, boxText: Text): Text = tp match + case tp: AppliedType if defn.isFunctionSymbol(tp.typeSymbol) && !printDebug => + boxText ~ toTextFunction(tp, refsText) + case tp: RefinedType if defn.isFunctionOrPolyType(tp) && !printDebug => + boxText ~ toTextMethodAsFunction(tp.refinedInfo, isPure = !tp.typeSymbol.name.isImpureFunction, refsText) + case _ => + super.toTextCapturing(tp, refsText, boxText) + + override def toText[T <: Untyped](tree: Tree[T]): Text = controlled { import untpd._ var txt = toTextCore(tree) @@ -826,7 +849,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { protected def dropAnnotForModText(sym: Symbol): Boolean = sym == defn.BodyAnnot - protected def optAscription[T >: Untyped](tpt: Tree[T]): Text = 
optText(tpt)(": " ~ _) + protected def optAscription[T <: Untyped](tpt: Tree[T]): Text = optText(tpt)(": " ~ _) private def idText(tree: untpd.Tree): Text = (if showUniqueIds && tree.hasType && tree.symbol.exists then s"#${tree.symbol.id}" else "") ~ @@ -842,7 +865,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { private def useSymbol(tree: untpd.Tree) = tree.hasType && tree.symbol.exists && ctx.settings.YprintSyms.value - protected def nameIdText[T >: Untyped](tree: NameTree[T]): Text = + protected def nameIdText[T <: Untyped](tree: NameTree[T]): Text = if (tree.hasType && tree.symbol.exists) { val str = nameString(tree.symbol) tree match { @@ -856,26 +879,25 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { private def toTextOwner(tree: Tree[?]) = "[owner = " ~ tree.symbol.maybeOwner.show ~ "]" provided ctx.settings.YprintDebugOwners.value - protected def dclTextOr[T >: Untyped](tree: Tree[T])(treeText: => Text): Text = + protected def dclTextOr[T <: Untyped](tree: Tree[T])(treeText: => Text): Text = toTextOwner(tree) ~ { if (useSymbol(tree)) annotsText(tree.symbol) ~~ dclText(tree.symbol) else treeText } - def paramsText[T>: Untyped](params: ParamClause[T]): Text = (params: @unchecked) match + def paramsText[T <: Untyped](params: ParamClause[T]): Text = (params: @unchecked) match case Nil => "()" case untpd.ValDefs(vparams @ (vparam :: _)) => "(" ~ keywordText("using ").provided(vparam.mods.is(Given)) - ~ keywordText("erased ").provided(vparam.mods.is(Erased)) ~ toText(vparams, ", ") ~ ")" case untpd.TypeDefs(tparams) => "[" ~ toText(tparams, ", ") ~ "]" - def addParamssText[T >: Untyped](leading: Text, paramss: List[ParamClause[T]]): Text = + def addParamssText[T <: Untyped](leading: Text, paramss: List[ParamClause[T]]): Text = paramss.foldLeft(leading)((txt, params) => txt ~ paramsText(params)) - protected def valDefToText[T >: Untyped](tree: ValDef[T]): Text = { + protected def valDefToText[T <: Untyped](tree: 
ValDef[T]): Text = { dclTextOr(tree) { modText(tree.mods, tree.symbol, keywordStr(if (tree.mods.is(Mutable)) "var" else "val"), isType = false) ~~ valDefText(nameIdText(tree)) ~ optAscription(tree.tpt) ~ @@ -883,7 +905,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { } } - protected def defDefToText[T >: Untyped](tree: DefDef[T]): Text = { + protected def defDefToText[T <: Untyped](tree: DefDef[T]): Text = { import untpd._ dclTextOr(tree) { val defKeyword = modText(tree.mods, tree.symbol, keywordStr("def"), isType = false) @@ -893,30 +915,31 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { if isExtension then val paramss = if tree.name.isRightAssocOperatorName then + // If you change the names of the clauses below, also change them in right-associative-extension-methods.md // we have the following encoding of tree.paramss: - // (leadingTyParamss ++ leadingUsing - // ++ rightTyParamss ++ rightParamss - // ++ leftParamss ++ trailingUsing ++ rest) + // (leftTyParams ++ leadingUsing + // ++ rightTyParams ++ rightParam + // ++ leftParam ++ trailingUsing ++ rest) // e.g. // extension [A](using B)(c: C)(using D) // def %:[E](f: F)(g: G)(using H): Res = ??? 
// will have the following values: - // - leadingTyParamss = List(`[A]`) + // - leftTyParams = List(`[A]`) // - leadingUsing = List(`(using B)`) - // - rightTyParamss = List(`[E]`) - // - rightParamss = List(`(f: F)`) - // - leftParamss = List(`(c: C)`) + // - rightTyParams = List(`[E]`) + // - rightParam = List(`(f: F)`) + // - leftParam = List(`(c: C)`) // - trailingUsing = List(`(using D)`) // - rest = List(`(g: G)`, `(using H)`) - // we need to swap (rightTyParams ++ rightParamss) with (leftParamss ++ trailingUsing) - val (leadingTyParamss, rest1) = tree.paramss.span(isTypeParamClause) + // we need to swap (rightTyParams ++ rightParam) with (leftParam ++ trailingUsing) + val (leftTyParams, rest1) = tree.paramss.span(isTypeParamClause) val (leadingUsing, rest2) = rest1.span(isUsingClause) - val (rightTyParamss, rest3) = rest2.span(isTypeParamClause) - val (rightParamss, rest4) = rest3.splitAt(1) - val (leftParamss, rest5) = rest4.splitAt(1) + val (rightTyParams, rest3) = rest2.span(isTypeParamClause) + val (rightParam, rest4) = rest3.splitAt(1) + val (leftParam, rest5) = rest4.splitAt(1) val (trailingUsing, rest6) = rest5.span(isUsingClause) - if leftParamss.nonEmpty then - leadingTyParamss ::: leadingUsing ::: leftParamss ::: trailingUsing ::: rightTyParamss ::: rightParamss ::: rest6 + if leftParam.nonEmpty then + leftTyParams ::: leadingUsing ::: leftParam ::: trailingUsing ::: rightTyParams ::: rightParam ::: rest6 else tree.paramss // it wasn't a binary operator, after all. 
else @@ -989,8 +1012,8 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { ) } - protected def toTextPackageId[T >: Untyped](pid: Tree[T]): Text = - if (homogenizedView && pid.hasType) toTextLocal(pid.tpe.asInstanceOf[Showable]) + protected def toTextPackageId[T <: Untyped](pid: Tree[T]): Text = + if (homogenizedView && pid.hasType) toTextLocal(pid.typeOpt) else toTextLocal(pid) protected def packageDefText(tree: PackageDef): Text = { @@ -1028,7 +1051,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { else PrintableFlags(isType) if (homogenizedView && mods.flags.isTypeFlags) flagMask &~= GivenOrImplicit // drop implicit/given from classes val rawFlags = if (sym.exists) sym.flagsUNSAFE else mods.flags - if (rawFlags.is(Param)) flagMask = flagMask &~ Given &~ Erased + if (rawFlags.is(Param)) flagMask = flagMask &~ Given val flags = rawFlags & flagMask var flagsText = toTextFlags(sym, flags) val annotTexts = @@ -1044,10 +1067,10 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { def optText(name: Name)(encl: Text => Text): Text = if (name.isEmpty) "" else encl(toText(name)) - def optText[T >: Untyped](tree: Tree[T])(encl: Text => Text): Text = + def optText[T <: Untyped](tree: Tree[T])(encl: Text => Text): Text = if (tree.isEmpty) "" else encl(toText(tree)) - def optText[T >: Untyped](tree: List[Tree[T]])(encl: Text => Text): Text = + def optText[T <: Untyped](tree: List[Tree[T]])(encl: Text => Text): Text = if (tree.exists(!_.isEmpty)) encl(blockText(tree)) else "" override protected def treatAsTypeParam(sym: Symbol): Boolean = sym.is(TypeParam) @@ -1060,7 +1083,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { if (sym.isImport) sym.infoOrCompleter match { case info: Namer#Completer => return info.original.show - case info: ImportType => return s"import $info.expr.show" + case info: ImportType => return s"import ${info.expr.show}" case _ => } def name = diff --git 
a/compiler/src/dotty/tools/dotc/printing/Texts.scala b/compiler/src/dotty/tools/dotc/printing/Texts.scala index 17f86e766869..475e2c6900d5 100644 --- a/compiler/src/dotty/tools/dotc/printing/Texts.scala +++ b/compiler/src/dotty/tools/dotc/printing/Texts.scala @@ -1,8 +1,12 @@ package dotty.tools.dotc package printing +import scala.annotation.internal.sharable object Texts { + @sharable + private val ansi = java.util.regex.Pattern.compile("\u001b\\[\\d+m").nn + sealed abstract class Text { protected def indentMargin: Int = 2 @@ -15,12 +19,17 @@ object Texts { case Vertical(relems) => relems.isEmpty } + // Str Ver Clo Flu + // isVertical F T F F + // isClosed F T T F + // isFluid F F T T + // isSplittable F F F T def isVertical: Boolean = isInstanceOf[Vertical] def isClosed: Boolean = isVertical || isInstanceOf[Closed] def isFluid: Boolean = isInstanceOf[Fluid] def isSplittable: Boolean = isFluid && !isClosed - def close: Closed = new Closed(relems) + def close: Text = if isSplittable then Closed(relems) else this def remaining(width: Int): Int = this match { case Str(s, _) => @@ -53,7 +62,7 @@ object Texts { } private def appendIndented(that: Text)(width: Int): Text = - Vertical(that.layout(width - indentMargin).indented :: this.relems) + Fluid(that.layout(width - indentMargin).indented :: this.relems) private def append(width: Int)(that: Text): Text = if (this.isEmpty) that.layout(width) @@ -65,7 +74,7 @@ object Texts { else appendIndented(that)(width) private def lengthWithoutAnsi(str: String): Int = - str.replaceAll("\u001b\\[\\d+m", "").nn.length + ansi.matcher(str).nn.replaceAll("").nn.length def layout(width: Int): Text = this match { case Str(s, _) => @@ -113,7 +122,7 @@ object Texts { sb.append("|") } } - sb.append(s) + sb.append(s.replaceAll("[ ]+$", "")) case _ => var follow = false for (elem <- relems.reverse) { @@ -138,7 +147,13 @@ object Texts { def ~ (that: Text): Text = if (this.isEmpty) that else if (that.isEmpty) this - else Fluid(that :: this :: 
Nil) + else this match + case Fluid(relems1) if !isClosed => that match + case Fluid(relems2) if !that.isClosed => Fluid(relems2 ++ relems1) + case _ => Fluid(that +: relems1) + case _ => that match + case Fluid(relems2) if !that.isClosed => Fluid(relems2 :+ this) + case _ => Fluid(that :: this :: Nil) def ~~ (that: Text): Text = if (this.isEmpty) that @@ -158,16 +173,16 @@ object Texts { /** A concatenation of elements in `xs` and interspersed with * separator strings `sep`. */ - def apply(xs: Traversable[Text], sep: String = " "): Text = + def apply(xs: Iterable[Text], sep: String = " "): Text = if (sep == "\n") lines(xs) else { - val ys = xs filterNot (_.isEmpty) + val ys = xs.filterNot(_.isEmpty) if (ys.isEmpty) Str("") - else ys reduce (_ ~ sep ~ _) + else ys.reduceRight((a, b) => (a ~ sep).close ~ b) } /** The given texts `xs`, each on a separate line */ - def lines(xs: Traversable[Text]): Vertical = Vertical(xs.toList.reverse) + def lines(xs: Iterable[Text]): Vertical = Vertical(xs.toList.reverse) extension (text: => Text) def provided(cond: Boolean): Text = if (cond) text else Str("") @@ -176,12 +191,16 @@ object Texts { case class Str(s: String, lineRange: LineRange = EmptyLineRange) extends Text { override def relems: List[Text] = List(this) + override def toString = this match + case Str(s, EmptyLineRange) => s"Str($s)" + case Str(s, lineRange) => s"Str($s, $lineRange)" } case class Vertical(relems: List[Text]) extends Text case class Fluid(relems: List[Text]) extends Text - class Closed(relems: List[Text]) extends Fluid(relems) + class Closed(relems: List[Text]) extends Fluid(relems): + override def productPrefix = "Closed" implicit def stringToText(s: String): Text = Str(s) diff --git a/compiler/src/dotty/tools/dotc/profile/ExtendedThreadMxBean.java b/compiler/src/dotty/tools/dotc/profile/ExtendedThreadMxBean.java index 68ae4f148cfd..60f44db16add 100644 --- a/compiler/src/dotty/tools/dotc/profile/ExtendedThreadMxBean.java +++ 
b/compiler/src/dotty/tools/dotc/profile/ExtendedThreadMxBean.java @@ -248,13 +248,14 @@ public SunThreadMxBean(ThreadMXBean underlying) { super(underlying); this.real = underlying; try { - getThreadUserTimeMethod = real.getClass().getMethod("getThreadUserTime", long[].class); - isThreadAllocatedMemoryEnabledMethod = real.getClass().getMethod("isThreadAllocatedMemoryEnabled"); - setThreadAllocatedMemoryEnabledMethod = real.getClass().getMethod("setThreadAllocatedMemoryEnabled", Boolean.TYPE); - getThreadAllocatedBytesMethod1 = real.getClass().getMethod("getThreadAllocatedBytes", Long.TYPE); - getThreadAllocatedBytesMethod2 = real.getClass().getMethod("getThreadAllocatedBytes", long[].class); - isThreadAllocatedMemorySupportedMethod = real.getClass().getMethod("isThreadAllocatedMemorySupported"); - getThreadCpuTimeMethod = real.getClass().getMethod("getThreadCpuTime", long[].class); + Class cls = Class.forName("com.sun.management.ThreadMXBean"); + getThreadUserTimeMethod = cls.getMethod("getThreadUserTime", long[].class); + isThreadAllocatedMemoryEnabledMethod = cls.getMethod("isThreadAllocatedMemoryEnabled"); + setThreadAllocatedMemoryEnabledMethod = cls.getMethod("setThreadAllocatedMemoryEnabled", Boolean.TYPE); + getThreadAllocatedBytesMethod1 = cls.getMethod("getThreadAllocatedBytes", Long.TYPE); + getThreadAllocatedBytesMethod2 = cls.getMethod("getThreadAllocatedBytes", long[].class); + isThreadAllocatedMemorySupportedMethod = cls.getMethod("isThreadAllocatedMemorySupported"); + getThreadCpuTimeMethod = cls.getMethod("getThreadCpuTime", long[].class); getThreadUserTimeMethod.setAccessible(true); isThreadAllocatedMemoryEnabledMethod.setAccessible(true); diff --git a/compiler/src/dotty/tools/dotc/profile/Profiler.scala b/compiler/src/dotty/tools/dotc/profile/Profiler.scala index 25c53903c10b..64cc08160701 100644 --- a/compiler/src/dotty/tools/dotc/profile/Profiler.scala +++ b/compiler/src/dotty/tools/dotc/profile/Profiler.scala @@ -13,6 +13,7 @@ import 
javax.management.{Notification, NotificationEmitter, NotificationListener import dotty.tools.dotc.core.Phases.Phase import dotty.tools.dotc.core.Contexts._ import dotty.tools.io.AbstractFile +import annotation.internal.sharable object Profiler { def apply()(using Context): Profiler = @@ -217,14 +218,16 @@ sealed trait ProfileReporter { } object ConsoleProfileReporter extends ProfileReporter { - + @sharable var totalAlloc = 0L override def reportBackground(profiler: RealProfiler, threadRange: ProfileRange): Unit = - // TODO - ??? + reportCommon(EventType.BACKGROUND, profiler, threadRange) override def reportForeground(profiler: RealProfiler, threadRange: ProfileRange): Unit = - // TODO - ??? + reportCommon(EventType.MAIN, profiler, threadRange) + @nowarn("cat=deprecation") + private def reportCommon(tpe:EventType, profiler: RealProfiler, threadRange: ProfileRange): Unit = + totalAlloc += threadRange.allocatedBytes + println(s"${threadRange.phase.phaseName.replace(',', ' ')},run ns = ${threadRange.runNs},idle ns = ${threadRange.idleNs},cpu ns = ${threadRange.cpuNs},user ns = ${threadRange.userNs},allocated = ${threadRange.allocatedBytes},heap at end = ${threadRange.end.heapBytes}, total allocated = $totalAlloc ") override def close(profiler: RealProfiler): Unit = () diff --git a/compiler/src/dotty/tools/dotc/quoted/Interpreter.scala b/compiler/src/dotty/tools/dotc/quoted/Interpreter.scala new file mode 100644 index 000000000000..c9a77dbfa151 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/quoted/Interpreter.scala @@ -0,0 +1,368 @@ +package dotty.tools.dotc +package quoted + +import scala.language.unsafeNulls + +import scala.collection.mutable +import scala.reflect.ClassTag + +import java.io.{PrintWriter, StringWriter} +import java.lang.reflect.{InvocationTargetException, Method => JLRMethod} + +import dotty.tools.dotc.ast.tpd +import dotty.tools.dotc.ast.TreeMapWithImplicits +import dotty.tools.dotc.core.Annotations._ +import dotty.tools.dotc.core.Constants._ 
+import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Decorators._ +import dotty.tools.dotc.core.Denotations.staticRef +import dotty.tools.dotc.core.Flags._ +import dotty.tools.dotc.core.NameKinds.FlatName +import dotty.tools.dotc.core.Names._ +import dotty.tools.dotc.core.StdNames._ +import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.core.TypeErasure +import dotty.tools.dotc.core.Types._ +import dotty.tools.dotc.quoted._ +import dotty.tools.dotc.typer.ImportInfo.withRootImports +import dotty.tools.dotc.util.SrcPos +import dotty.tools.dotc.reporting.Message +import dotty.tools.repl.AbstractFileClassLoader +import dotty.tools.dotc.core.CyclicReference + +/** Tree interpreter for metaprogramming constructs */ +class Interpreter(pos: SrcPos, classLoader0: ClassLoader)(using Context): + import Interpreter._ + import tpd._ + + val classLoader = + if ctx.owner.topLevelClass.name.startsWith(str.REPL_SESSION_LINE) then + new AbstractFileClassLoader(ctx.settings.outputDir.value, classLoader0) + else classLoader0 + + /** Local variable environment */ + type Env = Map[Symbol, Object] + def emptyEnv: Env = Map.empty + inline def env(using e: Env): e.type = e + + /** Returns the result of interpreting the code in the tree. + * Return Some of the result or None if the result type is not consistent with the expected type. + * Throws a StopInterpretation if the tree could not be interpreted or a runtime exception ocurred. + */ + final def interpret[T](tree: Tree)(using ct: ClassTag[T]): Option[T] = + interpretTree(tree)(using emptyEnv) match { + case obj: T => Some(obj) + case obj => + // TODO upgrade to a full type tag check or something similar + report.error(em"Interpreted tree returned a result of an unexpected type. Expected ${ct.runtimeClass} but was ${obj.getClass}", pos) + None + } + + /** Returns the result of interpreting the code in the tree. 
+ * Throws a StopInterpretation if the tree could not be interpreted or a runtime exception ocurred. + */ + protected def interpretTree(tree: Tree)(using Env): Object = tree match { + case Literal(Constant(value)) => + interpretLiteral(value) + + case tree: Ident if tree.symbol.is(Inline, butNot = Method) => + tree.tpe.widenTermRefExpr match + case ConstantType(c) => c.value.asInstanceOf[Object] + case _ => throw new StopInterpretation(em"${tree.symbol} could not be inlined", tree.srcPos) + + // TODO disallow interpreted method calls as arguments + case Call(fn, args) => + if (fn.symbol.isConstructor) + interpretNew(fn.symbol, args.flatten.map(interpretTree)) + else if (fn.symbol.is(Module)) + interpretModuleAccess(fn.symbol) + else if (fn.symbol.is(Method) && fn.symbol.isStatic) { + interpretedStaticMethodCall(fn.symbol.owner, fn.symbol, interpretArgs(args, fn.symbol.info)) + } + else if fn.symbol.isStatic then + assert(args.isEmpty) + interpretedStaticFieldAccess(fn.symbol) + else if (fn.qualifier.symbol.is(Module) && fn.qualifier.symbol.isStatic) + if (fn.name == nme.asInstanceOfPM) + interpretModuleAccess(fn.qualifier.symbol) + else { + interpretedStaticMethodCall(fn.qualifier.symbol.moduleClass, fn.symbol, interpretArgs(args, fn.symbol.info)) + } + else if (env.contains(fn.symbol)) + env(fn.symbol) + else if (tree.symbol.is(InlineProxy)) + interpretTree(tree.symbol.defTree.asInstanceOf[ValOrDefDef].rhs) + else + unexpectedTree(tree) + + case closureDef((ddef @ DefDef(_, ValDefs(arg :: Nil) :: Nil, _, _))) => + (obj: AnyRef) => interpretTree(ddef.rhs)(using env.updated(arg.symbol, obj)) + + // Interpret `foo(j = x, i = y)` which it is expanded to + // `val j$1 = x; val i$1 = y; foo(i = i$1, j = j$1)` + case Block(stats, expr) => interpretBlock(stats, expr) + case NamedArg(_, arg) => interpretTree(arg) + + case Inlined(_, bindings, expansion) => interpretBlock(bindings, expansion) + + case Typed(expr, _) => + interpretTree(expr) + + case SeqLiteral(elems, _) => 
+ interpretVarargs(elems.map(e => interpretTree(e))) + + case _ => + unexpectedTree(tree) + } + + private def interpretArgs(argss: List[List[Tree]], fnType: Type)(using Env): List[Object] = { + def interpretArgsGroup(args: List[Tree], argTypes: List[Type]): List[Object] = + assert(args.size == argTypes.size) + val view = + for (arg, info) <- args.lazyZip(argTypes) yield + info match + case _: ExprType => () => interpretTree(arg) // by-name argument + case _ => interpretTree(arg) // by-value argument + view.toList + + fnType.dealias match + case fnType: MethodType if fnType.hasErasedParams => interpretArgs(argss, fnType.resType) + case fnType: MethodType => + val argTypes = fnType.paramInfos + assert(argss.head.size == argTypes.size) + interpretArgsGroup(argss.head, argTypes) ::: interpretArgs(argss.tail, fnType.resType) + case fnType: AppliedType if defn.isContextFunctionType(fnType) => + val argTypes :+ resType = fnType.args: @unchecked + interpretArgsGroup(argss.head, argTypes) ::: interpretArgs(argss.tail, resType) + case fnType: PolyType => interpretArgs(argss, fnType.resType) + case fnType: ExprType => interpretArgs(argss, fnType.resType) + case _ => + assert(argss.isEmpty) + Nil + } + + private def interpretBlock(stats: List[Tree], expr: Tree)(using Env) = { + var unexpected: Option[Object] = None + val newEnv = stats.foldLeft(env)((accEnv, stat) => stat match + case stat: ValDef => + accEnv.updated(stat.symbol, interpretTree(stat.rhs)(using accEnv)) + case stat => + if (unexpected.isEmpty) + unexpected = Some(unexpectedTree(stat)) + accEnv + ) + unexpected.getOrElse(interpretTree(expr)(using newEnv)) + } + + private def interpretLiteral(value: Any): Object = + value.asInstanceOf[Object] + + private def interpretVarargs(args: List[Object]): Object = + args.toSeq + + private def interpretedStaticMethodCall(moduleClass: Symbol, fn: Symbol, args: List[Object]): Object = { + val inst = + try loadModule(moduleClass) + catch + case 
MissingClassDefinedInCurrentRun(sym) => + suspendOnMissing(sym, pos) + val clazz = inst.getClass + val name = fn.name.asTermName + val method = getMethod(clazz, name, paramsSig(fn)) + stopIfRuntimeException(method.invoke(inst, args: _*), method) + } + + private def interpretedStaticFieldAccess(sym: Symbol): Object = { + val clazz = loadClass(sym.owner.fullName.toString) + val field = clazz.getField(sym.name.toString) + field.get(null) + } + + private def interpretModuleAccess(fn: Symbol): Object = + loadModule(fn.moduleClass) + + private def interpretNew(fn: Symbol, args: List[Object]): Object = { + val className = fn.owner.fullName.mangledString.replaceAll("\\$\\.", "\\$") + val clazz = loadClass(className) + val constr = clazz.getConstructor(paramsSig(fn): _*) + constr.newInstance(args: _*).asInstanceOf[Object] + } + + private def unexpectedTree(tree: Tree): Object = + throw new StopInterpretation(em"Unexpected tree could not be interpreted: ${tree.toString}", tree.srcPos) + + private def loadModule(sym: Symbol): Object = + if (sym.owner.is(Package)) { + // is top level object + val moduleClass = loadClass(sym.fullName.toString) + moduleClass.getField(str.MODULE_INSTANCE_FIELD).get(null) + } + else { + // nested object in an object + val clazz = loadClass(sym.binaryClassName) + clazz.getConstructor().newInstance().asInstanceOf[Object] + } + + private def loadReplLineClass(moduleClass: Symbol): Class[?] = { + val lineClassloader = new AbstractFileClassLoader(ctx.settings.outputDir.value, classLoader) + lineClassloader.loadClass(moduleClass.name.firstPart.toString) + } + + private def loadClass(name: String): Class[?] 
= + try classLoader.loadClass(name) + catch + case MissingClassDefinedInCurrentRun(sym) => + suspendOnMissing(sym, pos) + + + private def getMethod(clazz: Class[?], name: Name, paramClasses: List[Class[?]]): JLRMethod = + try clazz.getMethod(name.toString, paramClasses: _*) + catch { + case _: NoSuchMethodException => + val msg = em"Could not find method ${clazz.getCanonicalName}.$name with parameters ($paramClasses%, %)" + throw new StopInterpretation(msg, pos) + case MissingClassDefinedInCurrentRun(sym) => + suspendOnMissing(sym, pos) + } + + private def stopIfRuntimeException[T](thunk: => T, method: JLRMethod): T = + try thunk + catch { + case ex: RuntimeException => + val sw = new StringWriter() + sw.write("A runtime exception occurred while executing macro expansion\n") + sw.write(ex.getMessage) + sw.write("\n") + ex.printStackTrace(new PrintWriter(sw)) + sw.write("\n") + throw new StopInterpretation(sw.toString.toMessage, pos) + case ex: InvocationTargetException => + ex.getTargetException match { + case ex: scala.quoted.runtime.StopMacroExpansion => + throw ex + case MissingClassDefinedInCurrentRun(sym) => + suspendOnMissing(sym, pos) + case targetException => + val sw = new StringWriter() + sw.write("Exception occurred while executing macro expansion.\n") + if (!ctx.settings.Ydebug.value) { + val end = targetException.getStackTrace.lastIndexWhere { x => + x.getClassName == method.getDeclaringClass.getCanonicalName && x.getMethodName == method.getName + } + val shortStackTrace = targetException.getStackTrace.take(end + 1) + targetException.setStackTrace(shortStackTrace) + targetException.printStackTrace(new PrintWriter(sw)) + + targetException match + case _: CyclicReference => sw.write("\nSee full stack trace using -Ydebug") + case _ => + } else { + targetException.printStackTrace(new PrintWriter(sw)) + } + sw.write("\n") + throw new StopInterpretation(sw.toString.toMessage, pos) + } + } + + /** List of classes of the parameters of the signature of `sym` */ 
+ private def paramsSig(sym: Symbol): List[Class[?]] = { + def paramClass(param: Type): Class[?] = { + def arrayDepth(tpe: Type, depth: Int): (Type, Int) = tpe match { + case JavaArrayType(elemType) => arrayDepth(elemType, depth + 1) + case _ => (tpe, depth) + } + def javaArraySig(tpe: Type): String = { + val (elemType, depth) = arrayDepth(tpe, 0) + val sym = elemType.classSymbol + val suffix = + if (sym == defn.BooleanClass) "Z" + else if (sym == defn.ByteClass) "B" + else if (sym == defn.ShortClass) "S" + else if (sym == defn.IntClass) "I" + else if (sym == defn.LongClass) "J" + else if (sym == defn.FloatClass) "F" + else if (sym == defn.DoubleClass) "D" + else if (sym == defn.CharClass) "C" + else "L" + javaSig(elemType) + ";" + ("[" * depth) + suffix + } + def javaSig(tpe: Type): String = tpe match { + case tpe: JavaArrayType => javaArraySig(tpe) + case _ => + // Take the flatten name of the class and the full package name + val pack = tpe.classSymbol.topLevelClass.owner + val packageName = if (pack == defn.EmptyPackageClass) "" else s"${pack.fullName}." 
+ packageName + tpe.classSymbol.fullNameSeparated(FlatName).toString + } + + val sym = param.classSymbol + if (sym == defn.BooleanClass) classOf[Boolean] + else if (sym == defn.ByteClass) classOf[Byte] + else if (sym == defn.CharClass) classOf[Char] + else if (sym == defn.ShortClass) classOf[Short] + else if (sym == defn.IntClass) classOf[Int] + else if (sym == defn.LongClass) classOf[Long] + else if (sym == defn.FloatClass) classOf[Float] + else if (sym == defn.DoubleClass) classOf[Double] + else java.lang.Class.forName(javaSig(param), false, classLoader) + } + def getExtraParams(tp: Type): List[Type] = tp.widenDealias match { + case tp: AppliedType if defn.isContextFunctionType(tp) => + // Call context function type direct method + tp.args.init.map(arg => TypeErasure.erasure(arg)) ::: getExtraParams(tp.args.last) + case _ => Nil + } + val extraParams = getExtraParams(sym.info.finalResultType) + val allParams = TypeErasure.erasure(sym.info) match { + case meth: MethodType => meth.paramInfos ::: extraParams + case _ => extraParams + } + allParams.map(paramClass) + } +end Interpreter + +object Interpreter: + /** Exception that stops interpretation if some issue is found */ + class StopInterpretation(val msg: Message, val pos: SrcPos) extends Exception + + object Call: + import tpd._ + /** Matches an expression that is either a field access or an application + * It retruns a TermRef containing field accessed or a method reference and the arguments passed to it. 
+ */ + def unapply(arg: Tree)(using Context): Option[(RefTree, List[List[Tree]])] = + Call0.unapply(arg).map((fn, args) => (fn, args.reverse)) + + private object Call0 { + def unapply(arg: Tree)(using Context): Option[(RefTree, List[List[Tree]])] = arg match { + case Select(Call0(fn, args), nme.apply) if defn.isContextFunctionType(fn.tpe.widenDealias.finalResultType) => + Some((fn, args)) + case fn: Ident => Some((tpd.desugarIdent(fn).withSpan(fn.span), Nil)) + case fn: Select => Some((fn, Nil)) + case Apply(f @ Call0(fn, args1), args2) => + if (f.tpe.widenDealias.hasErasedParams) Some((fn, args1)) + else Some((fn, args2 :: args1)) + case TypeApply(Call0(fn, args), _) => Some((fn, args)) + case _ => None + } + } + end Call + + object MissingClassDefinedInCurrentRun { + def unapply(targetException: Throwable)(using Context): Option[Symbol] = { + if !ctx.compilationUnit.isSuspendable then None + else targetException match + case _: NoClassDefFoundError | _: ClassNotFoundException => + val className = targetException.getMessage + if className eq null then None + else + val sym = staticRef(className.toTypeName).symbol + if (sym.isDefinedInCurrentRun) Some(sym) else None + case _ => None + } + } + + def suspendOnMissing(sym: Symbol, pos: SrcPos)(using Context): Nothing = + if ctx.settings.XprintSuspension.value then + report.echo(i"suspension triggered by a dependency on $sym", pos) + ctx.compilationUnit.suspend() // this throws a SuspendException diff --git a/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala b/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala index 41f3fd4f64f3..7596549fe401 100644 --- a/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala +++ b/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala @@ -5,6 +5,7 @@ import dotty.tools.dotc.ast.{TreeTypeMap, tpd} import dotty.tools.dotc.config.Printers._ import dotty.tools.dotc.core.Contexts._ import dotty.tools.dotc.core.Decorators._ +import dotty.tools.dotc.core.Flags._ import 
dotty.tools.dotc.core.Mode import dotty.tools.dotc.core.Symbols._ import dotty.tools.dotc.core.Types._ @@ -12,7 +13,7 @@ import dotty.tools.dotc.core.tasty.{ PositionPickler, TastyPickler, TastyPrinter import dotty.tools.dotc.core.tasty.DottyUnpickler import dotty.tools.dotc.core.tasty.TreeUnpickler.UnpickleMode import dotty.tools.dotc.report - +import dotty.tools.dotc.reporting.Message import scala.quoted.Quotes import scala.quoted.runtime.impl._ @@ -99,9 +100,9 @@ object PickledQuotes { private def spliceTerms(tree: Tree, typeHole: TypeHole, termHole: ExprHole)(using Context): Tree = { def evaluateHoles = new TreeMap { override def transform(tree: tpd.Tree)(using Context): tpd.Tree = tree match { - case Hole(isTermHole, idx, args, _, _) => + case Hole(isTerm, idx, args, _) => inContext(SpliceScope.contextWithNewSpliceScope(tree.sourcePos)) { - if isTermHole then + if isTerm then val quotedExpr = termHole match case ExprHole.V1(evalHole) => evalHole.nn.apply(idx, reifyExprHoleV1Args(args), QuotesImpl()) @@ -164,7 +165,7 @@ object PickledQuotes { val tree = typeHole match case TypeHole.V1(evalHole) => tdef.rhs match - case TypeBoundsTree(_, Hole(_, idx, args, _, _), _) => + case TypeBoundsTree(_, Hole(_, idx, args, _), _) => // To keep for backwards compatibility. In some older version holes where created in the bounds. val quotedType = evalHole.nn.apply(idx, reifyTypeHoleArgs(args)) PickledQuotes.quotedTypeToTree(quotedType) @@ -172,7 +173,7 @@ object PickledQuotes { // To keep for backwards compatibility. In some older version we missed the creation of some holes. 
tpt case TypeHole.V2(types) => - val Hole(_, idx, _, _, _) = tdef.rhs: @unchecked + val Hole(_, idx, _, _) = tdef.rhs: @unchecked PickledQuotes.quotedTypeToTree(types.nn.apply(idx)) (tdef.symbol, tree.tpe) }.toMap @@ -220,10 +221,10 @@ object PickledQuotes { treePkl.pickle(tree :: Nil) treePkl.compactify() if tree.span.exists then - val positionWarnings = new mutable.ListBuffer[String]() + val positionWarnings = new mutable.ListBuffer[Message]() val reference = ctx.settings.sourceroot.value - new PositionPickler(pickler, treePkl.buf.addrOfTree, treePkl.treeAnnots, reference) - .picklePositions(ctx.compilationUnit.source, tree :: Nil, positionWarnings) + PositionPickler.picklePositions(pickler, treePkl.buf.addrOfTree, treePkl.treeAnnots, reference, + ctx.compilationUnit.source, tree :: Nil, positionWarnings) positionWarnings.foreach(report.warning(_)) val pickled = pickler.assembleParts() @@ -248,23 +249,39 @@ object PickledQuotes { case pickled: String => TastyString.unpickle(pickled) case pickled: List[String] => TastyString.unpickle(pickled) - quotePickling.println(s"**** unpickling quote from TASTY\n${TastyPrinter.showContents(bytes, ctx.settings.color.value == "never")}") + val unpicklingContext = + if ctx.owner.isClass then + // When a quote is unpickled with a Quotes context that that has a class `spliceOwner` + // we need to use a dummy owner to unpickle it. Otherwise any definitions defined + // in the quoted block would be accidentally entered in the class. + // When splicing this expression, this owner is replaced with the correct owner (see `quotedExprToTree` and `quotedTypeToTree` above). + // On the other hand, if the expression is used as a reflect term, the user must call `changeOwner` (same as with other expressions used within a nested owner). + // `-Xcheck-macros` will check for inconsistent owners and provide the users hints on how to improve them. 
+ // + // Quotes context that that has a class `spliceOwner` can come from a macro annotation + // or a user setting it explicitly using `Symbol.asQuotes`. + ctx.withOwner(newSymbol(ctx.owner, "$quoteOwnedByClass$".toTermName, Private, defn.AnyType, NoSymbol)) + else ctx - val mode = if (isType) UnpickleMode.TypeTree else UnpickleMode.Term - val unpickler = new DottyUnpickler(bytes, mode) - unpickler.enter(Set.empty) + inContext(unpicklingContext) { - val tree = unpickler.tree - QuotesCache(pickled) = tree + quotePickling.println(s"**** unpickling quote from TASTY\n${TastyPrinter.showContents(bytes, ctx.settings.color.value == "never")}") - // Make sure trees and positions are fully loaded - new TreeTraverser { - def traverse(tree: Tree)(using Context): Unit = traverseChildren(tree) - }.traverse(tree) + val mode = if (isType) UnpickleMode.TypeTree else UnpickleMode.Term + val unpickler = new DottyUnpickler(bytes, mode) + unpickler.enter(Set.empty) - quotePickling.println(i"**** unpickled quote\n$tree") + val tree = unpickler.tree + QuotesCache(pickled) = tree + + // Make sure trees and positions are fully loaded + tree.foreachSubTree(identity) + + quotePickling.println(i"**** unpickled quote\n$tree") + + tree + } - tree } } diff --git a/compiler/src/dotty/tools/dotc/report.scala b/compiler/src/dotty/tools/dotc/report.scala index 00399ecbfd0a..38f2ab347c4c 100644 --- a/compiler/src/dotty/tools/dotc/report.scala +++ b/compiler/src/dotty/tools/dotc/report.scala @@ -4,13 +4,12 @@ import reporting._ import Diagnostic._ import util.{SourcePosition, NoSourcePosition, SrcPos} import core._ -import Contexts._, Symbols._, Decorators._ +import Contexts._, Flags.*, Symbols._, Decorators._ import config.SourceVersion import ast._ import config.Feature.sourceVersion import java.lang.System.currentTimeMillis - object report: /** For sending messages that are printed only if -verbose is set */ @@ -26,30 +25,18 @@ object report: def deprecationWarning(msg: Message, pos: 
SrcPos)(using Context): Unit = issueWarning(new DeprecationWarning(msg, pos.sourcePos)) - def deprecationWarning(msg: => String, pos: SrcPos)(using Context): Unit = - deprecationWarning(msg.toMessage, pos) - def migrationWarning(msg: Message, pos: SrcPos)(using Context): Unit = issueWarning(new MigrationWarning(msg, pos.sourcePos)) - def migrationWarning(msg: => String, pos: SrcPos)(using Context): Unit = - migrationWarning(msg.toMessage, pos) - def uncheckedWarning(msg: Message, pos: SrcPos)(using Context): Unit = issueWarning(new UncheckedWarning(msg, pos.sourcePos)) - def uncheckedWarning(msg: => String, pos: SrcPos)(using Context): Unit = - uncheckedWarning(msg.toMessage, pos) - def featureWarning(msg: Message, pos: SrcPos)(using Context): Unit = issueWarning(new FeatureWarning(msg, pos.sourcePos)) - def featureWarning(msg: => String, pos: SrcPos)(using Context): Unit = - featureWarning(msg.toMessage, pos) - def featureWarning(feature: String, featureDescription: => String, - featureUseSite: Symbol, required: Boolean, pos: SrcPos)(using Context): Unit = { - val req = if (required) "needs to" else "should" + featureUseSite: Symbol, required: Boolean, pos: SrcPos)(using Context): Unit = + val req = if required then "needs to" else "should" val fqname = s"scala.language.$feature" val explain = @@ -60,47 +47,48 @@ object report: |See the Scala docs for value $fqname for a discussion |why the feature $req be explicitly enabled.""".stripMargin - def msg = s"""$featureDescription $req be enabled - |by adding the import clause 'import $fqname' - |or by setting the compiler option -language:$feature.$explain""".stripMargin - if (required) error(msg, pos) - else issueWarning(new FeatureWarning(msg.toMessage, pos.sourcePos)) - } + def msg = em"""$featureDescription $req be enabled + |by adding the import clause 'import $fqname' + |or by setting the compiler option -language:$feature.$explain""" + if required then error(msg, pos) + else issueWarning(new FeatureWarning(msg, 
pos.sourcePos)) + end featureWarning def warning(msg: Message, pos: SrcPos)(using Context): Unit = issueWarning(new Warning(msg, addInlineds(pos))) + def warning(msg: Message)(using Context): Unit = + warning(msg, NoSourcePosition) + def warning(msg: => String, pos: SrcPos = NoSourcePosition)(using Context): Unit = warning(msg.toMessage, pos) - def error(msg: Message, pos: SrcPos)(using Context): Unit = + def error(msg: Message, pos: SrcPos = NoSourcePosition)(using Context): Unit = val fullPos = addInlineds(pos) ctx.reporter.report(new Error(msg, fullPos)) if ctx.settings.YdebugError.value then Thread.dumpStack() - def error(msg: => String, pos: SrcPos = NoSourcePosition)(using Context): Unit = + def error(msg: => String, pos: SrcPos)(using Context): Unit = error(msg.toMessage, pos) + def error(msg: => String)(using Context): Unit = + error(msg, NoSourcePosition) + def error(ex: TypeError, pos: SrcPos)(using Context): Unit = val fullPos = addInlineds(pos) ctx.reporter.report(new StickyError(ex.toMessage, fullPos)) if ctx.settings.YdebugError.value then Thread.dumpStack() + if ctx.settings.YdebugTypeError.value then ex.printStackTrace() def errorOrMigrationWarning(msg: Message, pos: SrcPos, from: SourceVersion)(using Context): Unit = if sourceVersion.isAtLeast(from) then if sourceVersion.isMigrating && sourceVersion.ordinal <= from.ordinal then migrationWarning(msg, pos) else error(msg, pos) - def errorOrMigrationWarning(msg: => String, pos: SrcPos, from: SourceVersion)(using Context): Unit = - errorOrMigrationWarning(msg.toMessage, pos, from) - def gradualErrorOrMigrationWarning(msg: Message, pos: SrcPos, warnFrom: SourceVersion, errorFrom: SourceVersion)(using Context): Unit = if sourceVersion.isAtLeast(errorFrom) then errorOrMigrationWarning(msg, pos, errorFrom) else if sourceVersion.isAtLeast(warnFrom) then warning(msg, pos) - def gradualErrorOrMigrationWarning(msg: => String, pos: SrcPos, warnFrom: SourceVersion, errorFrom: SourceVersion)(using Context): Unit 
= - gradualErrorOrMigrationWarning(msg.toMessage, pos, warnFrom, errorFrom) - def restrictionError(msg: Message, pos: SrcPos = NoSourcePosition)(using Context): Unit = error(msg.mapMsg("Implementation restriction: " + _), pos) @@ -140,4 +128,64 @@ object report: case Nil => pos recur(pos.sourcePos, tpd.enclosingInlineds) + private object messageRendering extends MessageRendering + + // Should only be called from Run#enrichErrorMessage. + def enrichErrorMessage(errorMessage: String)(using Context): String = try { + def formatExplain(pairs: List[(String, Any)]) = pairs.map((k, v) => f"$k%20s: $v").mkString("\n") + + val settings = ctx.settings.userSetSettings(ctx.settingsState).sortBy(_.name) + val tree = ctx.tree + val sym = tree.symbol + val pos = tree.sourcePos + val path = pos.source.path + val site = ctx.outersIterator.map(_.owner).filter(sym => !sym.exists || sym.isClass || sym.is(Method)).next() + + import untpd.* + extension (tree: Tree) def summaryString: String = tree match + case Literal(const) => s"Literal($const)" + case Ident(name) => s"Ident(${name.decode})" + case Select(qual, name) => s"Select(${qual.summaryString}, ${name.decode})" + case tree: NameTree => (if tree.isType then "type " else "") + tree.name.decode + case tree => s"${tree.className}${if tree.symbol.exists then s"(${tree.symbol})" else ""}" + + val info1 = formatExplain(List( + "while compiling" -> ctx.compilationUnit, + "during phase" -> ctx.phase.prevMega, + "mode" -> ctx.mode, + "library version" -> scala.util.Properties.versionString, + "compiler version" -> dotty.tools.dotc.config.Properties.versionString, + "settings" -> settings.map(s => if s.value == "" then s"${s.name} \"\"" else s"${s.name} ${s.value}").mkString(" "), + )) + val symbolInfos = if sym eq NoSymbol then List("symbol" -> sym) else List( + "symbol" -> sym.showLocated, + "symbol definition" -> s"${sym.showDcl} (a ${sym.className})", + "symbol package" -> sym.enclosingPackageClass.fullName, + "symbol owners" -> 
sym.showExtendedLocation, + ) + val info2 = formatExplain(List( + "tree" -> tree.summaryString, + "tree position" -> (if pos.exists then s"$path:${pos.line + 1}:${pos.column}" else s"$path:"), + "tree type" -> tree.typeOpt.show, + ) ::: symbolInfos ::: List( + "call site" -> s"${site.showLocated} in ${site.enclosingPackageClass}" + )) + val context_s = try + s""" == Source file context for tree position == + | + |${messageRendering.messageAndPos(Diagnostic.Error("", pos))}""".stripMargin + catch case _: Exception => "" + s""" + | $errorMessage + | + | An unhandled exception was thrown in the compiler. + | Please file a crash report here: + | https://github.com/lampepfl/dotty/issues/new/choose + | + |$info1 + | + |$info2 + | + |$context_s""".stripMargin + } catch case _: Throwable => errorMessage // don't introduce new errors trying to report errors, so swallow exceptions end report diff --git a/compiler/src/dotty/tools/dotc/reporting/Diagnostic.scala b/compiler/src/dotty/tools/dotc/reporting/Diagnostic.scala index a92da7821fab..624aa93924e8 100644 --- a/compiler/src/dotty/tools/dotc/reporting/Diagnostic.scala +++ b/compiler/src/dotty/tools/dotc/reporting/Diagnostic.scala @@ -9,7 +9,7 @@ import dotty.tools.dotc.core.Contexts._ import dotty.tools.dotc.interfaces.Diagnostic.{ERROR, INFO, WARNING} import dotty.tools.dotc.util.SourcePosition -import java.util.Optional +import java.util.{Collections, Optional, List => JList} import scala.util.chaining._ import core.Decorators.toMessage @@ -89,7 +89,7 @@ class Diagnostic( val msg: Message, val pos: SourcePosition, val level: Int -) extends Exception with interfaces.Diagnostic: +) extends interfaces.Diagnostic: private var verbose: Boolean = false def isVerbose: Boolean = verbose def setVerbose(): this.type = @@ -100,7 +100,8 @@ class Diagnostic( if (pos.exists && pos.source.exists) Optional.of(pos) else Optional.empty() override def message: String = msg.message.replaceAll("\u001B\\[[;\\d]*m", "") + override def 
diagnosticRelatedInformation: JList[interfaces.DiagnosticRelatedInformation] = + Collections.emptyList() override def toString: String = s"$getClass at $pos: $message" - override def getMessage(): String = message end Diagnostic diff --git a/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala b/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala index d9140a6309b8..fc679210db17 100644 --- a/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala +++ b/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala @@ -176,7 +176,7 @@ enum ErrorMessageID(val isActive: Boolean = true) extends java.lang.Enum[ErrorMe case JavaEnumParentArgsID // errorNumber: 160 case AlreadyDefinedID // errorNumber: 161 case CaseClassInInlinedCodeID // errorNumber: 162 - case OverrideTypeMismatchErrorID // errorNumber: 163 + case OverrideTypeMismatchErrorID extends ErrorMessageID(isActive = false) // errorNumber: 163 case OverrideErrorID // errorNumber: 164 case MatchableWarningID // errorNumber: 165 case CannotExtendFunctionID // errorNumber: 166 @@ -185,6 +185,16 @@ enum ErrorMessageID(val isActive: Boolean = true) extends java.lang.Enum[ErrorMe case TargetNameOnTopLevelClassID // errorNumber: 169 case NotClassTypeID // errorNumber 170 case MissingArgumentID // errorNumer 171 + case MissingImplicitArgumentID // errorNumber 172 + case CannotBeAccessedID // errorNumber 173 + case InlineGivenShouldNotBeFunctionID // errorNumber 174 + case ValueDiscardingID // errorNumber 175 + case UnusedNonUnitValueID // errorNumber 176 + case ConstrProxyShadowsID // errorNumber 177 + case MissingArgumentListID // errorNumber: 178 + case MatchTypeScrutineeCannotBeHigherKindedID // errorNumber: 179 + case AmbiguousExtensionMethodID // errorNumber 180 + case UnqualifiedCallToAnyRefMethodID // errorNumber: 181 def errorNumber = ordinal - 1 diff --git a/compiler/src/dotty/tools/dotc/reporting/Message.scala b/compiler/src/dotty/tools/dotc/reporting/Message.scala index 
9e397d606491..a1fe6773c1d2 100644 --- a/compiler/src/dotty/tools/dotc/reporting/Message.scala +++ b/compiler/src/dotty/tools/dotc/reporting/Message.scala @@ -2,17 +2,35 @@ package dotty.tools package dotc package reporting -import core.Contexts.*, core.Decorators.*, core.Mode +import core.* +import Contexts.*, Decorators.*, Symbols.*, Types.*, Flags.* +import printing.{RefinedPrinter, MessageLimiter, ErrorMessageLimiter} +import printing.Texts.Text +import printing.Formatting.hl import config.SourceVersion import scala.language.unsafeNulls import scala.annotation.threadUnsafe -object Message { - val nonSensicalStartTag: String = "" - val nonSensicalEndTag: String = "" - +/** ## Tips for error message generation + * + * - You can use the `em` interpolator for error messages. It's defined in core.Decorators. + * - You can also use a simple string argument for `error` or `warning` (not for the other variants), + * but the string should not be interpolated or composed of objects that require a + * Context for evaluation. + * - When embedding interpolated substrings defined elsewhere in error messages, + * use `i` and make sure they are defined as def's instead of vals. That way, the + * possibly expensive interpolation will performed only in the case where the message + * is eventually printed. Note: At least during typer, it's common for messages + * to be discarded without being printed. Also, by making them defs, you ensure that + * they will be evaluated in the Message context, which makes formatting safer + * and more robust. + * - For common messages, or messages that might require explanation, prefer defining + * a new `Message` class in file `messages.scala` and use that instead. The advantage is that these + * messages have unique IDs that can be referenced elsewhere. 
+ */ +object Message: def rewriteNotice(what: String, version: SourceVersion | Null = null, options: String = "")(using Context): String = if !ctx.mode.is(Mode.Interactive) then val sourceStr = if version != null then i"-source $version" else "" @@ -22,7 +40,188 @@ object Message { else i"$sourceStr $options" i"\n$what can be rewritten automatically under -rewrite $optionStr." else "" -} + + private type Recorded = Symbol | ParamRef | SkolemType + + private case class SeenKey(str: String, isType: Boolean) + + /** A class that records printed items of one of the types in `Recorded`, + * adds superscripts for disambiguations, and can explain recorded symbols + * in ` where` clause + */ + private class Seen(disambiguate: Boolean): + + val seen = new collection.mutable.HashMap[SeenKey, List[Recorded]]: + override def default(key: SeenKey) = Nil + + var nonSensical = false + + /** If false, stop all recordings */ + private var recordOK = disambiguate + + /** Clear all entries and stop further entries to be added */ + def disable() = + seen.clear() + recordOK = false + + /** Record an entry `entry` with given String representation `str` and a + * type/term namespace identified by `isType`. + * If the entry was not yet recorded, allocate the next superscript corresponding + * to the same string in the same name space. The first recording is the string proper + * and following recordings get consecutive superscripts starting with 2. + * @return The possibly superscripted version of `str`. + */ + def record(str: String, isType: Boolean, entry: Recorded)(using Context): String = + if !recordOK then return str + //println(s"recording $str, $isType, $entry") + + /** If `e1` is an alias of another class of the same name, return the other + * class symbol instead. This normalization avoids recording e.g. 
scala.List + * and scala.collection.immutable.List as two different types + */ + def followAlias(e1: Recorded): Recorded = e1 match { + case e1: Symbol if e1.isAliasType => + val underlying = e1.typeRef.underlyingClassRef(refinementOK = false).typeSymbol + if (underlying.name == e1.name) underlying else e1 + case _ => e1 + } + val key = SeenKey(str, isType) + val existing = seen(key) + lazy val dealiased = followAlias(entry) + + // alts: The alternatives in `existing` that are equal, or follow (an alias of) `entry` + var alts = existing.dropWhile(alt => dealiased ne followAlias(alt)) + if alts.isEmpty then + alts = entry :: existing + seen(key) = alts + + val suffix = alts.length match { + case 1 => "" + case n => n.toString.toCharArray.map { + case '0' => '⁰' + case '1' => '¹' + case '2' => '²' + case '3' => '³' + case '4' => '⁴' + case '5' => '⁵' + case '6' => '⁶' + case '7' => '⁷' + case '8' => '⁸' + case '9' => '⁹' + }.mkString + } + str + suffix + end record + + /** Create explanation for single `Recorded` type or symbol */ + private def explanation(entry: AnyRef)(using Context): String = + def boundStr(bound: Type, default: ClassSymbol, cmp: String) = + if (bound.isRef(default)) "" else i"$cmp $bound" + + def boundsStr(bounds: TypeBounds): String = { + val lo = boundStr(bounds.lo, defn.NothingClass, ">:") + val hi = boundStr(bounds.hi, defn.AnyClass, "<:") + if (lo.isEmpty) hi + else if (hi.isEmpty) lo + else s"$lo and $hi" + } + + def addendum(cat: String, info: Type): String = info match { + case bounds @ TypeBounds(lo, hi) if bounds ne TypeBounds.empty => + if (lo eq hi) i" which is an alias of $lo" + else i" with $cat ${boundsStr(bounds)}" + case _ => + "" + } + + entry match { + case param: TypeParamRef => + s"is a type variable${addendum("constraint", TypeComparer.bounds(param))}" + case param: TermParamRef => + s"is a reference to a value parameter" + case sym: Symbol => + val info = + if (ctx.gadt.contains(sym)) + sym.info & ctx.gadt.fullBounds(sym) + 
else + sym.info + s"is a ${ctx.printer.kindString(sym)}${sym.showExtendedLocation}${addendum("bounds", info)}" + case tp: SkolemType => + s"is an unknown value of type ${tp.widen.show}" + } + end explanation + + /** Produce a where clause with explanations for recorded iterms. + */ + def explanations(using Context): String = + def needsExplanation(entry: Recorded) = entry match { + case param: TypeParamRef => ctx.typerState.constraint.contains(param) + case param: ParamRef => false + case skolem: SkolemType => true + case sym: Symbol => + ctx.gadt.contains(sym) && ctx.gadt.fullBounds(sym) != TypeBounds.empty + } + + val toExplain: List[(String, Recorded)] = seen.toList.flatMap { kvs => + val res: List[(String, Recorded)] = kvs match { + case (key, entry :: Nil) => + if (needsExplanation(entry)) (key.str, entry) :: Nil else Nil + case (key, entries) => + for (alt <- entries) yield { + val tickedString = record(key.str, key.isType, alt) + (tickedString, alt) + } + } + res // help the inferrencer out + }.sortBy(_._1) + + def columnar(parts: List[(String, String)]): List[String] = { + lazy val maxLen = parts.map(_._1.length).max + parts.map { + case (leader, trailer) => + val variable = hl(leader) + s"""$variable${" " * (maxLen - leader.length)} $trailer""" + } + } + + val explainParts = toExplain.map { case (str, entry) => (str, explanation(entry)) } + val explainLines = columnar(explainParts) + if (explainLines.isEmpty) "" else i"where: $explainLines%\n %\n" + end explanations + end Seen + + /** Printer to be used when formatting messages */ + private class Printer(val seen: Seen, _ctx: Context) extends RefinedPrinter(_ctx): + + /** True if printer should a show source module instead of its module class */ + private def useSourceModule(sym: Symbol): Boolean = + sym.is(ModuleClass, butNot = Package) && sym.sourceModule.exists && !_ctx.settings.YdebugNames.value + + override def simpleNameString(sym: Symbol): String = + if useSourceModule(sym) then 
simpleNameString(sym.sourceModule) + else seen.record(super.simpleNameString(sym), sym.isType, sym) + + override def ParamRefNameString(param: ParamRef): String = + seen.record(super.ParamRefNameString(param), param.isInstanceOf[TypeParamRef], param) + + override def toTextRef(tp: SingletonType): Text = tp match + case tp: SkolemType => seen.record(tp.repr.toString, isType = true, tp) + case _ => super.toTextRef(tp) + + override def toText(tp: Type): Text = + if !tp.exists || tp.isErroneous then seen.nonSensical = true + tp match + case tp: TypeRef if useSourceModule(tp.symbol) => Str("object ") ~ super.toText(tp) + case _ => super.toText(tp) + + override def toText(sym: Symbol): Text = + sym.infoOrCompleter match + case _: ErrorType | TypeAlias(_: ErrorType) | NoType => seen.nonSensical = true + case _ => + super.toText(sym) + end Printer + +end Message /** A `Message` contains all semantic information necessary to easily * comprehend what caused the message to be logged. Each message can be turned @@ -39,9 +238,41 @@ object Message { * * @param errorId a unique id identifying the message, this will be * used to reference documentation online + * + * Messages modify the rendendering of interpolated strings in several ways: + * + * 1. The size of the printed code is limited with a MessafeLimiter. If the message + * would get too large or too deeply nested, a `...` is printed instead. + * 2. References to module classes are prefixed with `object ` for better recogniability. + * 3. A where clause is sometimes added which contains the following additional explanations: + * - Rerences are disambiguated: If a message contains occurrences of the same identifier + * representing different symbols, the duplicates are printed with superscripts + * and the where-clause explains where each symbol is located. + * - Uninstantiated variables are explained in the where-clause with additional + * info about their bounds. 
+ * - Skolems are explained with additional info about their underlying type. + * + * Messages inheriting from the NoDisambiguation trait or returned from the + * `noDisambiguation()` method skip point (3) above. This makes sense if the + * message already exolains where different occurrences of the same identifier + * are located. Examples are NamingMsgs such as double definition errors, + * overriding errors, and ambiguous implicit errors. + * + * We consciously made the design decision to disambiguate by default and disable + * disambiguation as an opt-in. The reason is that one usually does not consider all + * fine-grained details when writing an error message. If disambiguation is the default, + * some tests will show where clauses that look too noisy and that then can be disabled + * when needed. But if silence is the default, one usually does not realize that + * better info could be obtained by turning disambiguation on. */ -abstract class Message(val errorId: ErrorMessageID) { self => - import Message._ +abstract class Message(val errorId: ErrorMessageID)(using Context) { self => + import Message.* + + /** The kind of the error message, e.g. "Syntax" or "Type Mismatch". + * This will be printed as "$kind Error", "$kind Warning", etc, on the first + * line of the message. + */ + def kind: MessageKind /** The `msg` contains the diagnostic message e.g: * @@ -52,22 +283,27 @@ abstract class Message(val errorId: ErrorMessageID) { self => * `Diagnostic`. The message is given in raw form, with possible embedded * tags. */ - protected def msg: String - - /** The kind of the error message, e.g. "Syntax" or "Type Mismatch". - * This will be printed as "$kind Error", "$kind Warning", etc, on the first - * line of the message. - */ - def kind: MessageKind + protected def msg(using Context): String /** The explanation should provide a detailed description of why the error * occurred and use examples from the user's own code to illustrate how to * avoid these errors. 
It might contain embedded tags. */ - protected def explain: String + protected def explain(using Context): String - /** A message suffix that can be added for certain subclasses */ - protected def msgSuffix: String = "" + /** What gets printed after the message proper */ + protected def msgPostscript(using Context): String = + if ctx eq NoContext then "" + else ctx.printer match + case msgPrinter: Message.Printer => + myIsNonSensical = msgPrinter.seen.nonSensical + val addendum = msgPrinter.seen.explanations + msgPrinter.seen.disable() + // Clear entries and stop futher recording so that messages containing the current + // one don't repeat the explanations or use explanations from the msgPostscript. + if addendum.isEmpty then "" else "\n\n" ++ addendum + case _ => + "" /** Does this message have an explanation? * This is normally the same as `explain.nonEmpty` but can be overridden @@ -76,61 +312,69 @@ abstract class Message(val errorId: ErrorMessageID) { self => */ def canExplain: Boolean = explain.nonEmpty - private var myMsg: String | Null = null private var myIsNonSensical: Boolean = false - private def dropNonSensical(msg: String): String = - if msg.contains(nonSensicalStartTag) then - myIsNonSensical = true - // myMsg might be composed of several d"..." invocations -> nested - // nonsensical tags possible - msg - .replace(nonSensicalStartTag, "") - .replace(nonSensicalEndTag, "") - else msg + /** A message is non-sensical if it contains references to internally + * generated error types. Normally we want to suppress error messages + * referring to types like this because they look weird and are normally + * follow-up errors to something that was diagnosed before. 
+ */ + def isNonSensical: Boolean = { message; myIsNonSensical } + + private var disambiguate: Boolean = true + + def withoutDisambiguation(): this.type = + disambiguate = false + this - /** The message with potential embedded tags */ - def rawMessage = message + private def inMessageContext(disambiguate: Boolean)(op: Context ?=> String): String = + if ctx eq NoContext then op + else + val msgContext = ctx.printer match + case _: Message.Printer => ctx + case _ => + val seen = Seen(disambiguate) + val ctx1 = ctx.fresh.setPrinterFn(Message.Printer(seen, _)) + if !ctx1.property(MessageLimiter).isDefined then + ctx1.setProperty(MessageLimiter, ErrorMessageLimiter()) + ctx1 + op(using msgContext) /** The message to report. tags are filtered out */ - @threadUnsafe lazy val message: String = dropNonSensical(msg + msgSuffix) + @threadUnsafe lazy val message: String = + inMessageContext(disambiguate)(msg + msgPostscript) /** The explanation to report. tags are filtered out */ - @threadUnsafe lazy val explanation: String = dropNonSensical(explain) - - /** A message is non-sensical if it contains references to - * tags. Such tags are inserted by the error diagnostic framework if a - * message contains references to internally generated error types. Normally - * we want to suppress error messages referring to types like this because - * they look weird and are normally follow-up errors to something that was - * diagnosed before. - */ - def isNonSensical: Boolean = { message; myIsNonSensical } + @threadUnsafe lazy val explanation: String = + inMessageContext(disambiguate = false)(explain) /** The implicit `Context` in messages is a large thing that we don't want * persisted. This method gets around that by duplicating the message, * forcing its `msg` and `explanation` vals and dropping the implicit context * that was captured in the original message. 
*/ - def persist: Message = new Message(errorId) { - val kind = self.kind - val msg = self.msg - val explain = self.explain + def persist: Message = new Message(errorId)(using NoContext): + val kind = self.kind + private val persistedMsg = self.message + private val persistedExplain = self.explanation + def msg(using Context) = persistedMsg + def explain(using Context) = persistedExplain override val canExplain = self.canExplain - } + override def isNonSensical = self.isNonSensical def append(suffix: => String): Message = mapMsg(_ ++ suffix) + def prepend(prefix: => String): Message = mapMsg(prefix ++ _) def mapMsg(f: String => String): Message = new Message(errorId): - val kind = self.kind - def msg = f(self.msg) - def explain = self.explain + val kind = self.kind + def msg(using Context) = f(self.msg) + def explain(using Context) = self.explain override def canExplain = self.canExplain def appendExplanation(suffix: => String): Message = new Message(errorId): - val kind = self.kind - def msg = self.msg - def explain = self.explain ++ suffix + val kind = self.kind + def msg(using Context) = self.msg + def explain(using Context) = self.explain ++ suffix override def canExplain = true /** Override with `true` for messages that should always be shown even if their @@ -143,10 +387,14 @@ abstract class Message(val errorId: ErrorMessageID) { self => override def toString = msg } +/** A marker trait that suppresses generation of `where` clause for disambiguations */ +trait NoDisambiguation extends Message: + withoutDisambiguation() + /** The fallback `Message` containing no explanation and having no `kind` */ -class NoExplanation(msgFn: => String) extends Message(ErrorMessageID.NoExplanationID) { - def msg: String = msgFn - def explain: String = "" +final class NoExplanation(msgFn: Context ?=> String)(using Context) extends Message(ErrorMessageID.NoExplanationID) { + def msg(using Context): String = msgFn + def explain(using Context): String = "" val kind: MessageKind = 
MessageKind.NoKind override def toString(): String = msg diff --git a/compiler/src/dotty/tools/dotc/reporting/Reporter.scala b/compiler/src/dotty/tools/dotc/reporting/Reporter.scala index 497e77ae4a7c..f5aadac27296 100644 --- a/compiler/src/dotty/tools/dotc/reporting/Reporter.scala +++ b/compiler/src/dotty/tools/dotc/reporting/Reporter.scala @@ -14,7 +14,7 @@ import dotty.tools.dotc.util.NoSourcePosition import java.io.{BufferedReader, PrintWriter} import scala.annotation.internal.sharable import scala.collection.mutable -import core.Decorators.toMessage +import core.Decorators.em object Reporter { /** Convert a SimpleReporter into a real Reporter */ @@ -218,8 +218,8 @@ abstract class Reporter extends interfaces.ReporterResult { def summarizeUnreportedWarnings()(using Context): Unit = for (settingName, count) <- unreportedWarnings do val were = if count == 1 then "was" else "were" - val msg = s"there $were ${countString(count, settingName.tail + " warning")}; re-run with $settingName for details" - report(Warning(msg.toMessage, NoSourcePosition)) + val msg = em"there $were ${countString(count, settingName.tail + " warning")}; re-run with $settingName for details" + report(Warning(msg, NoSourcePosition)) /** Print the summary of warnings and errors */ def printSummary()(using Context): Unit = { diff --git a/compiler/src/dotty/tools/dotc/reporting/ThrowingReporter.scala b/compiler/src/dotty/tools/dotc/reporting/ThrowingReporter.scala index ad47a9d30536..153212522541 100644 --- a/compiler/src/dotty/tools/dotc/reporting/ThrowingReporter.scala +++ b/compiler/src/dotty/tools/dotc/reporting/ThrowingReporter.scala @@ -6,12 +6,16 @@ import core.Contexts._ import Diagnostic.Error /** - * This class implements a Reporter that throws all errors and sends warnings and other - * info to the underlying reporter. + * This class implements a Reporter that throws all errors as UnhandledError exceptions + * and sends warnings and other info to the underlying reporter. 
*/ class ThrowingReporter(reportInfo: Reporter) extends Reporter { def doReport(dia: Diagnostic)(using Context): Unit = dia match { - case _: Error => throw dia + case dia: Error => throw UnhandledError(dia) case _ => reportInfo.doReport(dia) } } + +class UnhandledError(val diagnostic: Error) extends Exception: + override def getMessage = diagnostic.message + diff --git a/compiler/src/dotty/tools/dotc/reporting/WConf.scala b/compiler/src/dotty/tools/dotc/reporting/WConf.scala index 21e10e894e0b..af1a5c0f0f47 100644 --- a/compiler/src/dotty/tools/dotc/reporting/WConf.scala +++ b/compiler/src/dotty/tools/dotc/reporting/WConf.scala @@ -18,7 +18,7 @@ enum MessageFilter: case Feature => message.isInstanceOf[Diagnostic.FeatureWarning] case Unchecked => message.isInstanceOf[Diagnostic.UncheckedWarning] case MessagePattern(pattern) => - val noHighlight = message.msg.rawMessage.replaceAll("\\e\\[[\\d;]*[^\\d;]","") + val noHighlight = message.msg.message.replaceAll("\\e\\[[\\d;]*[^\\d;]","") pattern.findFirstIn(noHighlight).nonEmpty case MessageID(errorId) => message.msg.errorId == errorId case None => false diff --git a/compiler/src/dotty/tools/dotc/reporting/messages.scala b/compiler/src/dotty/tools/dotc/reporting/messages.scala index 56375d881f97..d205b816214c 100644 --- a/compiler/src/dotty/tools/dotc/reporting/messages.scala +++ b/compiler/src/dotty/tools/dotc/reporting/messages.scala @@ -15,9 +15,10 @@ import printing.Formatting import ErrorMessageID._ import ast.Trees import config.{Feature, ScalaVersion} -import typer.ErrorReporting.{err, matchReductionAddendum} +import typer.ErrorReporting.{err, matchReductionAddendum, substitutableTypeSymbolsInScope} import typer.ProtoTypes.ViewProto -import typer.Implicits.Candidate +import typer.Implicits.* +import typer.Inferencing import scala.util.control.NonFatal import StdNames.nme import printing.Formatting.hl @@ -25,6 +26,8 @@ import ast.Trees._ import ast.untpd import ast.tpd import transform.SymUtils._ +import 
scala.util.matching.Regex +import java.util.regex.Matcher.quoteReplacement import cc.CaptureSet.IdentityCaptRefMap /** Messages @@ -40,211 +43,212 @@ import cc.CaptureSet.IdentityCaptRefMap * ``` */ - abstract class SyntaxMsg(errorId: ErrorMessageID) extends Message(errorId): - def kind = MessageKind.Syntax +abstract class SyntaxMsg(errorId: ErrorMessageID)(using Context) extends Message(errorId): + def kind = MessageKind.Syntax - abstract class TypeMsg(errorId: ErrorMessageID) extends Message(errorId): - def kind = MessageKind.Type +abstract class TypeMsg(errorId: ErrorMessageID)(using Context) extends Message(errorId): + def kind = MessageKind.Type - trait ShowMatchTrace(tps: Type*)(using Context) extends Message: - override def msgSuffix: String = matchReductionAddendum(tps*) +trait ShowMatchTrace(tps: Type*)(using Context) extends Message: + override def msgPostscript(using Context): String = + super.msgPostscript ++ matchReductionAddendum(tps*) - abstract class TypeMismatchMsg(found: Type, expected: Type)(errorId: ErrorMessageID)(using Context) - extends Message(errorId), ShowMatchTrace(found, expected): - def kind = MessageKind.TypeMismatch - def explain = err.whyNoMatchStr(found, expected) - override def canExplain = true +abstract class TypeMismatchMsg(found: Type, expected: Type)(errorId: ErrorMessageID)(using Context) +extends Message(errorId), ShowMatchTrace(found, expected): + def kind = MessageKind.TypeMismatch + def explain(using Context) = err.whyNoMatchStr(found, expected) + override def canExplain = true - abstract class NamingMsg(errorId: ErrorMessageID) extends Message(errorId): - def kind = MessageKind.Naming +abstract class NamingMsg(errorId: ErrorMessageID)(using Context) extends Message(errorId), NoDisambiguation: + def kind = MessageKind.Naming - abstract class DeclarationMsg(errorId: ErrorMessageID) extends Message(errorId): - def kind = MessageKind.Declaration +abstract class DeclarationMsg(errorId: ErrorMessageID)(using Context) extends 
Message(errorId): + def kind = MessageKind.Declaration - /** A simple not found message (either for idents, or member selection. - * Messages of this class are sometimes dropped in favor of other, more - * specific messages. - */ - abstract class NotFoundMsg(errorId: ErrorMessageID) extends Message(errorId): - def kind = MessageKind.NotFound - def name: Name - - abstract class PatternMatchMsg(errorId: ErrorMessageID) extends Message(errorId): - def kind = MessageKind.PatternMatch +/** A simple not found message (either for idents, or member selection. + * Messages of this class are sometimes dropped in favor of other, more + * specific messages. + */ +abstract class NotFoundMsg(errorId: ErrorMessageID)(using Context) extends Message(errorId): + def kind = MessageKind.NotFound + def name: Name - abstract class CyclicMsg(errorId: ErrorMessageID) extends Message(errorId): - def kind = MessageKind.Cyclic +abstract class PatternMatchMsg(errorId: ErrorMessageID)(using Context) extends Message(errorId): + def kind = MessageKind.PatternMatch - abstract class ReferenceMsg(errorId: ErrorMessageID) extends Message(errorId): - def kind = MessageKind.Reference +abstract class CyclicMsg(errorId: ErrorMessageID)(using Context) extends Message(errorId): + def kind = MessageKind.Cyclic - abstract class EmptyCatchOrFinallyBlock(tryBody: untpd.Tree, errNo: ErrorMessageID)(using Context) - extends SyntaxMsg(errNo) { - def explain = { - val tryString = tryBody match { - case Block(Nil, untpd.EmptyTree) => "{}" - case _ => tryBody.show - } +abstract class ReferenceMsg(errorId: ErrorMessageID)(using Context) extends Message(errorId): + def kind = MessageKind.Reference - val code1 = - s"""|import scala.util.control.NonFatal - | - |try $tryString catch { - | case NonFatal(e) => ??? - |}""".stripMargin - - val code2 = - s"""|try $tryString finally { - | // perform your cleanup here! 
- |}""".stripMargin - - em"""|A ${hl("try")} expression should be followed by some mechanism to handle any exceptions - |thrown. Typically a ${hl("catch")} expression follows the ${hl("try")} and pattern matches - |on any expected exceptions. For example: - | - |$code1 - | - |It is also possible to follow a ${hl("try")} immediately by a ${hl("finally")} - letting the - |exception propagate - but still allowing for some clean up in ${hl("finally")}: - | - |$code2 - | - |It is recommended to use the ${hl("NonFatal")} extractor to catch all exceptions as it - |correctly handles transfer functions like ${hl("return")}.""" +abstract class EmptyCatchOrFinallyBlock(tryBody: untpd.Tree, errNo: ErrorMessageID)(using Context) +extends SyntaxMsg(errNo) { + def explain(using Context) = { + val tryString = tryBody match { + case Block(Nil, untpd.EmptyTree) => "{}" + case _ => tryBody.show } - } - - class EmptyCatchBlock(tryBody: untpd.Tree)(using Context) - extends EmptyCatchOrFinallyBlock(tryBody, EmptyCatchBlockID) { - def msg = - em"""|The ${hl("catch")} block does not contain a valid expression, try - |adding a case like - ${hl("case e: Exception =>")} to the block""" - } - - class EmptyCatchAndFinallyBlock(tryBody: untpd.Tree)(using Context) - extends EmptyCatchOrFinallyBlock(tryBody, EmptyCatchAndFinallyBlockID) { - def msg = - em"""|A ${hl("try")} without ${hl("catch")} or ${hl("finally")} is equivalent to putting - |its body in a block; no exceptions are handled.""" - } - class DeprecatedWithOperator()(using Context) - extends SyntaxMsg(DeprecatedWithOperatorID) { - def msg = - em"""${hl("with")} as a type operator has been deprecated; use ${hl("&")} instead""" - def explain = - em"""|Dotty introduces intersection types - ${hl("&")} types. These replace the - |use of the ${hl("with")} keyword. 
There are a few differences in - |semantics between intersection types and using ${hl("with")}.""" - } - - class CaseClassMissingParamList(cdef: untpd.TypeDef)(using Context) - extends SyntaxMsg(CaseClassMissingParamListID) { - def msg = - em"""|A ${hl("case class")} must have at least one parameter list""" - - def explain = - em"""|${cdef.name} must have at least one parameter list, if you would rather - |have a singleton representation of ${cdef.name}, use a "${hl("case object")}". - |Or, add an explicit ${hl("()")} as a parameter list to ${cdef.name}.""" - } - - class AnonymousFunctionMissingParamType(param: untpd.ValDef, - tree: untpd.Function, - pt: Type) - (using Context) - extends TypeMsg(AnonymousFunctionMissingParamTypeID) { - def msg = { - val ofFun = - if param.name.is(WildcardParamName) - || (MethodType.syntheticParamNames(tree.args.length + 1) contains param.name) - then i" of expanded function:\n$tree" - else "" + val code1 = + s"""|import scala.util.control.NonFatal + | + |try $tryString catch { + | case NonFatal(e) => ??? + |}""".stripMargin - val inferred = - if (pt == WildcardType) "" - else i"\nWhat I could infer was: $pt" + val code2 = + s"""|try $tryString finally { + | // perform your cleanup here! + |}""".stripMargin - i"""Missing parameter type - | - |I could not infer the type of the parameter ${param.name}$ofFun.$inferred""" - } + i"""|A ${hl("try")} expression should be followed by some mechanism to handle any exceptions + |thrown. Typically a ${hl("catch")} expression follows the ${hl("try")} and pattern matches + |on any expected exceptions. 
For example: + | + |$code1 + | + |It is also possible to follow a ${hl("try")} immediately by a ${hl("finally")} - letting the + |exception propagate - but still allowing for some clean up in ${hl("finally")}: + | + |$code2 + | + |It is recommended to use the ${hl("NonFatal")} extractor to catch all exceptions as it + |correctly handles transfer functions like ${hl("return")}.""" + } +} + +class EmptyCatchBlock(tryBody: untpd.Tree)(using Context) +extends EmptyCatchOrFinallyBlock(tryBody, EmptyCatchBlockID) { + def msg(using Context) = + i"""|The ${hl("catch")} block does not contain a valid expression, try + |adding a case like - ${hl("case e: Exception =>")} to the block""" +} + +class EmptyCatchAndFinallyBlock(tryBody: untpd.Tree)(using Context) +extends EmptyCatchOrFinallyBlock(tryBody, EmptyCatchAndFinallyBlockID) { + def msg(using Context) = + i"""|A ${hl("try")} without ${hl("catch")} or ${hl("finally")} is equivalent to putting + |its body in a block; no exceptions are handled.""" +} + +class DeprecatedWithOperator()(using Context) +extends SyntaxMsg(DeprecatedWithOperatorID) { + def msg(using Context) = + i"""${hl("with")} as a type operator has been deprecated; use ${hl("&")} instead""" + def explain(using Context) = + i"""|Dotty introduces intersection types - ${hl("&")} types. These replace the + |use of the ${hl("with")} keyword. There are a few differences in + |semantics between intersection types and using ${hl("with")}.""" +} + +class CaseClassMissingParamList(cdef: untpd.TypeDef)(using Context) +extends SyntaxMsg(CaseClassMissingParamListID) { + def msg(using Context) = + i"""|A ${hl("case class")} must have at least one parameter list""" + + def explain(using Context) = + i"""|${cdef.name} must have at least one parameter list, if you would rather + |have a singleton representation of ${cdef.name}, use a "${hl("case object")}". 
+ |Or, add an explicit ${hl("()")} as a parameter list to ${cdef.name}.""" +} + +class AnonymousFunctionMissingParamType(param: untpd.ValDef, + tree: untpd.Function, + pt: Type) + (using Context) +extends TypeMsg(AnonymousFunctionMissingParamTypeID) { + def msg(using Context) = { + val ofFun = + if param.name.is(WildcardParamName) + || (MethodType.syntheticParamNames(tree.args.length + 1) contains param.name) + then i" of expanded function:\n$tree" + else "" - def explain = "" - } + val inferred = + if (pt == WildcardType) "" + else i"\nWhat I could infer was: $pt" - class WildcardOnTypeArgumentNotAllowedOnNew()(using Context) - extends SyntaxMsg(WildcardOnTypeArgumentNotAllowedOnNewID) { - def msg = "Type argument must be fully defined" - def explain = - val code1: String = - """ - |object TyperDemo { - | class Team[A] - | val team = new Team[?] - |} - """.stripMargin + i"""Missing parameter type + | + |I could not infer the type of the parameter ${param.name}$ofFun.$inferred""" + } + + def explain(using Context) = "" +} + +class WildcardOnTypeArgumentNotAllowedOnNew()(using Context) +extends SyntaxMsg(WildcardOnTypeArgumentNotAllowedOnNewID) { + def msg(using Context) = "Type argument must be fully defined" + def explain(using Context) = + val code1: String = + """ + |object TyperDemo { + | class Team[A] + | val team = new Team[?] + |} + """.stripMargin - val code2: String = - """ - |object TyperDemo { - | class Team[A] - | val team = new Team[Int] - |} - """.stripMargin - em"""|Wildcard on arguments is not allowed when declaring a new type. - | - |Given the following example: - | - |$code1 - | - |You must complete all the type parameters, for instance: - | - |$code2 """ - } + val code2: String = + """ + |object TyperDemo { + | class Team[A] + | val team = new Team[Int] + |} + """.stripMargin + i"""|Wildcard on arguments is not allowed when declaring a new type. 
+ | + |Given the following example: + | + |$code1 + | + |You must complete all the type parameters, for instance: + | + |$code2 """ +} - // Type Errors ------------------------------------------------------------ // - class DuplicateBind(bind: untpd.Bind, tree: untpd.CaseDef)(using Context) - extends NamingMsg(DuplicateBindID) { - def msg = em"duplicate pattern variable: ${bind.name}" +// Type Errors ------------------------------------------------------------ // +class DuplicateBind(bind: untpd.Bind, tree: untpd.CaseDef)(using Context) +extends NamingMsg(DuplicateBindID) { + def msg(using Context) = i"duplicate pattern variable: ${bind.name}" - def explain = { - val pat = tree.pat.show - val guard = tree.guard match { - case untpd.EmptyTree => "" - case guard => s"if ${guard.show}" - } + def explain(using Context) = { + val pat = tree.pat.show + val guard = tree.guard match + case untpd.EmptyTree => "" + case guard => s"if ${guard.show}" - val body = tree.body match { - case Block(Nil, untpd.EmptyTree) => "" - case body => s" ${body.show}" - } + val body = tree.body match { + case Block(Nil, untpd.EmptyTree) => "" + case body => s" ${body.show}" + } - val caseDef = s"case $pat$guard => $body" + val caseDef = s"case $pat$guard => $body" - em"""|For each ${hl("case")} bound variable names have to be unique. In: - | - |$caseDef - | - |${bind.name} is not unique. Rename one of the bound variables!""" - } + i"""|For each ${hl("case")} bound variable names have to be unique. In: + | + |$caseDef + | + |${bind.name} is not unique. Rename one of the bound variables!""" } +} - class MissingIdent(tree: untpd.Ident, treeKind: String, val name: Name)(using Context) - extends NotFoundMsg(MissingIdentID) { - def msg = em"Not found: $treeKind$name" - def explain = { - em"""|The identifier for `$treeKind$name` is not bound, that is, - |no declaration for this identifier can be found. 
- |That can happen, for example, if `$name` or its declaration has either been - |misspelt or if an import is missing.""" - } +class MissingIdent(tree: untpd.Ident, treeKind: String, val name: Name)(using Context) +extends NotFoundMsg(MissingIdentID) { + def msg(using Context) = i"Not found: $treeKind$name" + def explain(using Context) = { + i"""|The identifier for `$treeKind$name` is not bound, that is, + |no declaration for this identifier can be found. + |That can happen, for example, if `$name` or its declaration has either been + |misspelt or if an import is missing.""" } +} - class TypeMismatch(found: Type, expected: Type, inTree: Option[untpd.Tree], addenda: => String*)(using Context) - extends TypeMismatchMsg(found, expected)(TypeMismatchID): +class TypeMismatch(found: Type, expected: Type, inTree: Option[untpd.Tree], addenda: => String*)(using Context) + extends TypeMismatchMsg(found, expected)(TypeMismatchID): + def msg(using Context) = // replace constrained TypeParamRefs and their typevars by their bounds where possible // and the bounds are not f-bounds. 
// The idea is that if the bounds are also not-subtypes of each other to report @@ -272,2265 +276,2633 @@ import cc.CaptureSet.IdentityCaptRefMap case _ => mapOver(tp) - def msg = - val found1 = reported(found) - reported.setVariance(-1) - val expected1 = reported(expected) - val (found2, expected2) = - if (found1 frozen_<:< expected1) || reported.fbounded then (found, expected) - else (found1, expected1) - val postScript = addenda.find(!_.isEmpty) match - case Some(p) => p - case None => - if expected.isTopType || found.isBottomType - then "" - else ctx.typer.importSuggestionAddendum(ViewProto(found.widen, expected)) - val (where, printCtx) = Formatting.disambiguateTypes(found2, expected2) - val whereSuffix = if (where.isEmpty) where else s"\n\n$where" - val (foundStr, expectedStr) = Formatting.typeDiff(found2, expected2)(using printCtx) - s"""|Found: $foundStr - |Required: $expectedStr""".stripMargin - + whereSuffix + postScript - - override def explain = - val treeStr = inTree.map(x => s"\nTree: ${x.show}").getOrElse("") - treeStr + "\n" + super.explain - - end TypeMismatch - - class NotAMember(site: Type, val name: Name, selected: String, addendum: => String = "")(using Context) - extends NotFoundMsg(NotAMemberID), ShowMatchTrace(site) { - //println(i"site = $site, decls = ${site.decls}, source = ${site.typeSymbol.sourceFile}") //DEBUG - - def msg = { - import core.Flags._ - val maxDist = 3 // maximal number of differences to be considered for a hint - val missing = name.show - - // The symbols of all non-synthetic, non-private members of `site` - // that are of the same type/term kind as the missing member. 
- def candidates: Set[Symbol] = - for - bc <- site.widen.baseClasses.toSet - sym <- bc.info.decls.filter(sym => - sym.isType == name.isTypeName - && !sym.isConstructor - && !sym.flagsUNSAFE.isOneOf(Synthetic | Private)) - yield sym - - // Calculate Levenshtein distance - def distance(s1: String, s2: String): Int = - val dist = Array.ofDim[Int](s2.length + 1, s1.length + 1) - for - j <- 0 to s2.length - i <- 0 to s1.length - do - dist(j)(i) = - if j == 0 then i - else if i == 0 then j - else if s2(j - 1) == s1(i - 1) then dist(j - 1)(i - 1) - else (dist(j - 1)(i) min dist(j)(i - 1) min dist(j - 1)(i - 1)) + 1 - dist(s2.length)(s1.length) - - // A list of possible candidate symbols with their Levenstein distances - // to the name of the missing member - def closest: List[(Int, Symbol)] = candidates - .toList - .map(sym => (distance(sym.name.show, missing), sym)) - .filter((d, sym) => d <= maxDist && d < missing.length && d < sym.name.show.length) - .sortBy((d, sym) => (d, sym.name.show)) // sort by distance first, alphabetically second - - val enumClause = - if ((name eq nme.values) || (name eq nme.valueOf)) && site.classSymbol.companionClass.isEnumClass then - val kind = if name eq nme.values then i"${nme.values} array" else i"${nme.valueOf} lookup method" - // an assumption is made here that the values and valueOf methods were not generated - // because the enum defines non-singleton cases - i""" - |Although ${site.classSymbol.companionClass} is an enum, it has non-singleton cases, - |meaning a $kind is not defined""" - else - "" - - def prefixEnumClause(addendum: String) = - if enumClause.nonEmpty then s".$enumClause$addendum" else addendum - - val finalAddendum = - if addendum.nonEmpty then prefixEnumClause(addendum) - else closest match - case (d, sym) :: _ => - val siteName = site match - case site: NamedType => site.name.show - case site => i"$site" - val showName = - // Add .type to the name if it is a module - if sym.is(ModuleClass) then 
s"${sym.name.show}.type" - else sym.name.show - s" - did you mean $siteName.$showName?$enumClause" - case Nil => prefixEnumClause("") - - ex"$selected $name is not a member of ${site.widen}$finalAddendum" - } - - def explain = "" - } - - class EarlyDefinitionsNotSupported()(using Context) - extends SyntaxMsg(EarlyDefinitionsNotSupportedID) { - def msg = "Early definitions are not supported; use trait parameters instead" - - def explain = { - val code1 = - """|trait Logging { - | val f: File - | f.open() - | onExit(f.close()) - | def log(msg: String) = f.write(msg) - |} - | - |class B extends Logging { - | val f = new File("log.data") // triggers a NullPointerException - |} - | - |// early definition gets around the NullPointerException - |class C extends { - | val f = new File("log.data") - |} with Logging""".stripMargin - - val code2 = - """|trait Logging(f: File) { - | f.open() - | onExit(f.close()) - | def log(msg: String) = f.write(msg) - |} - | - |class C extends Logging(new File("log.data"))""".stripMargin - - em"""|Earlier versions of Scala did not support trait parameters and "early - |definitions" (also known as "early initializers") were used as an alternative. - | - |Example of old syntax: - | - |$code1 - | - |The above code can now be written as: - | - |$code2 - |""" - } - } - - class TopLevelImplicitClass(cdef: untpd.TypeDef)(using Context) - extends SyntaxMsg(TopLevelImplicitClassID) { - def msg = em"""An ${hl("implicit class")} may not be top-level""" - - def explain = { - val TypeDef(name, impl @ Template(constr0, parents, self, _)) = cdef: @unchecked - val exampleArgs = - if(constr0.termParamss.isEmpty) "..." 
- else constr0.termParamss(0).map(_.withMods(untpd.Modifiers()).show).mkString(", ") - def defHasBody[T] = impl.body.exists(!_.isEmpty) - val exampleBody = if (defHasBody) "{\n ...\n }" else "" - em"""|There may not be any method, member or object in scope with the same name as - |the implicit class and a case class automatically gets a companion object with - |the same name created by the compiler which would cause a naming conflict if it - |were allowed. - | | - |To resolve the conflict declare ${cdef.name} inside of an ${hl("object")} then import the class - |from the object at the use site if needed, for example: - | - |object Implicits { - | implicit class ${cdef.name}($exampleArgs)$exampleBody - |} - | - |// At the use site: - |import Implicits.${cdef.name}""" - } - } - - class ImplicitCaseClass(cdef: untpd.TypeDef)(using Context) - extends SyntaxMsg(ImplicitCaseClassID) { - def msg = em"""A ${hl("case class")} may not be defined as ${hl("implicit")}""" - - def explain = - em"""|Implicit classes may not be case classes. Instead use a plain class: - | - |implicit class ${cdef.name}... 
- | - |""" - } + val found1 = reported(found) + reported.setVariance(-1) + val expected1 = reported(expected) + val (found2, expected2) = + if (found1 frozen_<:< expected1) || reported.fbounded then (found, expected) + else (found1, expected1) + val (foundStr, expectedStr) = Formatting.typeDiff(found2, expected2) + i"""|Found: $foundStr + |Required: $expectedStr""" + end msg + + override def msgPostscript(using Context) = + def importSuggestions = + if expected.isTopType || found.isBottomType then "" + else ctx.typer.importSuggestionAddendum(ViewProto(found.widen, expected)) + super.msgPostscript + ++ addenda.dropWhile(_.isEmpty).headOption.getOrElse(importSuggestions) + + override def explain(using Context) = + val treeStr = inTree.map(x => s"\nTree: ${x.show}").getOrElse("") + treeStr + "\n" + super.explain + +end TypeMismatch + +class NotAMember(site: Type, val name: Name, selected: String, addendum: => String = "")(using Context) +extends NotFoundMsg(NotAMemberID), ShowMatchTrace(site) { + //println(i"site = $site, decls = ${site.decls}, source = ${site.typeSymbol.sourceFile}") //DEBUG + + def msg(using Context) = { + import core.Flags._ + val maxDist = 3 // maximal number of differences to be considered for a hint + val missing = name.show + + // The symbols of all non-synthetic, non-private members of `site` + // that are of the same type/term kind as the missing member. 
+ def candidates: Set[Symbol] = + for + bc <- site.widen.baseClasses.toSet + sym <- bc.info.decls.filter(sym => + sym.isType == name.isTypeName + && !sym.isConstructor + && !sym.flagsUNSAFE.isOneOf(Synthetic | Private)) + yield sym + + // Calculate Levenshtein distance + def distance(s1: String, s2: String): Int = + val dist = Array.ofDim[Int](s2.length + 1, s1.length + 1) + for + j <- 0 to s2.length + i <- 0 to s1.length + do + dist(j)(i) = + if j == 0 then i + else if i == 0 then j + else if s2(j - 1) == s1(i - 1) then dist(j - 1)(i - 1) + else (dist(j - 1)(i) min dist(j)(i - 1) min dist(j - 1)(i - 1)) + 1 + dist(s2.length)(s1.length) + + // A list of possible candidate symbols with their Levenstein distances + // to the name of the missing member + def closest: List[(Int, Symbol)] = candidates + .toList + .map(sym => (distance(sym.name.show, missing), sym)) + .filter((d, sym) => d <= maxDist && d < missing.length && d < sym.name.show.length) + .sortBy((d, sym) => (d, sym.name.show)) // sort by distance first, alphabetically second + + val enumClause = + if ((name eq nme.values) || (name eq nme.valueOf)) && site.classSymbol.companionClass.isEnumClass then + val kind = if name eq nme.values then i"${nme.values} array" else i"${nme.valueOf} lookup method" + // an assumption is made here that the values and valueOf methods were not generated + // because the enum defines non-singleton cases + i""" + |Although ${site.classSymbol.companionClass} is an enum, it has non-singleton cases, + |meaning a $kind is not defined""" + else + "" - class ImplicitClassPrimaryConstructorArity()(using Context) - extends SyntaxMsg(ImplicitClassPrimaryConstructorArityID){ - def msg = "Implicit classes must accept exactly one primary constructor parameter" - def explain = { - val example = "implicit class RichDate(date: java.util.Date)" - em"""Implicit classes may only take one non-implicit argument in their constructor. 
For example: + def prefixEnumClause(addendum: String) = + if enumClause.nonEmpty then s".$enumClause$addendum" else addendum + + val finalAddendum = + if addendum.nonEmpty then prefixEnumClause(addendum) + else closest match + case (d, sym) :: _ => + val siteName = site match + case site: NamedType => site.name.show + case site => i"$site" + val showName = + // Add .type to the name if it is a module + if sym.is(ModuleClass) then s"${sym.name.show}.type" + else sym.name.show + s" - did you mean $siteName.$showName?$enumClause" + case Nil => prefixEnumClause("") + + i"$selected $name is not a member of ${site.widen}$finalAddendum" + } + + def explain(using Context) = "" +} + +class EarlyDefinitionsNotSupported()(using Context) +extends SyntaxMsg(EarlyDefinitionsNotSupportedID) { + def msg(using Context) = "Early definitions are not supported; use trait parameters instead" + + def explain(using Context) = { + val code1 = + """|trait Logging { + | val f: File + | f.open() + | onExit(f.close()) + | def log(msg: String) = f.write(msg) + |} | - | $example + |class B extends Logging { + | val f = new File("log.data") // triggers a NullPointerException + |} | - |While it’s possible to create an implicit class with more than one non-implicit argument, - |such classes aren’t used during implicit lookup. 
- |""" - } - } - - class ObjectMayNotHaveSelfType(mdef: untpd.ModuleDef)(using Context) - extends SyntaxMsg(ObjectMayNotHaveSelfTypeID) { - def msg = em"""${hl("object")}s must not have a self ${hl("type")}""" - - def explain = { - val untpd.ModuleDef(name, tmpl) = mdef - val ValDef(_, selfTpt, _) = tmpl.self - em"""|${hl("object")}s must not have a self ${hl("type")}: - | - |Consider these alternative solutions: - | - Create a trait or a class instead of an object - | - Let the object extend a trait containing the self type: - | - | object $name extends ${selfTpt.show}""" - } - } - - class RepeatedModifier(modifier: String)(implicit ctx:Context) - extends SyntaxMsg(RepeatedModifierID) { - def msg = em"""Repeated modifier $modifier""" - - def explain = { - val code1 = em"""private private val Origin = Point(0, 0)""" - val code2 = em"""private final val Origin = Point(0, 0)""" - em"""This happens when you accidentally specify the same modifier twice. - | - |Example: - | - |$code1 - | - |instead of - | - |$code2 - | - |""" - } - } - - class InterpolatedStringError()(implicit ctx:Context) - extends SyntaxMsg(InterpolatedStringErrorID) { - def msg = "Error in interpolated string: identifier or block expected" - def explain = { - val code1 = "s\"$new Point(0, 0)\"" - val code2 = "s\"${new Point(0, 0)}\"" - em"""|This usually happens when you forget to place your expressions inside curly braces. - | - |$code1 - | - |should be written as - | - |$code2 - |""" - } - } - - class UnboundPlaceholderParameter()(implicit ctx:Context) - extends SyntaxMsg(UnboundPlaceholderParameterID) { - def msg = em"""Unbound placeholder parameter; incorrect use of ${hl("_")}""" - def explain = - em"""|The ${hl("_")} placeholder syntax was used where it could not be bound. - |Consider explicitly writing the variable binding. - | - |This can be done by replacing ${hl("_")} with a variable (eg. ${hl("x")}) - |and adding ${hl("x =>")} where applicable. 
- | - |Example before: - | - |${hl("{ _ }")} - | - |Example after: - | - |${hl("x => { x }")} - | - |Another common occurrence for this error is defining a val with ${hl("_")}: - | - |${hl("val a = _")} - | - |But this val definition isn't very useful, it can never be assigned - |another value. And thus will always remain uninitialized. - |Consider replacing the ${hl("val")} with ${hl("var")}: - | - |${hl("var a = _")} - | - |Note that this use of ${hl("_")} is not placeholder syntax, - |but an uninitialized var definition. - |Only fields can be left uninitialized in this manner; local variables - |must be initialized. - | - |Another occurrence for this error is self type definition. - |The ${hl("_")} can be replaced with ${hl("this")}. - | - |Example before: - | - |${hl("trait A { _: B => ... ")} - | - |Example after: - | - |${hl("trait A { this: B => ... ")} - |""" - } - - class IllegalStartSimpleExpr(illegalToken: String)(using Context) - extends SyntaxMsg(IllegalStartSimpleExprID) { - def msg = em"expression expected but ${Red(illegalToken)} found" - def explain = { - em"""|An expression cannot start with ${Red(illegalToken)}.""" - } - } + |// early definition gets around the NullPointerException + |class C extends { + | val f = new File("log.data") + |} with Logging""".stripMargin + + val code2 = + """|trait Logging(f: File) { + | f.open() + | onExit(f.close()) + | def log(msg: String) = f.write(msg) + |} + | + |class C extends Logging(new File("log.data"))""".stripMargin - class MissingReturnType()(implicit ctx:Context) - extends SyntaxMsg(MissingReturnTypeID) { - def msg = "Missing return type" - def explain = - em"""|An abstract declaration must have a return type. For example: - | - |trait Shape: - | ${hl("def area: Double")} // abstract declaration returning a Double""" + i"""|Earlier versions of Scala did not support trait parameters and "early + |definitions" (also known as "early initializers") were used as an alternative. 
+ | + |Example of old syntax: + | + |$code1 + | + |The above code can now be written as: + | + |$code2 + |""" } - - class MissingReturnTypeWithReturnStatement(method: Symbol)(using Context) - extends SyntaxMsg(MissingReturnTypeWithReturnStatementID) { - def msg = em"$method has a return statement; it needs a result type" - def explain = - em"""|If a method contains a ${hl("return")} statement, it must have an - |explicit return type. For example: - | - |${hl("def good: Int /* explicit return type */ = return 1")}""" +} + +class TopLevelImplicitClass(cdef: untpd.TypeDef)(using Context) +extends SyntaxMsg(TopLevelImplicitClassID) { + def msg(using Context) = i"""An ${hl("implicit class")} may not be top-level""" + + def explain(using Context) = { + val TypeDef(name, impl @ Template(constr0, parents, self, _)) = cdef: @unchecked + val exampleArgs = + if(constr0.termParamss.isEmpty) "..." + else constr0.termParamss(0).map(_.withMods(untpd.Modifiers()).show).mkString(", ") + def defHasBody[T] = impl.body.exists(!_.isEmpty) + val exampleBody = if (defHasBody) "{\n ...\n }" else "" + i"""|There may not be any method, member or object in scope with the same name as + |the implicit class and a case class automatically gets a companion object with + |the same name created by the compiler which would cause a naming conflict if it + |were allowed. 
+ | | + |To resolve the conflict declare ${cdef.name} inside of an ${hl("object")} then import the class + |from the object at the use site if needed, for example: + | + |object Implicits { + | implicit class ${cdef.name}($exampleArgs)$exampleBody + |} + | + |// At the use site: + |import Implicits.${cdef.name}""" } +} - class YieldOrDoExpectedInForComprehension()(using Context) - extends SyntaxMsg(YieldOrDoExpectedInForComprehensionID) { - def msg = em"${hl("yield")} or ${hl("do")} expected" - - def explain = - em"""|When the enumerators in a for comprehension are not placed in parentheses or - |braces, a ${hl("do")} or ${hl("yield")} statement is required after the enumerators - |section of the comprehension. - | - |You can save some keystrokes by omitting the parentheses and writing - | - |${hl("val numbers = for i <- 1 to 3 yield i")} - | - | instead of - | - |${hl("val numbers = for (i <- 1 to 3) yield i")} - | - |but the ${hl("yield")} keyword is still required. - | - |For comprehensions that simply perform a side effect without yielding anything - |can also be written without parentheses but a ${hl("do")} keyword has to be - |included. 
For example, - | - |${hl("for (i <- 1 to 3) println(i)")} - | - |can be written as - | - |${hl("for i <- 1 to 3 do println(i) // notice the 'do' keyword")} - | - |""" - } +class ImplicitCaseClass(cdef: untpd.TypeDef)(using Context) +extends SyntaxMsg(ImplicitCaseClassID) { + def msg(using Context) = i"""A ${hl("case class")} may not be defined as ${hl("implicit")}""" - class ProperDefinitionNotFound()(using Context) - extends Message(ProperDefinitionNotFoundID) { - def kind = MessageKind.DocComment - def msg = em"""Proper definition was not found in ${hl("@usecase")}""" - - def explain = { - val noUsecase = - "def map[B, That](f: A => B)(implicit bf: CanBuildFrom[List[A], B, That]): That" - - val usecase = - """|/** Map from List[A] => List[B] - | * - | * @usecase def map[B](f: A => B): List[B] - | */ - |def map[B, That](f: A => B)(implicit bf: CanBuildFrom[List[A], B, That]): That - |""".stripMargin - - em"""|Usecases are only supported for ${hl("def")}s. They exist because with Scala's - |advanced type-system, we sometimes end up with seemingly scary signatures. - |The usage of these methods, however, needs not be - for instance the ${hl("map")} - |function - | - |${hl("List(1, 2, 3).map(2 * _) // res: List(2, 4, 6)")} - | - |is easy to understand and use - but has a rather bulky signature: - | - |$noUsecase - | - |to mitigate this and ease the usage of such functions we have the ${hl("@usecase")} - |annotation for docstrings. Which can be used like this: - | - |$usecase - | - |When creating the docs, the signature of the method is substituted by the - |usecase and the compiler makes sure that it is valid. Because of this, you're - |only allowed to use ${hl("def")}s when defining usecases.""" - } + def explain(using Context) = + i"""|Implicit classes may not be case classes. Instead use a plain class: + | + |implicit class ${cdef.name}... 
+ | + |""" +} + +class ImplicitClassPrimaryConstructorArity()(using Context) +extends SyntaxMsg(ImplicitClassPrimaryConstructorArityID){ + def msg(using Context) = "Implicit classes must accept exactly one primary constructor parameter" + def explain(using Context) = { + val example = "implicit class RichDate(date: java.util.Date)" + i"""Implicit classes may only take one non-implicit argument in their constructor. For example: + | + | $example + | + |While it’s possible to create an implicit class with more than one non-implicit argument, + |such classes aren’t used during implicit lookup. + |""" } +} - class ByNameParameterNotSupported(tpe: untpd.Tree)(using Context) - extends SyntaxMsg(ByNameParameterNotSupportedID) { - def msg = em"By-name parameter type ${tpe} not allowed here." - - def explain = - em"""|By-name parameters act like functions that are only evaluated when referenced, - |allowing for lazy evaluation of a parameter. - | - |An example of using a by-name parameter would look like: - |${hl("def func(f: => Boolean) = f // 'f' is evaluated when referenced within the function")} - | - |An example of the syntax of passing an actual function as a parameter: - |${hl("def func(f: (Boolean => Boolean)) = f(true)")} - | - |or: - | - |${hl("def func(f: Boolean => Boolean) = f(true)")} - | - |And the usage could be as such: - |${hl("func(bool => // do something...)")} - |""" - } +class ObjectMayNotHaveSelfType(mdef: untpd.ModuleDef)(using Context) +extends SyntaxMsg(ObjectMayNotHaveSelfTypeID) { + def msg(using Context) = i"""${hl("object")}s must not have a self ${hl("type")}""" - class WrongNumberOfTypeArgs(fntpe: Type, expectedArgs: List[ParamInfo], actual: List[untpd.Tree])(using Context) - extends SyntaxMsg(WrongNumberOfTypeArgsID) { - - private val expectedCount = expectedArgs.length - private val actualCount = actual.length - private val msgPrefix = if (actualCount > expectedCount) "Too many" else "Not enough" - - def msg = - val expectedArgString = 
expectedArgs - .map(_.paramName.unexpandedName.show) - .mkString("[", ", ", "]") - val actualArgString = actual.map(_.show).mkString("[", ", ", "]") - val prettyName = - try fntpe.termSymbol match - case NoSymbol => fntpe.show - case symbol => symbol.showFullName - catch case NonFatal(ex) => fntpe.show - em"""|$msgPrefix type arguments for $prettyName$expectedArgString - |expected: $expectedArgString - |actual: $actualArgString""".stripMargin - - def explain = { - val tooManyTypeParams = - """|val tuple2: (Int, String) = (1, "one") - |val list: List[(Int, String)] = List(tuple2)""".stripMargin - - if (actualCount > expectedCount) - em"""|You have supplied too many type parameters - | - |For example List takes a single type parameter (List[A]) - |If you need to hold more types in a list then you need to combine them - |into another data type that can contain the number of types you need, - |In this example one solution would be to use a Tuple: - | - |${tooManyTypeParams}""" - else - em"""|You have not supplied enough type parameters - |If you specify one type parameter then you need to specify every type parameter.""" - } + def explain(using Context) = { + val untpd.ModuleDef(name, tmpl) = mdef + val ValDef(_, selfTpt, _) = tmpl.self + i"""|${hl("object")}s must not have a self ${hl("type")}: + | + |Consider these alternative solutions: + | - Create a trait or a class instead of an object + | - Let the object extend a trait containing the self type: + | + | object $name extends ${selfTpt.show}""" } +} - class IllegalVariableInPatternAlternative(name: Name)(using Context) - extends SyntaxMsg(IllegalVariableInPatternAlternativeID) { - def msg = em"Illegal variable $name in pattern alternative" - def explain = { - val varInAlternative = - """|def g(pair: (Int,Int)): Int = pair match { - | case (1, n) | (n, 1) => n - | case _ => 0 - |}""".stripMargin - - val fixedVarInAlternative = - """|def g(pair: (Int,Int)): Int = pair match { - | case (1, n) => n - | case (n, 1) => 
n - | case _ => 0 - |}""".stripMargin - - em"""|Variables are not allowed within alternate pattern matches. You can workaround - |this issue by adding additional cases for each alternative. For example, the - |illegal function: - | - |$varInAlternative - |could be implemented by moving each alternative into a separate case: - | - |$fixedVarInAlternative""" - } - } +class RepeatedModifier(modifier: String)(implicit ctx:Context) +extends SyntaxMsg(RepeatedModifierID) { + def msg(using Context) = i"""Repeated modifier $modifier""" - class IdentifierExpected(identifier: String)(using Context) - extends SyntaxMsg(IdentifierExpectedID) { - def msg = "identifier expected" - def explain = { - val wrongIdentifier = em"def foo: $identifier = {...}" - val validIdentifier = em"def foo = {...}" - em"""|An identifier expected, but $identifier found. This could be because - |$identifier is not a valid identifier. As a workaround, the compiler could - |infer the type for you. For example, instead of: - | - |$wrongIdentifier - | - |Write your code like: - | - |$validIdentifier - | - |""" - } + def explain(using Context) = { + val code1 = "private private val Origin = Point(0, 0)" + val code2 = "private final val Origin = Point(0, 0)" + i"""This happens when you accidentally specify the same modifier twice. + | + |Example: + | + |$code1 + | + |instead of + | + |$code2 + | + |""" } +} - class AuxConstructorNeedsNonImplicitParameter()(implicit ctx:Context) - extends SyntaxMsg(AuxConstructorNeedsNonImplicitParameterID) { - def msg = "Auxiliary constructor needs non-implicit parameter list" - def explain = - em"""|Only the primary constructor is allowed an ${hl("implicit")} parameter list; - |auxiliary constructors need non-implicit parameter lists. When a primary - |constructor has an implicit argslist, auxiliary constructors that call the - |primary constructor must specify the implicit value. 
- | - |To resolve this issue check for: - | - Forgotten parenthesis on ${hl("this")} (${hl("def this() = { ... }")}) - | - Auxiliary constructors specify the implicit value - |""" +class InterpolatedStringError()(implicit ctx:Context) +extends SyntaxMsg(InterpolatedStringErrorID) { + def msg(using Context) = "Error in interpolated string: identifier or block expected" + def explain(using Context) = { + val code1 = "s\"$new Point(0, 0)\"" + val code2 = "s\"${new Point(0, 0)}\"" + i"""|This usually happens when you forget to place your expressions inside curly braces. + | + |$code1 + | + |should be written as + | + |$code2 + |""" } +} - class IllegalLiteral()(using Context) - extends SyntaxMsg(IllegalLiteralID) { - def msg = "Illegal literal" - def explain = - em"""|Available literals can be divided into several groups: - | - Integer literals: 0, 21, 0xFFFFFFFF, -42L - | - Floating Point Literals: 0.0, 1e30f, 3.14159f, 1.0e-100, .1 - | - Boolean Literals: true, false - | - Character Literals: 'a', '\u0041', '\n' - | - String Literals: "Hello, World!" - | - null - |""" - } +class UnboundPlaceholderParameter()(implicit ctx:Context) +extends SyntaxMsg(UnboundPlaceholderParameterID) { + def msg(using Context) = i"""Unbound placeholder parameter; incorrect use of ${hl("_")}""" + def explain(using Context) = + i"""|The ${hl("_")} placeholder syntax was used where it could not be bound. + |Consider explicitly writing the variable binding. + | + |This can be done by replacing ${hl("_")} with a variable (eg. ${hl("x")}) + |and adding ${hl("x =>")} where applicable. + | + |Example before: + | + |${hl("{ _ }")} + | + |Example after: + | + |${hl("x => { x }")} + | + |Another common occurrence for this error is defining a val with ${hl("_")}: + | + |${hl("val a = _")} + | + |But this val definition isn't very useful, it can never be assigned + |another value. And thus will always remain uninitialized. 
+ |Consider replacing the ${hl("val")} with ${hl("var")}: + | + |${hl("var a = _")} + | + |Note that this use of ${hl("_")} is not placeholder syntax, + |but an uninitialized var definition. + |Only fields can be left uninitialized in this manner; local variables + |must be initialized. + | + |Another occurrence for this error is self type definition. + |The ${hl("_")} can be replaced with ${hl("this")}. + | + |Example before: + | + |${hl("trait A { _: B => ... ")} + | + |Example after: + | + |${hl("trait A { this: B => ... ")} + |""" +} - class LossyWideningConstantConversion(sourceType: Type, targetType: Type)(using Context) - extends Message(LossyWideningConstantConversionID): - def kind = MessageKind.LossyConversion - def msg = em"""|Widening conversion from $sourceType to $targetType loses precision. - |Write `.to$targetType` instead.""".stripMargin - def explain = "" - - class PatternMatchExhaustivity(uncoveredFn: => String, hasMore: Boolean)(using Context) - extends Message(PatternMatchExhaustivityID) { - def kind = MessageKind.PatternMatchExhaustivity - lazy val uncovered = uncoveredFn - def msg = - val addendum = if hasMore then "(More unmatched cases are elided)" else "" - em"""|${hl("match")} may not be exhaustive. 
- | - |It would fail on pattern case: $uncovered - |$addendum""" - - - def explain = - em"""|There are several ways to make the match exhaustive: - | - Add missing cases as shown in the warning - | - If an extractor always return ${hl("Some(...)")}, write ${hl("Some[X]")} for its return type - | - Add a ${hl("case _ => ...")} at the end to match all remaining cases - |""" +class IllegalStartSimpleExpr(illegalToken: String)(using Context) +extends SyntaxMsg(IllegalStartSimpleExprID) { + def msg(using Context) = i"expression expected but ${Red(illegalToken)} found" + def explain(using Context) = { + i"""|An expression cannot start with ${Red(illegalToken)}.""" } +} - class UncheckedTypePattern(msgFn: => String)(using Context) - extends PatternMatchMsg(UncheckedTypePatternID) { - def msg = msgFn - def explain = - em"""|Type arguments and type refinements are erased during compile time, thus it's - |impossible to check them at run-time. - | - |You can either replace the type arguments by ${hl("_")} or use `@unchecked`. - |""" - } +class MissingReturnType()(implicit ctx:Context) +extends SyntaxMsg(MissingReturnTypeID) { + def msg(using Context) = "Missing return type" + def explain(using Context) = + i"""|An abstract declaration must have a return type. For example: + | + |trait Shape: + | ${hl("def area: Double")} // abstract declaration returning a Double""" +} + +class MissingReturnTypeWithReturnStatement(method: Symbol)(using Context) +extends SyntaxMsg(MissingReturnTypeWithReturnStatementID) { + def msg(using Context) = i"$method has a return statement; it needs a result type" + def explain(using Context) = + i"""|If a method contains a ${hl("return")} statement, it must have an + |explicit return type. 
For example: + | + |${hl("def good: Int /* explicit return type */ = return 1")}""" +} - class MatchCaseUnreachable()(using Context) - extends Message(MatchCaseUnreachableID) { - def kind = MessageKind.MatchCaseUnreachable - def msg = "Unreachable case" - def explain = "" - } +class YieldOrDoExpectedInForComprehension()(using Context) +extends SyntaxMsg(YieldOrDoExpectedInForComprehensionID) { + def msg(using Context) = i"${hl("yield")} or ${hl("do")} expected" - class MatchCaseOnlyNullWarning()(using Context) - extends PatternMatchMsg(MatchCaseOnlyNullWarningID) { - def msg = em"""Unreachable case except for ${hl("null")} (if this is intentional, consider writing ${hl("case null =>")} instead).""" - def explain = "" - } + def explain(using Context) = + i"""|When the enumerators in a for comprehension are not placed in parentheses or + |braces, a ${hl("do")} or ${hl("yield")} statement is required after the enumerators + |section of the comprehension. + | + |You can save some keystrokes by omitting the parentheses and writing + | + |${hl("val numbers = for i <- 1 to 3 yield i")} + | + | instead of + | + |${hl("val numbers = for (i <- 1 to 3) yield i")} + | + |but the ${hl("yield")} keyword is still required. + | + |For comprehensions that simply perform a side effect without yielding anything + |can also be written without parentheses but a ${hl("do")} keyword has to be + |included. 
For example, + | + |${hl("for (i <- 1 to 3) println(i)")} + | + |can be written as + | + |${hl("for i <- 1 to 3 do println(i) // notice the 'do' keyword")} + | + |""" +} + +class ProperDefinitionNotFound()(using Context) +extends Message(ProperDefinitionNotFoundID) { + def kind = MessageKind.DocComment + def msg(using Context) = i"""Proper definition was not found in ${hl("@usecase")}""" + + def explain(using Context) = { + val noUsecase = + "def map[B, That](f: A => B)(implicit bf: CanBuildFrom[List[A], B, That]): That" + + val usecase = + """|/** Map from List[A] => List[B] + | * + | * @usecase def map[B](f: A => B): List[B] + | */ + |def map[B, That](f: A => B)(implicit bf: CanBuildFrom[List[A], B, That]): That + |""".stripMargin - class MatchableWarning(tp: Type, pattern: Boolean)(using Context) - extends TypeMsg(MatchableWarningID) { - def msg = - val kind = if pattern then "pattern selector" else "value" - em"""${kind} should be an instance of Matchable,, - |but it has unmatchable type $tp instead""" - - def explain = - if pattern then - em"""A value of type $tp cannot be the selector of a match expression - |since it is not constrained to be `Matchable`. Matching on unconstrained - |values is disallowed since it can uncover implementation details that - |were intended to be hidden and thereby can violate paramtetricity laws - |for reasoning about programs. - | - |The restriction can be overridden by appending `.asMatchable` to - |the selector value. `asMatchable` needs to be imported from - |scala.compiletime. Example: - | - | import compiletime.asMatchable - | def f[X](x: X) = x.asMatchable match { ... }""" - else - em"""The value can be converted to a `Matchable` by appending `.asMatchable`. - |`asMatchable` needs to be imported from scala.compiletime.""" + i"""|Usecases are only supported for ${hl("def")}s. They exist because with Scala's + |advanced type-system, we sometimes end up with seemingly scary signatures. 
+ |The usage of these methods, however, needs not be - for instance the ${hl("map")} + |function + | + |${hl("List(1, 2, 3).map(2 * _) // res: List(2, 4, 6)")} + | + |is easy to understand and use - but has a rather bulky signature: + | + |$noUsecase + | + |to mitigate this and ease the usage of such functions we have the ${hl("@usecase")} + |annotation for docstrings. Which can be used like this: + | + |$usecase + | + |When creating the docs, the signature of the method is substituted by the + |usecase and the compiler makes sure that it is valid. Because of this, you're + |only allowed to use ${hl("def")}s when defining usecases.""" } +} - class SeqWildcardPatternPos()(using Context) - extends SyntaxMsg(SeqWildcardPatternPosID) { - def msg = em"""${hl("*")} can be used only for last argument""" - def explain = { - val code = - """def sumOfTheFirstTwo(list: List[Int]): Int = list match { - | case List(first, second, x*) => first + second - | case _ => 0 - |}""" - em"""|Sequence wildcard pattern is expected at the end of an argument list. - |This pattern matches any remaining elements in a sequence. - |Consider the following example: - | - |$code - | - |Calling: - | - |${hl("sumOfTheFirstTwo(List(1, 2, 10))")} - | - |would give 3 as a result""" - } - } +class ByNameParameterNotSupported(tpe: untpd.Tree)(using Context) +extends SyntaxMsg(ByNameParameterNotSupportedID) { + def msg(using Context) = i"By-name parameter type ${tpe} not allowed here." - class IllegalStartOfSimplePattern()(using Context) - extends SyntaxMsg(IllegalStartOfSimplePatternID) { - def msg = "pattern expected" - def explain = { - val sipCode = - """def f(x: Int, y: Int) = x match { - | case `y` => ... - |} - """ - val constructorPatternsCode = - """case class Person(name: String, age: Int) + def explain(using Context) = + i"""|By-name parameters act like functions that are only evaluated when referenced, + |allowing for lazy evaluation of a parameter. 
+ | + |An example of using a by-name parameter would look like: + |${hl("def func(f: => Boolean) = f // 'f' is evaluated when referenced within the function")} + | + |An example of the syntax of passing an actual function as a parameter: + |${hl("def func(f: (Boolean => Boolean)) = f(true)")} + | + |or: + | + |${hl("def func(f: Boolean => Boolean) = f(true)")} + | + |And the usage could be as such: + |${hl("func(bool => // do something...)")} + |""" +} + +class WrongNumberOfTypeArgs(fntpe: Type, expectedArgs: List[ParamInfo], actual: List[untpd.Tree])(using Context) +extends SyntaxMsg(WrongNumberOfTypeArgsID) { + + private val expectedCount = expectedArgs.length + private val actualCount = actual.length + private val msgPrefix = if (actualCount > expectedCount) "Too many" else "Not enough" + + def msg(using Context) = + val expectedArgString = expectedArgs + .map(_.paramName.unexpandedName.show) + .mkString("[", ", ", "]") + val actualArgString = actual.map(_.show).mkString("[", ", ", "]") + val prettyName = + try fntpe.termSymbol match + case NoSymbol => fntpe.show + case symbol => symbol.showFullName + catch case NonFatal(ex) => fntpe.show + i"""|$msgPrefix type arguments for $prettyName$expectedArgString + |expected: $expectedArgString + |actual: $actualArgString""" + + def explain(using Context) = { + val tooManyTypeParams = + """|val tuple2: (Int, String) = (1, "one") + |val list: List[(Int, String)] = List(tuple2)""".stripMargin + + if (actualCount > expectedCount) + i"""|You have supplied too many type parameters | - |def test(p: Person) = p match { - | case Person(name, age) => ... 
- |} - """ - val tupplePatternsCode = - """def swap(tuple: (String, Int)): (Int, String) = tuple match { - | case (text, number) => (number, text) - |} - """ - val patternSequencesCode = - """def getSecondValue(list: List[Int]): Int = list match { - | case List(_, second, x:_*) => second + |For example List takes a single type parameter (List[A]) + |If you need to hold more types in a list then you need to combine them + |into another data type that can contain the number of types you need, + |In this example one solution would be to use a Tuple: + | + |${tooManyTypeParams}""" + else + i"""|You have not supplied enough type parameters + |If you specify one type parameter then you need to specify every type parameter.""" + } +} + +class IllegalVariableInPatternAlternative(name: Name)(using Context) +extends SyntaxMsg(IllegalVariableInPatternAlternativeID) { + def msg(using Context) = i"Illegal variable $name in pattern alternative" + def explain(using Context) = { + val varInAlternative = + """|def g(pair: (Int,Int)): Int = pair match { + | case (1, n) | (n, 1) => n | case _ => 0 - |}""" - em"""|Simple patterns can be divided into several groups: - |- Variable Patterns: ${hl("case x => ...")}. - | It matches any value, and binds the variable name to that value. - | A special case is the wild-card pattern _ which is treated as if it was a fresh - | variable on each occurrence. - | - |- Typed Patterns: ${hl("case x: Int => ...")} or ${hl("case _: Int => ...")}. - | This pattern matches any value matched by the specified type; it binds the variable - | name to that value. - | - |- Literal Patterns: ${hl("case 123 => ...")} or ${hl("case 'A' => ...")}. - | This type of pattern matches any value that is equal to the specified literal. - | - |- Stable Identifier Patterns: - | - | $sipCode - | - | the match succeeds only if the x argument and the y argument of f are equal. 
- | - |- Constructor Patterns: - | - | $constructorPatternsCode - | - | The pattern binds all object's fields to the variable names (name and age, in this - | case). - | - |- Tuple Patterns: - | - | $tupplePatternsCode - | - | Calling: - | - | ${hl("""swap(("Luftballons", 99)""")} - | - | would give ${hl("""(99, "Luftballons")""")} as a result. - | - |- Pattern Sequences: - | - | $patternSequencesCode - | - | Calling: - | - | ${hl("getSecondValue(List(1, 10, 2))")} - | - | would give 10 as a result. - | This pattern is possible because a companion object for the List class has a method - | with the following signature: - | - | ${hl("def unapplySeq[A](x: List[A]): Some[List[A]]")} - |""" - } - } - - class PkgDuplicateSymbol(existing: Symbol)(using Context) - extends NamingMsg(PkgDuplicateSymbolID) { - def msg = em"Trying to define package with same name as $existing" - def explain = "" - } + |}""".stripMargin - class ExistentialTypesNoLongerSupported()(using Context) - extends SyntaxMsg(ExistentialTypesNoLongerSupportedID) { - def msg = - em"""|Existential types are no longer supported - - |use a wildcard or dependent type instead""" - def explain = - em"""|The use of existential types is no longer supported. - | - |You should use a wildcard or dependent type instead. - | - |For example: - | - |Instead of using ${hl("forSome")} to specify a type variable - | - |${hl("List[T forSome { type T }]")} - | - |Try using a wildcard type variable - | - |${hl("List[?]")} - |""" - } + val fixedVarInAlternative = + """|def g(pair: (Int,Int)): Int = pair match { + | case (1, n) => n + | case (n, 1) => n + | case _ => 0 + |}""".stripMargin - class UnboundWildcardType()(using Context) - extends SyntaxMsg(UnboundWildcardTypeID) { - def msg = "Unbound wildcard type" - def explain = - em"""|The wildcard type syntax (${hl("_")}) was used where it could not be bound. - |Replace ${hl("_")} with a non-wildcard type. 
If the type doesn't matter, - |try replacing ${hl("_")} with ${hl("Any")}. - | - |Examples: - | - |- Parameter lists - | - | Instead of: - | ${hl("def foo(x: _) = ...")} - | - | Use ${hl("Any")} if the type doesn't matter: - | ${hl("def foo(x: Any) = ...")} - | - |- Type arguments - | - | Instead of: - | ${hl("val foo = List[?](1, 2)")} - | - | Use: - | ${hl("val foo = List[Int](1, 2)")} - | - |- Type bounds - | - | Instead of: - | ${hl("def foo[T <: _](x: T) = ...")} - | - | Remove the bounds if the type doesn't matter: - | ${hl("def foo[T](x: T) = ...")} - | - |- ${hl("val")} and ${hl("def")} types - | - | Instead of: - | ${hl("val foo: _ = 3")} - | - | Use: - | ${hl("val foo: Int = 3")} - |""" + i"""|Variables are not allowed within alternate pattern matches. You can workaround + |this issue by adding additional cases for each alternative. For example, the + |illegal function: + | + |$varInAlternative + |could be implemented by moving each alternative into a separate case: + | + |$fixedVarInAlternative""" + } +} + +class IdentifierExpected(identifier: String)(using Context) +extends SyntaxMsg(IdentifierExpectedID) { + def msg(using Context) = "identifier expected" + def explain(using Context) = { + val wrongIdentifier = i"def foo: $identifier = {...}" + val validIdentifier = i"def foo = {...}" + i"""|An identifier expected, but $identifier found. This could be because + |$identifier is not a valid identifier. As a workaround, the compiler could + |infer the type for you. For example, instead of: + | + |$wrongIdentifier + | + |Write your code like: + | + |$validIdentifier + | + |""" } +} - class OverridesNothing(member: Symbol)(using Context) - extends DeclarationMsg(OverridesNothingID) { - def msg = em"""${member} overrides nothing""" - - def explain = - em"""|There must be a field or method with the name ${member.name} in a super - |class of ${member.owner} to override it. Did you misspell it? - |Are you extending the right classes? 
- |""" - } +class AuxConstructorNeedsNonImplicitParameter()(implicit ctx:Context) +extends SyntaxMsg(AuxConstructorNeedsNonImplicitParameterID) { + def msg(using Context) = "Auxiliary constructor needs non-implicit parameter list" + def explain(using Context) = + i"""|Only the primary constructor is allowed an ${hl("implicit")} parameter list; + |auxiliary constructors need non-implicit parameter lists. When a primary + |constructor has an implicit argslist, auxiliary constructors that call the + |primary constructor must specify the implicit value. + | + |To resolve this issue check for: + | - Forgotten parenthesis on ${hl("this")} (${hl("def this() = { ... }")}) + | - Auxiliary constructors specify the implicit value + |""" +} + +class IllegalLiteral()(using Context) +extends SyntaxMsg(IllegalLiteralID) { + def msg(using Context) = "Illegal literal" + def explain(using Context) = + i"""|Available literals can be divided into several groups: + | - Integer literals: 0, 21, 0xFFFFFFFF, -42L + | - Floating Point Literals: 0.0, 1e30f, 3.14159f, 1.0e-100, .1 + | - Boolean Literals: true, false + | - Character Literals: 'a', '\u0041', '\n' + | - String Literals: "Hello, World!" + | - null + |""" +} + +class LossyWideningConstantConversion(sourceType: Type, targetType: Type)(using Context) +extends Message(LossyWideningConstantConversionID): + def kind = MessageKind.LossyConversion + def msg(using Context) = i"""|Widening conversion from $sourceType to $targetType loses precision. + |Write `.to$targetType` instead.""" + def explain(using Context) = "" + +class PatternMatchExhaustivity(uncoveredFn: => String, hasMore: Boolean)(using Context) +extends Message(PatternMatchExhaustivityID) { + def kind = MessageKind.PatternMatchExhaustivity + lazy val uncovered = uncoveredFn + def msg(using Context) = + val addendum = if hasMore then "(More unmatched cases are elided)" else "" + i"""|${hl("match")} may not be exhaustive. 
+ | + |It would fail on pattern case: $uncovered + |$addendum""" - class OverridesNothingButNameExists(member: Symbol, existing: List[Denotations.SingleDenotation])(using Context) - extends DeclarationMsg(OverridesNothingButNameExistsID) { - def msg = - val what = - if !existing.exists(_.symbol.hasTargetName(member.targetName)) - then "target name" - else "signature" - em"""${member} has a different $what than the overridden declaration""" - def explain = - val existingDecl: String = existing.map(_.showDcl).mkString(" \n") - em"""|There must be a non-final field or method with the name ${member.name} and the - |same parameter list in a super class of ${member.owner} to override it. - | - | ${member.showDcl} - | - |The super classes of ${member.owner} contain the following members - |named ${member.name}: - | ${existingDecl} - |""" - } - class OverrideError(override val msg: String) extends DeclarationMsg(OverrideErrorID): - def explain = "" - - class OverrideTypeMismatchError(override val msg: String, memberTp: Type, otherTp: Type)(using Context) - extends DeclarationMsg(OverrideTypeMismatchErrorID): - def explain = err.whyNoMatchStr(memberTp, otherTp) - override def canExplain = true - - class ForwardReferenceExtendsOverDefinition(value: Symbol, definition: Symbol)(using Context) - extends ReferenceMsg(ForwardReferenceExtendsOverDefinitionID) { - def msg = em"${definition.name} is a forward reference extending over the definition of ${value.name}" - - def explain = - em"""|${definition.name} is used before you define it, and the definition of ${value.name} - |appears between that use and the definition of ${definition.name}. - | - |Forward references are allowed only, if there are no value definitions between - |the reference and the referred method definition. 
- | - |Define ${definition.name} before it is used, - |or move the definition of ${value.name} so it does not appear between - |the declaration of ${definition.name} and its use, - |or define ${value.name} as lazy. - |""".stripMargin + def explain(using Context) = + i"""|There are several ways to make the match exhaustive: + | - Add missing cases as shown in the warning + | - If an extractor always return ${hl("Some(...)")}, write ${hl("Some[X]")} for its return type + | - Add a ${hl("case _ => ...")} at the end to match all remaining cases + |""" +} + +class UncheckedTypePattern(msgFn: => String)(using Context) + extends PatternMatchMsg(UncheckedTypePatternID) { + def msg(using Context) = msgFn + def explain(using Context) = + i"""|Type arguments and type refinements are erased during compile time, thus it's + |impossible to check them at run-time. + | + |You can either replace the type arguments by ${hl("_")} or use `@unchecked`. + |""" +} + +class MatchCaseUnreachable()(using Context) +extends Message(MatchCaseUnreachableID) { + def kind = MessageKind.MatchCaseUnreachable + def msg(using Context) = "Unreachable case" + def explain(using Context) = "" +} + +class MatchCaseOnlyNullWarning()(using Context) +extends PatternMatchMsg(MatchCaseOnlyNullWarningID) { + def msg(using Context) = i"""Unreachable case except for ${hl("null")} (if this is intentional, consider writing ${hl("case null =>")} instead).""" + def explain(using Context) = "" +} + +class MatchableWarning(tp: Type, pattern: Boolean)(using Context) +extends TypeMsg(MatchableWarningID) { + def msg(using Context) = + val kind = if pattern then "pattern selector" else "value" + i"""${kind} should be an instance of Matchable,, + |but it has unmatchable type $tp instead""" + + def explain(using Context) = + if pattern then + i"""A value of type $tp cannot be the selector of a match expression + |since it is not constrained to be `Matchable`. 
Matching on unconstrained + |values is disallowed since it can uncover implementation details that + |were intended to be hidden and thereby can violate paramtetricity laws + |for reasoning about programs. + | + |The restriction can be overridden by appending `.asMatchable` to + |the selector value. `asMatchable` needs to be imported from + |scala.compiletime. Example: + | + | import compiletime.asMatchable + | def f[X](x: X) = x.asMatchable match { ... }""" + else + i"""The value can be converted to a `Matchable` by appending `.asMatchable`. + |`asMatchable` needs to be imported from scala.compiletime.""" +} + +class SeqWildcardPatternPos()(using Context) +extends SyntaxMsg(SeqWildcardPatternPosID) { + def msg(using Context) = i"""${hl("*")} can be used only for last argument""" + def explain(using Context) = { + val code = + """def sumOfTheFirstTwo(list: List[Int]): Int = list match { + | case List(first, second, x*) => first + second + | case _ => 0 + |}""" + i"""|Sequence wildcard pattern is expected at the end of an argument list. + |This pattern matches any remaining elements in a sequence. + |Consider the following example: + | + |$code + | + |Calling: + | + |${hl("sumOfTheFirstTwo(List(1, 2, 10))")} + | + |would give 3 as a result""" + } +} + +class IllegalStartOfSimplePattern()(using Context) +extends SyntaxMsg(IllegalStartOfSimplePatternID) { + def msg(using Context) = "pattern expected" + def explain(using Context) = { + val sipCode = + """def f(x: Int, y: Int) = x match { + | case `y` => ... + |} + """ + val constructorPatternsCode = + """case class Person(name: String, age: Int) + | + |def test(p: Person) = p match { + | case Person(name, age) => ... 
+ |} + """ + val tupplePatternsCode = + """def swap(tuple: (String, Int)): (Int, String) = tuple match { + | case (text, number) => (number, text) + |} + """ + val patternSequencesCode = + """def getSecondValue(list: List[Int]): Int = list match { + | case List(_, second, x:_*) => second + | case _ => 0 + |}""" + i"""|Simple patterns can be divided into several groups: + |- Variable Patterns: ${hl("case x => ...")}. + | It matches any value, and binds the variable name to that value. + | A special case is the wild-card pattern _ which is treated as if it was a fresh + | variable on each occurrence. + | + |- Typed Patterns: ${hl("case x: Int => ...")} or ${hl("case _: Int => ...")}. + | This pattern matches any value matched by the specified type; it binds the variable + | name to that value. + | + |- Literal Patterns: ${hl("case 123 => ...")} or ${hl("case 'A' => ...")}. + | This type of pattern matches any value that is equal to the specified literal. + | + |- Stable Identifier Patterns: + | + | $sipCode + | + | the match succeeds only if the x argument and the y argument of f are equal. + | + |- Constructor Patterns: + | + | $constructorPatternsCode + | + | The pattern binds all object's fields to the variable names (name and age, in this + | case). + | + |- Tuple Patterns: + | + | $tupplePatternsCode + | + | Calling: + | + | ${hl("""swap(("Luftballons", 99)""")} + | + | would give ${hl("""(99, "Luftballons")""")} as a result. + | + |- Pattern Sequences: + | + | $patternSequencesCode + | + | Calling: + | + | ${hl("getSecondValue(List(1, 10, 2))")} + | + | would give 10 as a result. 
+ | This pattern is possible because a companion object for the List class has a method + | with the following signature: + | + | ${hl("def unapplySeq[A](x: List[A]): Some[List[A]]")} + |""" } +} + +class PkgDuplicateSymbol(existing: Symbol)(using Context) +extends NamingMsg(PkgDuplicateSymbolID) { + def msg(using Context) = i"Trying to define package with same name as $existing" + def explain(using Context) = "" +} + +class ExistentialTypesNoLongerSupported()(using Context) +extends SyntaxMsg(ExistentialTypesNoLongerSupportedID) { + def msg(using Context) = + i"""|Existential types are no longer supported - + |use a wildcard or dependent type instead""" + def explain(using Context) = + i"""|The use of existential types is no longer supported. + | + |You should use a wildcard or dependent type instead. + | + |For example: + | + |Instead of using ${hl("forSome")} to specify a type variable + | + |${hl("List[T forSome { type T }]")} + | + |Try using a wildcard type variable + | + |${hl("List[?]")} + |""" +} + +class UnboundWildcardType()(using Context) +extends SyntaxMsg(UnboundWildcardTypeID) { + def msg(using Context) = "Unbound wildcard type" + def explain(using Context) = + i"""|The wildcard type syntax (${hl("_")}) was used where it could not be bound. + |Replace ${hl("_")} with a non-wildcard type. If the type doesn't matter, + |try replacing ${hl("_")} with ${hl("Any")}. 
+ | + |Examples: + | + |- Parameter lists + | + | Instead of: + | ${hl("def foo(x: _) = ...")} + | + | Use ${hl("Any")} if the type doesn't matter: + | ${hl("def foo(x: Any) = ...")} + | + |- Type arguments + | + | Instead of: + | ${hl("val foo = List[?](1, 2)")} + | + | Use: + | ${hl("val foo = List[Int](1, 2)")} + | + |- Type bounds + | + | Instead of: + | ${hl("def foo[T <: _](x: T) = ...")} + | + | Remove the bounds if the type doesn't matter: + | ${hl("def foo[T](x: T) = ...")} + | + |- ${hl("val")} and ${hl("def")} types + | + | Instead of: + | ${hl("val foo: _ = 3")} + | + | Use: + | ${hl("val foo: Int = 3")} + |""" +} - class ExpectedTokenButFound(expected: Token, found: Token)(using Context) - extends SyntaxMsg(ExpectedTokenButFoundID) { +class OverridesNothing(member: Symbol)(using Context) +extends DeclarationMsg(OverridesNothingID) { + def msg(using Context) = i"""${member} overrides nothing""" - private lazy val foundText = Tokens.showToken(found) + def explain(using Context) = + i"""|There must be a field or method with the name ${member.name} in a super + |class of ${member.owner} to override it. Did you misspell it? + |Are you extending the right classes? + |""" +} + +class OverridesNothingButNameExists(member: Symbol, existing: List[Denotations.SingleDenotation])(using Context) +extends DeclarationMsg(OverridesNothingButNameExistsID) { + def msg(using Context) = + val what = + if !existing.exists(_.symbol.hasTargetName(member.targetName)) + then "target name" + else "signature" + i"""${member} has a different $what than the overridden declaration""" + def explain(using Context) = + val existingDecl: String = existing.map(_.showDcl).mkString(" \n") + i"""|There must be a non-final field or method with the name ${member.name} and the + |same parameter list in a super class of ${member.owner} to override it. 
+ | + | ${member.showDcl} + | + |The super classes of ${member.owner} contain the following members + |named ${member.name}: + | ${existingDecl} + |""" +} + +class OverrideError( + core: Context ?=> String, base: Type, + member: Symbol, other: Symbol, + memberTp: Type, otherTp: Type)(using Context) +extends DeclarationMsg(OverrideErrorID), NoDisambiguation: + def msg(using Context) = + val isConcreteOverAbstract = + (other.owner isSubClass member.owner) && other.is(Deferred) && !member.is(Deferred) + def addendum = + if isConcreteOverAbstract then + i"""| + |(Note that ${err.infoStringWithLocation(other, base)} is abstract, + |and is therefore overridden by concrete ${err.infoStringWithLocation(member, base)})""" + else "" + i"""error overriding ${err.infoStringWithLocation(other, base)}; + | ${err.infoString(member, base, showLocation = member.owner != base.typeSymbol)} $core$addendum""" + override def canExplain = + memberTp.exists && otherTp.exists + def explain(using Context) = + if canExplain then err.whyNoMatchStr(memberTp, otherTp) else "" + +class ForwardReferenceExtendsOverDefinition(value: Symbol, definition: Symbol)(using Context) +extends ReferenceMsg(ForwardReferenceExtendsOverDefinitionID) { + def msg(using Context) = i"${definition.name} is a forward reference extending over the definition of ${value.name}" + + def explain(using Context) = + i"""|${definition.name} is used before you define it, and the definition of ${value.name} + |appears between that use and the definition of ${definition.name}. + | + |Forward references are allowed only, if there are no value definitions between + |the reference and the referred method definition. + | + |Define ${definition.name} before it is used, + |or move the definition of ${value.name} so it does not appear between + |the declaration of ${definition.name} and its use, + |or define ${value.name} as lazy. 
+ |""" +} - def msg = - val expectedText = - if (Tokens.isIdentifier(expected)) "an identifier" - else Tokens.showToken(expected) - em"""${expectedText} expected, but ${foundText} found""" +class ExpectedTokenButFound(expected: Token, found: Token)(using Context) +extends SyntaxMsg(ExpectedTokenButFoundID) { - def explain = - if (Tokens.isIdentifier(expected) && Tokens.isKeyword(found)) - s""" - |If you want to use $foundText as identifier, you may put it in backticks: `${Tokens.tokenString(found)}`.""".stripMargin - else - "" - } + private def foundText = Tokens.showToken(found) - class MixedLeftAndRightAssociativeOps(op1: Name, op2: Name, op2LeftAssoc: Boolean)(using Context) - extends SyntaxMsg(MixedLeftAndRightAssociativeOpsID) { - def msg = - val op1Asso: String = if (op2LeftAssoc) "which is right-associative" else "which is left-associative" - val op2Asso: String = if (op2LeftAssoc) "which is left-associative" else "which is right-associative" - em"${op1} (${op1Asso}) and ${op2} ($op2Asso) have same precedence and may not be mixed" - def explain = - s"""|The operators ${op1} and ${op2} are used as infix operators in the same expression, - |but they bind to different sides: - |${op1} is applied to the operand to its ${if (op2LeftAssoc) "right" else "left"} - |${op2} is applied to the operand to its ${if (op2LeftAssoc) "left" else "right"} - |As both have the same precedence the compiler can't decide which to apply first. - | - |You may use parenthesis to make the application order explicit, - |or use method application syntax operand1.${op1}(operand2). - | - |Operators ending in a colon ${hl(":")} are right-associative. All other operators are left-associative. - | - |Infix operator precedence is determined by the operator's first character. Characters are listed - |below in increasing order of precedence, with characters on the same line having the same precedence. - | (all letters) - | | - | ^ - | & - | = ! 
- | < > - | : - | + - - | * / % - | (all other special characters) - |Operators starting with a letter have lowest precedence, followed by operators starting with `|`, etc. - |""".stripMargin - } + def msg(using Context) = + val expectedText = + if (Tokens.isIdentifier(expected)) "an identifier" + else Tokens.showToken(expected) + i"""${expectedText} expected, but ${foundText} found""" - class CantInstantiateAbstractClassOrTrait(cls: Symbol, isTrait: Boolean)(using Context) - extends TypeMsg(CantInstantiateAbstractClassOrTraitID) { - private val traitOrAbstract = if (isTrait) "a trait" else "abstract" - def msg = em"""${cls.name} is ${traitOrAbstract}; it cannot be instantiated""" - def explain = - em"""|Abstract classes and traits need to be extended by a concrete class or object - |to make their functionality accessible. - | - |You may want to create an anonymous class extending ${cls.name} with - | ${s"class ${cls.name} { }"} - | - |or add a companion object with - | ${s"object ${cls.name} extends ${cls.name}"} - | - |You need to implement any abstract members in both cases. - |""".stripMargin - } - - class UnreducibleApplication(tycon: Type)(using Context) extends TypeMsg(UnreducibleApplicationID): - def msg = em"unreducible application of higher-kinded type $tycon to wildcard arguments" - def explain = - em"""|An abstract type constructor cannot be applied to wildcard arguments. - |Such applications are equivalent to existential types, which are not - |supported in Scala 3.""" - - class OverloadedOrRecursiveMethodNeedsResultType(cycleSym: Symbol)(using Context) - extends CyclicMsg(OverloadedOrRecursiveMethodNeedsResultTypeID) { - def msg = em"""Overloaded or recursive $cycleSym needs return type""" - def explain = - em"""Case 1: $cycleSym is overloaded - |If there are multiple methods named $cycleSym and at least one definition of - |it calls another, you need to specify the calling method's return type. 
- | - |Case 2: $cycleSym is recursive - |If $cycleSym calls itself on any path (even through mutual recursion), you need to specify the return type - |of $cycleSym or of a definition it's mutually recursive with. - |""".stripMargin - } - - class RecursiveValueNeedsResultType(cycleSym: Symbol)(using Context) - extends CyclicMsg(RecursiveValueNeedsResultTypeID) { - def msg = em"""Recursive $cycleSym needs type""" - def explain = - em"""The definition of $cycleSym is recursive and you need to specify its type. - |""".stripMargin - } - - class CyclicReferenceInvolving(denot: SymDenotation)(using Context) - extends CyclicMsg(CyclicReferenceInvolvingID) { - def msg = - val where = if denot.exists then s" involving $denot" else "" - em"Cyclic reference$where" - def explain = - em"""|$denot is declared as part of a cycle which makes it impossible for the - |compiler to decide upon ${denot.name}'s type. - |To avoid this error, try giving ${denot.name} an explicit type. - |""".stripMargin - } + def explain(using Context) = + if (Tokens.isIdentifier(expected) && Tokens.isKeyword(found)) + s""" + |If you want to use $foundText as identifier, you may put it in backticks: `${Tokens.tokenString(found)}`.""".stripMargin + else + "" +} + +class MixedLeftAndRightAssociativeOps(op1: Name, op2: Name, op2LeftAssoc: Boolean)(using Context) +extends SyntaxMsg(MixedLeftAndRightAssociativeOpsID) { + def msg(using Context) = + val op1Asso: String = if (op2LeftAssoc) "which is right-associative" else "which is left-associative" + val op2Asso: String = if (op2LeftAssoc) "which is left-associative" else "which is right-associative" + i"${op1} (${op1Asso}) and ${op2} ($op2Asso) have same precedence and may not be mixed" + def explain(using Context) = + s"""|The operators ${op1} and ${op2} are used as infix operators in the same expression, + |but they bind to different sides: + |${op1} is applied to the operand to its ${if (op2LeftAssoc) "right" else "left"} + |${op2} is applied to the operand 
to its ${if (op2LeftAssoc) "left" else "right"} + |As both have the same precedence the compiler can't decide which to apply first. + | + |You may use parenthesis to make the application order explicit, + |or use method application syntax operand1.${op1}(operand2). + | + |Operators ending in a colon ${hl(":")} are right-associative. All other operators are left-associative. + | + |Infix operator precedence is determined by the operator's first character. Characters are listed + |below in increasing order of precedence, with characters on the same line having the same precedence. + | (all letters) + | | + | ^ + | & + | = ! + | < > + | : + | + - + | * / % + | (all other special characters) + |Operators starting with a letter have lowest precedence, followed by operators starting with `|`, etc. + |""".stripMargin +} + +class CantInstantiateAbstractClassOrTrait(cls: Symbol, isTrait: Boolean)(using Context) +extends TypeMsg(CantInstantiateAbstractClassOrTraitID) { + private val traitOrAbstract = if (isTrait) "a trait" else "abstract" + def msg(using Context) = i"""${cls.name} is ${traitOrAbstract}; it cannot be instantiated""" + def explain(using Context) = + i"""|Abstract classes and traits need to be extended by a concrete class or object + |to make their functionality accessible. + | + |You may want to create an anonymous class extending ${cls.name} with + | ${s"class ${cls.name} { }"} + | + |or add a companion object with + | ${s"object ${cls.name} extends ${cls.name}"} + | + |You need to implement any abstract members in both cases. + |""" +} + +class UnreducibleApplication(tycon: Type)(using Context) extends TypeMsg(UnreducibleApplicationID): + def msg(using Context) = i"unreducible application of higher-kinded type $tycon to wildcard arguments" + def explain(using Context) = + i"""|An abstract type constructor cannot be applied to wildcard arguments. 
+ |Such applications are equivalent to existential types, which are not + |supported in Scala 3.""" + +class OverloadedOrRecursiveMethodNeedsResultType(cycleSym: Symbol)(using Context) +extends CyclicMsg(OverloadedOrRecursiveMethodNeedsResultTypeID) { + def msg(using Context) = i"""Overloaded or recursive $cycleSym needs return type""" + def explain(using Context) = + i"""Case 1: $cycleSym is overloaded + |If there are multiple methods named $cycleSym and at least one definition of + |it calls another, you need to specify the calling method's return type. + | + |Case 2: $cycleSym is recursive + |If $cycleSym calls itself on any path (even through mutual recursion), you need to specify the return type + |of $cycleSym or of a definition it's mutually recursive with. + |""" +} - class CyclicReferenceInvolvingImplicit(cycleSym: Symbol)(using Context) - extends CyclicMsg(CyclicReferenceInvolvingImplicitID) { - def msg = em"""Cyclic reference involving implicit $cycleSym""" - def explain = - em"""|$cycleSym is declared as part of a cycle which makes it impossible for the - |compiler to decide upon ${cycleSym.name}'s type. - |This might happen when the right hand-side of $cycleSym's definition involves an implicit search. - |To avoid this error, try giving ${cycleSym.name} an explicit type. - |""".stripMargin - } +class RecursiveValueNeedsResultType(cycleSym: Symbol)(using Context) +extends CyclicMsg(RecursiveValueNeedsResultTypeID) { + def msg(using Context) = i"""Recursive $cycleSym needs type""" + def explain(using Context) = + i"""The definition of $cycleSym is recursive and you need to specify its type. 
+ |""" +} + +class CyclicReferenceInvolving(denot: SymDenotation)(using Context) +extends CyclicMsg(CyclicReferenceInvolvingID) { + def msg(using Context) = + val where = if denot.exists then s" involving $denot" else "" + i"Cyclic reference$where" + def explain(using Context) = + i"""|$denot is declared as part of a cycle which makes it impossible for the + |compiler to decide upon ${denot.name}'s type. + |To avoid this error, try giving ${denot.name} an explicit type. + |""" +} + +class CyclicReferenceInvolvingImplicit(cycleSym: Symbol)(using Context) +extends CyclicMsg(CyclicReferenceInvolvingImplicitID) { + def msg(using Context) = i"""Cyclic reference involving implicit $cycleSym""" + def explain(using Context) = + i"""|$cycleSym is declared as part of a cycle which makes it impossible for the + |compiler to decide upon ${cycleSym.name}'s type. + |This might happen when the right hand-side of $cycleSym's definition involves an implicit search. + |To avoid this error, try giving ${cycleSym.name} an explicit type. + |""" +} - class SkolemInInferred(tree: tpd.Tree, pt: Type, argument: tpd.Tree)(using Context) - extends TypeMsg(SkolemInInferredID): - private def argStr = +class SkolemInInferred(tree: tpd.Tree, pt: Type, argument: tpd.Tree)(using Context) +extends TypeMsg(SkolemInInferredID): + def msg(using Context) = + def argStr = if argument.isEmpty then "" else i" from argument of type ${argument.tpe.widen}" - def msg = - em"""Failure to generate given instance for type $pt$argStr) - | - |I found: $tree - |But the part corresponding to `` is not a reference that can be generated. - |This might be because resolution yielded as given instance a function that is not - |known to be total and side-effect free.""" - def explain = - em"""The part of given resolution that corresponds to `` produced a term that - |is not a stable reference. Therefore a given instance could not be generated. 
- | - |To trouble-shoot the problem, try to supply an explicit expression instead of - |relying on implicit search at this point.""" - - class SuperQualMustBeParent(qual: untpd.Ident, cls: ClassSymbol)(using Context) - extends ReferenceMsg(SuperQualMustBeParentID) { - def msg = em"""|$qual does not name a parent of $cls""" - def explain = - val parents: Seq[String] = (cls.info.parents map (_.typeSymbol.name.show)).sorted - em"""|When a qualifier ${hl("T")} is used in a ${hl("super")} prefix of the form ${hl("C.super[T]")}, - |${hl("T")} must be a parent type of ${hl("C")}. - | - |In this case, the parents of $cls are: - |${parents.mkString(" - ", "\n - ", "")} - |""".stripMargin - } - - class VarArgsParamMustComeLast()(using Context) - extends SyntaxMsg(VarArgsParamMustComeLastID) { - def msg = em"""${hl("varargs")} parameter must come last""" - def explain = - em"""|The ${hl("varargs")} field must be the last field in the method signature. - |Attempting to define a field in a method signature after a ${hl("varargs")} field is an error. 
- |""" - } - - import typer.Typer.BindingPrec - - class AmbiguousReference(name: Name, newPrec: BindingPrec, prevPrec: BindingPrec, prevCtx: Context)(using Context) - extends ReferenceMsg(AmbiguousReferenceID) { - - /** A string which explains how something was bound; Depending on `prec` this is either - * imported by - * or defined in - */ - private def bindingString(prec: BindingPrec, whereFound: Context, qualifier: String = "") = { - val howVisible = prec match { - case BindingPrec.Definition => "defined" - case BindingPrec.Inheritance => "inherited" - case BindingPrec.NamedImport => "imported by name" - case BindingPrec.WildImport => "imported" - case BindingPrec.PackageClause => "found" - case BindingPrec.NothingBound => assert(false) - } - if (prec.isImportPrec) { - ex"""$howVisible$qualifier by ${em"${whereFound.importInfo}"}""" - } else - ex"""$howVisible$qualifier in ${em"${whereFound.owner}"}""" - } - - def msg = - i"""|Reference to ${em"$name"} is ambiguous, - |it is both ${bindingString(newPrec, ctx)} - |and ${bindingString(prevPrec, prevCtx, " subsequently")}""" - - def explain = - em"""|The compiler can't decide which of the possible choices you - |are referencing with $name: A definition of lower precedence - |in an inner scope, or a definition with higher precedence in - |an outer scope. 
- |Note: - | - Definitions in an enclosing scope take precedence over inherited definitions - | - Definitions take precedence over imports - | - Named imports take precedence over wildcard imports - | - You may replace a name when imported using - | ${hl("import")} scala.{ $name => ${name.show + "Tick"} } - |""" - } - - class MethodDoesNotTakeParameters(tree: tpd.Tree)(using Context) - extends TypeMsg(MethodDoesNotTakeParametersId) { - def methodSymbol: Symbol = - def recur(t: tpd.Tree): Symbol = - val sym = tpd.methPart(t).symbol - if sym == defn.Any_typeCast then - t match - case TypeApply(Select(qual, _), _) => recur(qual) - case _ => sym - else sym - recur(tree) - - def msg = { - val more = if (tree.isInstanceOf[tpd.Apply]) " more" else "" - val meth = methodSymbol - val methStr = if (meth.exists) meth.showLocated else "expression" - em"$methStr does not take$more parameters" - } - - def explain = { - val isNullary = methodSymbol.info.isInstanceOf[ExprType] - val addendum = - if (isNullary) "\nNullary methods may not be called with parenthesis" - else "" - - "You have specified more parameter lists than defined in the method definition(s)." + addendum + i"""Failure to generate given instance for type $pt$argStr) + | + |I found: $tree + |But the part corresponding to `` is not a reference that can be generated. + |This might be because resolution yielded as given instance a function that is not + |known to be total and side-effect free.""" + def explain(using Context) = + i"""The part of given resolution that corresponds to `` produced a term that + |is not a stable reference. Therefore a given instance could not be generated. 
+ | + |To trouble-shoot the problem, try to supply an explicit expression instead of + |relying on implicit search at this point.""" + +class SuperQualMustBeParent(qual: untpd.Ident, cls: ClassSymbol)(using Context) +extends ReferenceMsg(SuperQualMustBeParentID) { + def msg(using Context) = i"""|$qual does not name a parent of $cls""" + def explain(using Context) = + val parents: Seq[String] = (cls.info.parents map (_.typeSymbol.name.show)).sorted + i"""|When a qualifier ${hl("T")} is used in a ${hl("super")} prefix of the form ${hl("C.super[T]")}, + |${hl("T")} must be a parent type of ${hl("C")}. + | + |In this case, the parents of $cls are: + |${parents.mkString(" - ", "\n - ", "")} + |""" +} + +class VarArgsParamMustComeLast()(using Context) +extends SyntaxMsg(VarArgsParamMustComeLastID) { + def msg(using Context) = i"""${hl("varargs")} parameter must come last""" + def explain(using Context) = + i"""|The ${hl("varargs")} field must be the last field in the method signature. + |Attempting to define a field in a method signature after a ${hl("varargs")} field is an error. + |""" +} + +import typer.Typer.BindingPrec + +class ConstrProxyShadows(proxy: TermRef, shadowed: Type, shadowedIsApply: Boolean)(using Context) + extends ReferenceMsg(ConstrProxyShadowsID), NoDisambiguation: + + def clsString(using Context) = proxy.symbol.companionClass.showLocated + def shadowedString(using Context) = shadowed.termSymbol.showLocated + def appClause = if shadowedIsApply then " the apply method of" else "" + def appSuffix = if shadowedIsApply then ".apply" else "" + + def msg(using Context) = + i"""Reference to constructor proxy for $clsString + |shadows outer reference to $shadowedString + | + |The instance needs to be created with an explicit `new`.""" + + def explain(using Context) = + i"""There is an ambiguity in the meaning of the call + | + | ${proxy.symbol.name}(...) 
+ | + |It could mean creating an instance of $clsString with + | + | new ${proxy.symbol.companionClass.name}(...) + | + |Or it could mean calling$appClause $shadowedString as in + | + | ${shadowed.termSymbol.name}$appSuffix(...) + | + |To disambiguate, use an explicit `new` if you mean the former, + |or use a full prefix for ${shadowed.termSymbol.name} if you mean the latter.""" +end ConstrProxyShadows + +class AmbiguousReference(name: Name, newPrec: BindingPrec, prevPrec: BindingPrec, prevCtx: Context)(using Context) + extends ReferenceMsg(AmbiguousReferenceID), NoDisambiguation { + + /** A string which explains how something was bound; Depending on `prec` this is either + * imported by + * or defined in + */ + private def bindingString(prec: BindingPrec, whereFound: Context, qualifier: String = "")(using Context) = { + val howVisible = prec match { + case BindingPrec.Definition => "defined" + case BindingPrec.Inheritance => "inherited" + case BindingPrec.NamedImport => "imported by name" + case BindingPrec.WildImport => "imported" + case BindingPrec.PackageClause => "found" + case BindingPrec.NothingBound => assert(false) } - - } - - class AmbiguousOverload(tree: tpd.Tree, val alternatives: List[SingleDenotation], pt: Type, addendum: String = "")( - implicit ctx: Context) - extends ReferenceMsg(AmbiguousOverloadID) { - private def all = if (alternatives.length == 2) "both" else "all" - def msg = - em"""|Ambiguous overload. The ${err.overloadedAltsStr(alternatives)} - |$all match ${err.expectedTypeStr(pt)}$addendum""".stripMargin - def explain = - em"""|There are ${alternatives.length} methods that could be referenced as the compiler knows too little - |about the expected type. - |You may specify the expected type e.g. 
by - |- assigning it to a value with a specified type, or - |- adding a type ascription as in ${hl("instance.myMethod: String => Int")} - |""" - } - - class ReassignmentToVal(name: Name)(using Context) - extends TypeMsg(ReassignmentToValID) { - def msg = em"""Reassignment to val $name""" - def explain = - em"""|You can not assign a new value to $name as values can't be changed. - |Keep in mind that every statement has a value, so you may e.g. use - | ${hl("val")} $name ${hl("= if (condition) 2 else 5")} - |In case you need a reassignable name, you can declare it as - |variable - | ${hl("var")} $name ${hl("=")} ... - |""".stripMargin - } - - class TypeDoesNotTakeParameters(tpe: Type, params: List[Trees.Tree[Trees.Untyped]])(using Context) - extends TypeMsg(TypeDoesNotTakeParametersID) { - private def fboundsAddendum = - if tpe.typeSymbol.isAllOf(Provisional | TypeParam) then - "\n(Note that F-bounds of type parameters may not be type lambdas)" - else "" - def msg = em"$tpe does not take type parameters$fboundsAddendum" - def explain = - val ps = - if (params.size == 1) s"a type parameter ${params.head}" - else s"type parameters ${params.map(_.show).mkString(", ")}" - i"""You specified ${NoColor(ps)} for ${em"$tpe"}, which is not - |declared to take any. - |""" - } - - class VarValParametersMayNotBeCallByName(name: TermName, mutable: Boolean)(using Context) - extends SyntaxMsg(VarValParametersMayNotBeCallByNameID) { - def varOrVal = if (mutable) em"${hl("var")}" else em"${hl("val")}" - def msg = s"$varOrVal parameters may not be call-by-name" - def explain = - em"""${hl("var")} and ${hl("val")} parameters of classes and traits may no be call-by-name. 
In case you - |want the parameter to be evaluated on demand, consider making it just a parameter - |and a ${hl("def")} in the class such as - | ${s"class MyClass(${name}Tick: => String) {"} - | ${s" def $name() = ${name}Tick"} - | ${hl("}")} - |""" - } - - class MissingTypeParameterFor(tpe: Type)(using Context) - extends SyntaxMsg(MissingTypeParameterForID) { - def msg = - if (tpe.derivesFrom(defn.AnyKindClass)) em"${tpe} cannot be used as a value type" - else em"Missing type parameter for ${tpe}" - def explain = "" - } - - class MissingTypeParameterInTypeApp(tpe: Type)(using Context) - extends TypeMsg(MissingTypeParameterInTypeAppID) { - def numParams = tpe.typeParams.length - def parameters = if (numParams == 1) "parameter" else "parameters" - def msg = em"Missing type $parameters for $tpe" - def explain = em"A fully applied type is expected but $tpe takes $numParams $parameters" - } - - class MissingArgument(pname: Name, methString: String)(using Context) - extends TypeMsg(MissingArgumentID): - def msg = - if pname.firstPart contains '$' then s"not enough arguments for $methString" - else s"missing argument for parameter $pname of $methString" - def explain = "" - - class DoesNotConformToBound(tpe: Type, which: String, bound: Type)(using Context) - extends TypeMismatchMsg( - if which == "lower" then bound else tpe, - if which == "lower" then tpe else bound)(DoesNotConformToBoundID): - private def isBounds = tpe match - case TypeBounds(lo, hi) => lo ne hi - case _ => false - override def canExplain = !isBounds - def msg = - if isBounds then - em"Type argument ${tpe} does not overlap with $which bound $bound" - else - em"Type argument ${tpe} does not conform to $which bound $bound" - - class DoesNotConformToSelfType(category: String, selfType: Type, cls: Symbol, - otherSelf: Type, relation: String, other: Symbol)( - implicit ctx: Context) - extends TypeMismatchMsg(selfType, otherSelf)(DoesNotConformToSelfTypeID) { - def msg = em"""$category: self type $selfType of 
$cls does not conform to self type $otherSelf - |of $relation $other""" - } - - class DoesNotConformToSelfTypeCantBeInstantiated(tp: Type, selfType: Type)( - implicit ctx: Context) - extends TypeMismatchMsg(tp, selfType)(DoesNotConformToSelfTypeCantBeInstantiatedID) { - def msg = em"""$tp does not conform to its self type $selfType; cannot be instantiated""" - } - - class IllegalParameterInit(found: Type, expected: Type, param: Symbol, cls: Symbol)(using Context) - extends TypeMismatchMsg(found, expected)(IllegalParameterInitID): - def msg = - em"""illegal parameter initialization of $param. - | - | The argument passed for $param has type: $found - | but $cls expects $param to have type: $expected""" - - class AbstractMemberMayNotHaveModifier(sym: Symbol, flag: FlagSet)( - implicit ctx: Context) - extends SyntaxMsg(AbstractMemberMayNotHaveModifierID) { - def msg = em"""${hl("abstract")} $sym may not have `${flag.flagsString}` modifier""" - def explain = "" - } - - class TypesAndTraitsCantBeImplicit()(using Context) - extends SyntaxMsg(TypesAndTraitsCantBeImplicitID) { - def msg = em"""${hl("implicit")} modifier cannot be used for types or traits""" - def explain = "" - } - - class OnlyClassesCanBeAbstract(sym: Symbol)( - implicit ctx: Context) - extends SyntaxMsg(OnlyClassesCanBeAbstractID) { - def explain = "" - def msg = em"""${hl("abstract")} modifier can be used only for classes; it should be omitted for abstract members""" - } - - class AbstractOverrideOnlyInTraits(sym: Symbol)( - implicit ctx: Context) - extends SyntaxMsg(AbstractOverrideOnlyInTraitsID) { - def msg = em"""${hl("abstract override")} modifier only allowed for members of traits""" - def explain = "" - } - - class TraitsMayNotBeFinal(sym: Symbol)( - implicit ctx: Context) - extends SyntaxMsg(TraitsMayNotBeFinalID) { - def msg = em"""$sym may not be ${hl("final")}""" - def explain = - "A trait can never be final since it is abstract and must be extended to be useful." 
- } - - class NativeMembersMayNotHaveImplementation(sym: Symbol)( - implicit ctx: Context) - extends SyntaxMsg(NativeMembersMayNotHaveImplementationID) { - def msg = em"""${hl("@native")} members may not have an implementation""" - def explain = "" - } - - class TraitMayNotDefineNativeMethod(sym: Symbol)( - implicit ctx: Context) - extends SyntaxMsg(TraitMayNotDefineNativeMethodID) { - def msg = em"""A trait cannot define a ${hl("@native")} method.""" - def explain = "" - } - - class OnlyClassesCanHaveDeclaredButUndefinedMembers(sym: Symbol)( - implicit ctx: Context) - extends SyntaxMsg(OnlyClassesCanHaveDeclaredButUndefinedMembersID) { - - private def varNote = - if (sym.is(Mutable)) "Note that variables need to be initialized to be defined." + if (prec.isImportPrec) { + i"""$howVisible$qualifier by ${whereFound.importInfo}""" + } else + i"""$howVisible$qualifier in ${whereFound.owner}""" + } + + def msg(using Context) = + i"""|Reference to $name is ambiguous. + |It is both ${bindingString(newPrec, ctx)} + |and ${bindingString(prevPrec, prevCtx, " subsequently")}""" + + def explain(using Context) = + val precedent = + if newPrec == prevPrec then """two name bindings of equal precedence + |were introduced in the same scope.""".stripMargin + else """a name binding of lower precedence + |in an inner scope cannot shadow a binding with higher precedence in + |an outer scope.""".stripMargin + + i"""|The identifier $name is ambiguous because $precedent + | + |The precedence of the different kinds of name bindings, from highest to lowest, is: + | - Definitions in an enclosing scope + | - Inherited definitions and top-level definitions in packages + | - Names introduced by import of a specific name + | - Names introduced by wildcard import + | - Definitions from packages in other files + |Note: + | - As a rule, definitions take precedence over imports. 
+ | - Definitions in an enclosing scope take precedence over inherited definitions, + | which can result in ambiguities in nested classes. + | - When importing, you can avoid naming conflicts by renaming: + | ${hl("import")} scala.{$name => ${name.show}Tick} + |""" +} + +class MethodDoesNotTakeParameters(tree: tpd.Tree)(using Context) +extends TypeMsg(MethodDoesNotTakeParametersId) { + def methodSymbol(using Context): Symbol = + def recur(t: tpd.Tree): Symbol = + val sym = tpd.methPart(t).symbol + if sym == defn.Any_typeCast then + t match + case TypeApply(Select(qual, _), _) => recur(qual) + case _ => sym + else sym + recur(tree) + + def msg(using Context) = { + val more = if (tree.isInstanceOf[tpd.Apply]) " more" else "" + val meth = methodSymbol + val methStr = if (meth.exists) meth.showLocated else "expression" + i"$methStr does not take$more parameters" + } + + def explain(using Context) = { + val isNullary = methodSymbol.info.isInstanceOf[ExprType] + val addendum = + if (isNullary) "\nNullary methods may not be called with parenthesis" else "" - def msg = em"""Declaration of $sym not allowed here: only classes can have declared but undefined members""" - def explain = s"$varNote" - } - - class CannotExtendAnyVal(sym: Symbol)(using Context) - extends SyntaxMsg(CannotExtendAnyValID) { - def msg = em"""$sym cannot extend ${hl("AnyVal")}""" - def explain = - em"""Only classes (not traits) are allowed to extend ${hl("AnyVal")}, but traits may extend - |${hl("Any")} to become ${Green("\"universal traits\"")} which may only have ${hl("def")} members. - |Universal traits can be mixed into classes that extend ${hl("AnyVal")}. 
- |""" - } - - class CannotExtendJavaEnum(sym: Symbol)(using Context) - extends SyntaxMsg(CannotExtendJavaEnumID) { - def msg = em"""$sym cannot extend ${hl("java.lang.Enum")}: only enums defined with the ${hl("enum")} syntax can""" - def explain = "" - } - - class CannotExtendContextFunction(sym: Symbol)(using Context) - extends SyntaxMsg(CannotExtendFunctionID) { - def msg = em"""$sym cannot extend a context function class""" - def explain = "" - } - - class JavaEnumParentArgs(parent: Type)(using Context) - extends TypeMsg(JavaEnumParentArgsID) { - def msg = em"""not enough arguments for constructor Enum: ${hl("(name: String, ordinal: Int)")}: ${hl(parent.show)}""" - def explain = "" - } - - class CannotHaveSameNameAs(sym: Symbol, cls: Symbol, reason: CannotHaveSameNameAs.Reason)(using Context) - extends SyntaxMsg(CannotHaveSameNameAsID) { - import CannotHaveSameNameAs._ - def reasonMessage: String = reason match { - case CannotBeOverridden => "class definitions cannot be overridden" - case DefinedInSelf(self) => - s"""cannot define ${sym.showKind} member with the same name as a ${cls.showKind} member in self reference ${self.name}. 
- |(Note: this can be resolved by using another name) - |""".stripMargin - } - - def msg = em"""$sym cannot have the same name as ${cls.showLocated} -- """ + reasonMessage - def explain = "" - } - object CannotHaveSameNameAs { - sealed trait Reason - case object CannotBeOverridden extends Reason - case class DefinedInSelf(self: tpd.ValDef) extends Reason - } - - class ValueClassesMayNotDefineInner(valueClass: Symbol, inner: Symbol)(using Context) - extends SyntaxMsg(ValueClassesMayNotDefineInnerID) { - def msg = em"""Value classes may not define an inner class""" - def explain = "" - } - - class ValueClassesMayNotDefineNonParameterField(valueClass: Symbol, field: Symbol)(using Context) - extends SyntaxMsg(ValueClassesMayNotDefineNonParameterFieldID) { - def msg = em"""Value classes may not define non-parameter field""" - def explain = "" - } - - class ValueClassesMayNotDefineASecondaryConstructor(valueClass: Symbol, constructor: Symbol)(using Context) - extends SyntaxMsg(ValueClassesMayNotDefineASecondaryConstructorID) { - def msg = em"""Value classes may not define a secondary constructor""" - def explain = "" - } - - class ValueClassesMayNotContainInitalization(valueClass: Symbol)(using Context) - extends SyntaxMsg(ValueClassesMayNotContainInitalizationID) { - def msg = em"""Value classes may not contain initialization statements""" - def explain = "" - } - - class ValueClassesMayNotBeAbstract(valueClass: Symbol)(using Context) - extends SyntaxMsg(ValueClassesMayNotBeAbstractID) { - def msg = em"""Value classes may not be ${hl("abstract")}""" - def explain = "" - } - - class ValueClassesMayNotBeContainted(valueClass: Symbol)(using Context) - extends SyntaxMsg(ValueClassesMayNotBeContaintedID) { - private def localOrMember = if (valueClass.owner.isTerm) "local class" else "member of another class" - def msg = s"""Value classes may not be a $localOrMember""" - def explain = "" - } - - class ValueClassesMayNotWrapAnotherValueClass(valueClass: Symbol)(using Context) 
- extends SyntaxMsg(ValueClassesMayNotWrapAnotherValueClassID) { - def msg = """A value class may not wrap another user-defined value class""" - def explain = "" - } - - class ValueClassParameterMayNotBeAVar(valueClass: Symbol, param: Symbol)(using Context) - extends SyntaxMsg(ValueClassParameterMayNotBeAVarID) { - def msg = em"""A value class parameter may not be a ${hl("var")}""" - def explain = - em"""A value class must have exactly one ${hl("val")} parameter.""" - } - - class ValueClassNeedsOneValParam(valueClass: Symbol)(using Context) - extends SyntaxMsg(ValueClassNeedsExactlyOneValParamID) { - def msg = em"""Value class needs one ${hl("val")} parameter""" - def explain = "" - } - - class ValueClassParameterMayNotBeCallByName(valueClass: Symbol, param: Symbol)(using Context) - extends SyntaxMsg(ValueClassParameterMayNotBeCallByNameID) { - def msg = s"Value class parameter `${param.name}` may not be call-by-name" - def explain = "" - } - - class SuperCallsNotAllowedInlineable(symbol: Symbol)(using Context) - extends SyntaxMsg(SuperCallsNotAllowedInlineableID) { - def msg = em"Super call not allowed in inlineable $symbol" - def explain = "Method inlining prohibits calling superclass methods, as it may lead to confusion about which super is being called." 
- } - - class NotAPath(tp: Type, usage: String)(using Context) extends TypeMsg(NotAPathID): - def msg = em"$tp is not a valid $usage, since it is not an immutable path" - def explain = - i"""An immutable path is - | - a reference to an immutable value, or - | - a reference to `this`, or - | - a selection of an immutable path with an immutable value.""" - - class WrongNumberOfParameters(expected: Int)(using Context) - extends SyntaxMsg(WrongNumberOfParametersID) { - def msg = s"Wrong number of parameters, expected: $expected" - def explain = "" - } - - class DuplicatePrivateProtectedQualifier()(using Context) - extends SyntaxMsg(DuplicatePrivateProtectedQualifierID) { - def msg = "Duplicate private/protected qualifier" - def explain = - em"It is not allowed to combine `private` and `protected` modifiers even if they are qualified to different scopes" - } - - class ExpectedStartOfTopLevelDefinition()(using Context) - extends SyntaxMsg(ExpectedStartOfTopLevelDefinitionID) { - def msg = "Expected start of definition" - def explain = - em"You have to provide either ${hl("class")}, ${hl("trait")}, ${hl("object")}, or ${hl("enum")} definitions after qualifiers" - } - - class NoReturnFromInlineable(owner: Symbol)(using Context) - extends SyntaxMsg(NoReturnFromInlineableID) { - def msg = em"No explicit ${hl("return")} allowed from inlineable $owner" - def explain = - em"""Methods marked with ${hl("inline")} modifier may not use ${hl("return")} statements. - |Instead, you should rely on the last expression's value being - |returned from a method. - |""" - } - - class ReturnOutsideMethodDefinition(owner: Symbol)(using Context) - extends SyntaxMsg(ReturnOutsideMethodDefinitionID) { - def msg = em"${hl("return")} outside method definition" - def explain = - em"""You used ${hl("return")} in ${owner}. - |${hl("return")} is a keyword and may only be used within method declarations. 
- |""" - } - - class ExtendFinalClass(clazz:Symbol, finalClazz: Symbol)(using Context) - extends SyntaxMsg(ExtendFinalClassID) { - def msg = em"$clazz cannot extend ${hl("final")} $finalClazz" - def explain = - em"""A class marked with the ${hl("final")} keyword cannot be extended""" - } - - class ExpectedTypeBoundOrEquals(found: Token)(using Context) - extends SyntaxMsg(ExpectedTypeBoundOrEqualsID) { - def msg = em"${hl("=")}, ${hl(">:")}, or ${hl("<:")} expected, but ${Tokens.showToken(found)} found" - - def explain = - em"""Type parameters and abstract types may be constrained by a type bound. - |Such type bounds limit the concrete values of the type variables and possibly - |reveal more information about the members of such types. - | - |A lower type bound ${hl("B >: A")} expresses that the type variable ${hl("B")} - |refers to a supertype of type ${hl("A")}. - | - |An upper type bound ${hl("T <: A")} declares that type variable ${hl("T")} - |refers to a subtype of type ${hl("A")}. - |""" - } - - class ClassAndCompanionNameClash(cls: Symbol, other: Symbol)(using Context) - extends NamingMsg(ClassAndCompanionNameClashID) { - def msg = - val name = cls.name.stripModuleClassSuffix - em"Name clash: both ${cls.owner} and its companion object defines $name" - def explain = - em"""|A ${cls.kindString} and its companion object cannot both define a ${hl("class")}, ${hl("trait")} or ${hl("object")} with the same name: - | - ${cls.owner} defines ${cls} - | - ${other.owner} defines ${other}""" - } - - class TailrecNotApplicable(symbol: Symbol)(using Context) - extends SyntaxMsg(TailrecNotApplicableID) { - def msg = { - val reason = - if (!symbol.is(Method)) em"$symbol isn't a method" - else if (symbol.is(Deferred)) em"$symbol is abstract" - else if (!symbol.isEffectivelyFinal) em"$symbol is neither ${hl("private")} nor ${hl("final")} so can be overridden" - else em"$symbol contains no recursive calls" - s"TailRec optimisation not applicable, $reason" - } - def explain = "" 
- } - - class FailureToEliminateExistential(tp: Type, tp1: Type, tp2: Type, boundSyms: List[Symbol], classRoot: Symbol)(using Context) - extends Message(FailureToEliminateExistentialID) { - def kind = MessageKind.Compatibility - def msg = - val originalType = ctx.printer.dclsText(boundSyms, "; ").show - em"""An existential type that came from a Scala-2 classfile for $classRoot - |cannot be mapped accurately to a Scala-3 equivalent. - |original type : $tp forSome ${originalType} - |reduces to : $tp1 - |type used instead: $tp2 - |This choice can cause follow-on type errors or hide type errors. - |Proceed at own risk.""" - def explain = - em"""Existential types in their full generality are no longer supported. - |Scala-3 does applications of class types to wildcard type arguments. - |Other forms of existential types that come from Scala-2 classfiles - |are only approximated in a best-effort way.""" + "You have specified more parameter lists than defined in the method definition(s)." + addendum } - class OnlyFunctionsCanBeFollowedByUnderscore(tp: Type)(using Context) - extends SyntaxMsg(OnlyFunctionsCanBeFollowedByUnderscoreID) { - def msg = em"Only function types can be followed by ${hl("_")} but the current expression has type $tp" - def explain = - em"""The syntax ${hl("x _")} is no longer supported if ${hl("x")} is not a function. - |To convert to a function value, you need to explicitly write ${hl("() => x")}""" - } +} - class MissingEmptyArgumentList(method: String)(using Context) - extends SyntaxMsg(MissingEmptyArgumentListID) { - def msg = em"$method must be called with ${hl("()")} argument" - def explain = { - val codeExample = - """def next(): T = ... 
- |next // is expanded to next()""" +class AmbiguousOverload(tree: tpd.Tree, val alternatives: List[SingleDenotation], pt: Type, addendum: String = "")( + implicit ctx: Context) +extends ReferenceMsg(AmbiguousOverloadID), NoDisambiguation { + private def all = if (alternatives.length == 2) "both" else "all" + def msg(using Context) = + i"""|Ambiguous overload. The ${err.overloadedAltsStr(alternatives)} + |$all match ${err.expectedTypeStr(pt)}$addendum""" + def explain(using Context) = + i"""|There are ${alternatives.length} methods that could be referenced as the compiler knows too little + |about the expected type. + |You may specify the expected type e.g. by + |- assigning it to a value with a specified type, or + |- adding a type ascription as in ${hl("instance.myMethod: String => Int")} + |""" +} + +class AmbiguousExtensionMethod(tree: untpd.Tree, expansion1: tpd.Tree, expansion2: tpd.Tree)(using Context) + extends ReferenceMsg(AmbiguousExtensionMethodID), NoDisambiguation: + def msg(using Context) = + i"""Ambiguous extension methods: + |both $expansion1 + |and $expansion2 + |are possible expansions of $tree""" + def explain(using Context) = "" + +class ReassignmentToVal(name: Name)(using Context) + extends TypeMsg(ReassignmentToValID) { + def msg(using Context) = i"""Reassignment to val $name""" + def explain(using Context) = + i"""|You can not assign a new value to $name as values can't be changed. + |Keep in mind that every statement has a value, so you may e.g. use + | ${hl("val")} $name ${hl("= if (condition) 2 else 5")} + |In case you need a reassignable name, you can declare it as + |variable + | ${hl("var")} $name ${hl("=")} ... 
+ |""" +} + +class TypeDoesNotTakeParameters(tpe: Type, params: List[untpd.Tree])(using Context) + extends TypeMsg(TypeDoesNotTakeParametersID) { + private def fboundsAddendum(using Context) = + if tpe.typeSymbol.isAllOf(Provisional | TypeParam) then + "\n(Note that F-bounds of type parameters may not be type lambdas)" + else "" + def msg(using Context) = i"$tpe does not take type parameters$fboundsAddendum" + def explain(using Context) = + val ps = + if (params.size == 1) s"a type parameter ${params.head}" + else s"type parameters ${params.map(_.show).mkString(", ")}" + i"""You specified ${NoColor(ps)} for $tpe, which is not + |declared to take any. + |""" +} + +class VarValParametersMayNotBeCallByName(name: TermName, mutable: Boolean)(using Context) + extends SyntaxMsg(VarValParametersMayNotBeCallByNameID) { + def varOrVal = if mutable then hl("var") else hl("val") + def msg(using Context) = s"$varOrVal parameters may not be call-by-name" + def explain(using Context) = + i"""${hl("var")} and ${hl("val")} parameters of classes and traits may no be call-by-name. 
In case you + |want the parameter to be evaluated on demand, consider making it just a parameter + |and a ${hl("def")} in the class such as + | ${s"class MyClass(${name}Tick: => String) {"} + | ${s" def $name() = ${name}Tick"} + | ${hl("}")} + |""" +} + +class MissingTypeParameterFor(tpe: Type)(using Context) + extends SyntaxMsg(MissingTypeParameterForID) { + def msg(using Context) = + if tpe.derivesFrom(defn.AnyKindClass) + then i"$tpe cannot be used as a value type" + else i"Missing type parameter for $tpe" + def explain(using Context) = "" +} + +class MissingTypeParameterInTypeApp(tpe: Type)(using Context) + extends TypeMsg(MissingTypeParameterInTypeAppID) { + def numParams = tpe.typeParams.length + def parameters = if (numParams == 1) "parameter" else "parameters" + def msg(using Context) = i"Missing type $parameters for $tpe" + def explain(using Context) = i"A fully applied type is expected but $tpe takes $numParams $parameters" +} + +class MissingArgument(pname: Name, methString: String)(using Context) + extends TypeMsg(MissingArgumentID): + def msg(using Context) = + if pname.firstPart contains '$' then s"not enough arguments for $methString" + else s"missing argument for parameter $pname of $methString" + def explain(using Context) = "" + +class MissingArgumentList(method: String, sym: Symbol)(using Context) + extends TypeMsg(MissingArgumentListID) { + def msg(using Context) = + val symDcl = if sym.exists then "\n\n " + hl(sym.showDcl(using ctx.withoutColors)) else "" + i"missing argument list for $method$symDcl" + def explain(using Context) = { + i"""Unapplied methods are only converted to functions when a function type is expected.""" + } +} + +class DoesNotConformToBound(tpe: Type, which: String, bound: Type)(using Context) + extends TypeMismatchMsg( + if which == "lower" then bound else tpe, + if which == "lower" then tpe else bound)(DoesNotConformToBoundID): + private def isBounds = tpe match + case TypeBounds(lo, hi) => lo ne hi + case _ => false + 
override def canExplain = !isBounds + def msg(using Context) = + if isBounds then + i"Type argument ${tpe} does not overlap with $which bound $bound" + else + i"Type argument ${tpe} does not conform to $which bound $bound" + +class DoesNotConformToSelfType(category: String, selfType: Type, cls: Symbol, + otherSelf: Type, relation: String, other: Symbol)( + implicit ctx: Context) + extends TypeMismatchMsg(selfType, otherSelf)(DoesNotConformToSelfTypeID) { + def msg(using Context) = i"""$category: self type $selfType of $cls does not conform to self type $otherSelf + |of $relation $other""" +} + +class DoesNotConformToSelfTypeCantBeInstantiated(tp: Type, selfType: Type)( + implicit ctx: Context) + extends TypeMismatchMsg(tp, selfType)(DoesNotConformToSelfTypeCantBeInstantiatedID) { + def msg(using Context) = i"""$tp does not conform to its self type $selfType; cannot be instantiated""" +} + +class IllegalParameterInit(found: Type, expected: Type, param: Symbol, cls: Symbol)(using Context) + extends TypeMismatchMsg(found, expected)(IllegalParameterInitID): + def msg(using Context) = + i"""illegal parameter initialization of $param. 
+ | + | The argument passed for $param has type: $found + | but $cls expects $param to have type: $expected""" + +class AbstractMemberMayNotHaveModifier(sym: Symbol, flag: FlagSet)( + implicit ctx: Context) + extends SyntaxMsg(AbstractMemberMayNotHaveModifierID) { + def msg(using Context) = i"""${hl("abstract")} $sym may not have `${flag.flagsString}` modifier""" + def explain(using Context) = "" +} + +class TypesAndTraitsCantBeImplicit()(using Context) + extends SyntaxMsg(TypesAndTraitsCantBeImplicitID) { + def msg(using Context) = i"""${hl("implicit")} modifier cannot be used for types or traits""" + def explain(using Context) = "" +} + +class OnlyClassesCanBeAbstract(sym: Symbol)( + implicit ctx: Context) + extends SyntaxMsg(OnlyClassesCanBeAbstractID) { + def explain(using Context) = "" + def msg(using Context) = i"""${hl("abstract")} modifier can be used only for classes; it should be omitted for abstract members""" +} + +class AbstractOverrideOnlyInTraits(sym: Symbol)( + implicit ctx: Context) + extends SyntaxMsg(AbstractOverrideOnlyInTraitsID) { + def msg(using Context) = i"""${hl("abstract override")} modifier only allowed for members of traits""" + def explain(using Context) = "" +} + +class TraitsMayNotBeFinal(sym: Symbol)( + implicit ctx: Context) + extends SyntaxMsg(TraitsMayNotBeFinalID) { + def msg(using Context) = i"""$sym may not be ${hl("final")}""" + def explain(using Context) = + "A trait can never be final since it is abstract and must be extended to be useful." 
+} + +class NativeMembersMayNotHaveImplementation(sym: Symbol)( + implicit ctx: Context) + extends SyntaxMsg(NativeMembersMayNotHaveImplementationID) { + def msg(using Context) = i"""${hl("@native")} members may not have an implementation""" + def explain(using Context) = "" +} + +class TraitMayNotDefineNativeMethod(sym: Symbol)( + implicit ctx: Context) + extends SyntaxMsg(TraitMayNotDefineNativeMethodID) { + def msg(using Context) = i"""A trait cannot define a ${hl("@native")} method.""" + def explain(using Context) = "" +} + +class OnlyClassesCanHaveDeclaredButUndefinedMembers(sym: Symbol)( + implicit ctx: Context) + extends SyntaxMsg(OnlyClassesCanHaveDeclaredButUndefinedMembersID) { + + def msg(using Context) = i"""Declaration of $sym not allowed here: only classes can have declared but undefined members""" + def explain(using Context) = + if sym.is(Mutable) then "Note that variables need to be initialized to be defined." + else "" +} + +class CannotExtendAnyVal(sym: Symbol)(using Context) + extends SyntaxMsg(CannotExtendAnyValID) { + def msg(using Context) = i"""$sym cannot extend ${hl("AnyVal")}""" + def explain(using Context) = + i"""Only classes (not traits) are allowed to extend ${hl("AnyVal")}, but traits may extend + |${hl("Any")} to become ${Green("\"universal traits\"")} which may only have ${hl("def")} members. + |Universal traits can be mixed into classes that extend ${hl("AnyVal")}. + |""" +} - em"""Previously an empty argument list () was implicitly inserted when calling a nullary method without arguments. E.g. - | - |$codeExample - | - |In Dotty, this idiom is an error. The application syntax has to follow exactly the parameter syntax. 
- |Excluded from this rule are methods that are defined in Java or that override methods defined in Java.""" - } +class CannotExtendJavaEnum(sym: Symbol)(using Context) + extends SyntaxMsg(CannotExtendJavaEnumID) { + def msg(using Context) = i"""$sym cannot extend ${hl("java.lang.Enum")}: only enums defined with the ${hl("enum")} syntax can""" + def explain(using Context) = "" } - class DuplicateNamedTypeParameter(name: Name)(using Context) - extends SyntaxMsg(DuplicateNamedTypeParameterID) { - def msg = em"Type parameter $name was defined multiple times." - def explain = "" +class CannotExtendContextFunction(sym: Symbol)(using Context) + extends SyntaxMsg(CannotExtendFunctionID) { + def msg(using Context) = i"""$sym cannot extend a context function class""" + def explain(using Context) = "" } - class UndefinedNamedTypeParameter(undefinedName: Name, definedNames: List[Name])(using Context) - extends SyntaxMsg(UndefinedNamedTypeParameterID) { - def msg = em"Type parameter $undefinedName is undefined. Expected one of ${definedNames.map(_.show).mkString(", ")}." - def explain = "" +class JavaEnumParentArgs(parent: Type)(using Context) + extends TypeMsg(JavaEnumParentArgsID) { + def msg(using Context) = i"""not enough arguments for constructor Enum: ${hl("(name: String, ordinal: Int)")}: ${hl(parent.show)}""" + def explain(using Context) = "" } - class IllegalStartOfStatement(what: String, isModifier: Boolean, isStat: Boolean)(using Context) extends SyntaxMsg(IllegalStartOfStatementID) { - def msg = - if isStat then - "this kind of statement is not allowed here" - else - val addendum = if isModifier then ": this modifier is not allowed here" else "" - s"Illegal start of $what$addendum" - def explain = - i"""A statement is an import or export, a definition or an expression. 
- |Some statements are only allowed in certain contexts""" +class CannotHaveSameNameAs(sym: Symbol, cls: Symbol, reason: CannotHaveSameNameAs.Reason)(using Context) + extends NamingMsg(CannotHaveSameNameAsID) { + import CannotHaveSameNameAs._ + def reasonMessage(using Context): String = reason match { + case CannotBeOverridden => "class definitions cannot be overridden" + case DefinedInSelf(self) => + s"""cannot define ${sym.showKind} member with the same name as a ${cls.showKind} member in self reference ${self.name}. + |(Note: this can be resolved by using another name) + |""".stripMargin } - class TraitIsExpected(symbol: Symbol)(using Context) extends SyntaxMsg(TraitIsExpectedID) { - def msg = em"$symbol is not a trait" - def explain = { - val errorCodeExample = - """class A - |class B - | - |val a = new A with B // will fail with a compile error - class B is not a trait""".stripMargin - val codeExample = - """class A - |trait B - | - |val a = new A with B // compiles normally""".stripMargin + def msg(using Context) = i"""$sym cannot have the same name as ${cls.showLocated} -- """ + reasonMessage + def explain(using Context) = "" +} +object CannotHaveSameNameAs { + sealed trait Reason + case object CannotBeOverridden extends Reason + case class DefinedInSelf(self: tpd.ValDef) extends Reason +} + +class ValueClassesMayNotDefineInner(valueClass: Symbol, inner: Symbol)(using Context) + extends SyntaxMsg(ValueClassesMayNotDefineInnerID) { + def msg(using Context) = i"""Value classes may not define an inner class""" + def explain(using Context) = "" +} + +class ValueClassesMayNotDefineNonParameterField(valueClass: Symbol, field: Symbol)(using Context) + extends SyntaxMsg(ValueClassesMayNotDefineNonParameterFieldID) { + def msg(using Context) = i"""Value classes may not define non-parameter field""" + def explain(using Context) = "" +} + +class ValueClassesMayNotDefineASecondaryConstructor(valueClass: Symbol, constructor: Symbol)(using Context) + extends 
SyntaxMsg(ValueClassesMayNotDefineASecondaryConstructorID) { + def msg(using Context) = i"""Value classes may not define a secondary constructor""" + def explain(using Context) = "" +} + +class ValueClassesMayNotContainInitalization(valueClass: Symbol)(using Context) + extends SyntaxMsg(ValueClassesMayNotContainInitalizationID) { + def msg(using Context) = i"""Value classes may not contain initialization statements""" + def explain(using Context) = "" +} + +class ValueClassesMayNotBeAbstract(valueClass: Symbol)(using Context) + extends SyntaxMsg(ValueClassesMayNotBeAbstractID) { + def msg(using Context) = i"""Value classes may not be ${hl("abstract")}""" + def explain(using Context) = "" +} + +class ValueClassesMayNotBeContainted(valueClass: Symbol)(using Context) + extends SyntaxMsg(ValueClassesMayNotBeContaintedID) { + private def localOrMember = if (valueClass.owner.isTerm) "local class" else "member of another class" + def msg(using Context) = s"""Value classes may not be a $localOrMember""" + def explain(using Context) = "" +} + +class ValueClassesMayNotWrapAnotherValueClass(valueClass: Symbol)(using Context) + extends SyntaxMsg(ValueClassesMayNotWrapAnotherValueClassID) { + def msg(using Context) = """A value class may not wrap another user-defined value class""" + def explain(using Context) = "" +} + +class ValueClassParameterMayNotBeAVar(valueClass: Symbol, param: Symbol)(using Context) + extends SyntaxMsg(ValueClassParameterMayNotBeAVarID) { + def msg(using Context) = i"""A value class parameter may not be a ${hl("var")}""" + def explain(using Context) = + i"""A value class must have exactly one ${hl("val")} parameter.""" +} + +class ValueClassNeedsOneValParam(valueClass: Symbol)(using Context) + extends SyntaxMsg(ValueClassNeedsExactlyOneValParamID) { + def msg(using Context) = i"""Value class needs one ${hl("val")} parameter""" + def explain(using Context) = "" +} + +class ValueClassParameterMayNotBeCallByName(valueClass: Symbol, param: Symbol)(using 
Context) + extends SyntaxMsg(ValueClassParameterMayNotBeCallByNameID) { + def msg(using Context) = s"Value class parameter `${param.name}` may not be call-by-name" + def explain(using Context) = "" +} + +class SuperCallsNotAllowedInlineable(symbol: Symbol)(using Context) + extends SyntaxMsg(SuperCallsNotAllowedInlineableID) { + def msg(using Context) = i"Super call not allowed in inlineable $symbol" + def explain(using Context) = "Method inlining prohibits calling superclass methods, as it may lead to confusion about which super is being called." +} + +class NotAPath(tp: Type, usage: String)(using Context) extends TypeMsg(NotAPathID): + def msg(using Context) = i"$tp is not a valid $usage, since it is not an immutable path" + def explain(using Context) = + i"""An immutable path is + | - a reference to an immutable value, or + | - a reference to `this`, or + | - a selection of an immutable path with an immutable value.""" + +class WrongNumberOfParameters(expected: Int)(using Context) + extends SyntaxMsg(WrongNumberOfParametersID) { + def msg(using Context) = s"Wrong number of parameters, expected: $expected" + def explain(using Context) = "" +} + +class DuplicatePrivateProtectedQualifier()(using Context) + extends SyntaxMsg(DuplicatePrivateProtectedQualifierID) { + def msg(using Context) = "Duplicate private/protected qualifier" + def explain(using Context) = + i"It is not allowed to combine `private` and `protected` modifiers even if they are qualified to different scopes" +} + +class ExpectedStartOfTopLevelDefinition()(using Context) + extends SyntaxMsg(ExpectedStartOfTopLevelDefinitionID) { + def msg(using Context) = "Expected start of definition" + def explain(using Context) = + i"You have to provide either ${hl("class")}, ${hl("trait")}, ${hl("object")}, or ${hl("enum")} definitions after qualifiers" +} + +class NoReturnFromInlineable(owner: Symbol)(using Context) + extends SyntaxMsg(NoReturnFromInlineableID) { + def msg(using Context) = i"No explicit 
${hl("return")} allowed from inlineable $owner" + def explain(using Context) = + i"""Methods marked with ${hl("inline")} modifier may not use ${hl("return")} statements. + |Instead, you should rely on the last expression's value being + |returned from a method. + |""" +} + +class ReturnOutsideMethodDefinition(owner: Symbol)(using Context) + extends SyntaxMsg(ReturnOutsideMethodDefinitionID) { + def msg(using Context) = i"${hl("return")} outside method definition" + def explain(using Context) = + i"""You used ${hl("return")} in ${owner}. + |${hl("return")} is a keyword and may only be used within method declarations. + |""" +} + +class ExtendFinalClass(clazz:Symbol, finalClazz: Symbol)(using Context) + extends SyntaxMsg(ExtendFinalClassID) { + def msg(using Context) = i"$clazz cannot extend ${hl("final")} $finalClazz" + def explain(using Context) = + i"""A class marked with the ${hl("final")} keyword cannot be extended""" +} + +class ExpectedTypeBoundOrEquals(found: Token)(using Context) + extends SyntaxMsg(ExpectedTypeBoundOrEqualsID) { + def msg(using Context) = i"${hl("=")}, ${hl(">:")}, or ${hl("<:")} expected, but ${Tokens.showToken(found)} found" + + def explain(using Context) = + i"""Type parameters and abstract types may be constrained by a type bound. + |Such type bounds limit the concrete values of the type variables and possibly + |reveal more information about the members of such types. + | + |A lower type bound ${hl("B >: A")} expresses that the type variable ${hl("B")} + |refers to a supertype of type ${hl("A")}. + | + |An upper type bound ${hl("T <: A")} declares that type variable ${hl("T")} + |refers to a subtype of type ${hl("A")}. 
+ |""" +} + +class ClassAndCompanionNameClash(cls: Symbol, other: Symbol)(using Context) + extends NamingMsg(ClassAndCompanionNameClashID) { + def msg(using Context) = + val name = cls.name.stripModuleClassSuffix + i"Name clash: both ${cls.owner} and its companion object defines $name" + def explain(using Context) = + i"""|A ${cls.kindString} and its companion object cannot both define a ${hl("class")}, ${hl("trait")} or ${hl("object")} with the same name: + | - ${cls.owner} defines ${cls} + | - ${other.owner} defines ${other}""" +} + +class TailrecNotApplicable(symbol: Symbol)(using Context) + extends SyntaxMsg(TailrecNotApplicableID) { + def msg(using Context) = { + val reason = + if !symbol.is(Method) then i"$symbol isn't a method" + else if symbol.is(Deferred) then i"$symbol is abstract" + else if !symbol.isEffectivelyFinal then i"$symbol is neither ${hl("private")} nor ${hl("final")} so can be overridden" + else i"$symbol contains no recursive calls" + + s"TailRec optimisation not applicable, $reason" + } + def explain(using Context) = "" +} + +class FailureToEliminateExistential(tp: Type, tp1: Type, tp2: Type, boundSyms: List[Symbol], classRoot: Symbol)(using Context) + extends Message(FailureToEliminateExistentialID) { + def kind = MessageKind.Compatibility + def msg(using Context) = + val originalType = ctx.printer.dclsText(boundSyms, "; ").show + i"""An existential type that came from a Scala-2 classfile for $classRoot + |cannot be mapped accurately to a Scala-3 equivalent. + |original type : $tp forSome ${originalType} + |reduces to : $tp1 + |type used instead: $tp2 + |This choice can cause follow-on type errors or hide type errors. + |Proceed at own risk.""" + def explain(using Context) = + i"""Existential types in their full generality are no longer supported. + |Scala-3 does applications of class types to wildcard type arguments. 
+ |Other forms of existential types that come from Scala-2 classfiles + |are only approximated in a best-effort way.""" +} + +class OnlyFunctionsCanBeFollowedByUnderscore(tp: Type)(using Context) + extends SyntaxMsg(OnlyFunctionsCanBeFollowedByUnderscoreID) { + def msg(using Context) = i"Only function types can be followed by ${hl("_")} but the current expression has type $tp" + def explain(using Context) = + i"""The syntax ${hl("x _")} is no longer supported if ${hl("x")} is not a function. + |To convert to a function value, you need to explicitly write ${hl("() => x")}""" +} + +class MissingEmptyArgumentList(method: String)(using Context) + extends SyntaxMsg(MissingEmptyArgumentListID) { + def msg(using Context) = i"$method must be called with ${hl("()")} argument" + def explain(using Context) = { + val codeExample = + """def next(): T = ... + |next // is expanded to next()""" + + i"""Previously an empty argument list () was implicitly inserted when calling a nullary method without arguments. E.g. + | + |$codeExample + | + |In Dotty, this idiom is an error. The application syntax has to follow exactly the parameter syntax. + |Excluded from this rule are methods that are defined in Java or that override methods defined in Java.""" + } +} + +class DuplicateNamedTypeParameter(name: Name)(using Context) + extends SyntaxMsg(DuplicateNamedTypeParameterID) { + def msg(using Context) = i"Type parameter $name was defined multiple times." + def explain(using Context) = "" +} + +class UndefinedNamedTypeParameter(undefinedName: Name, definedNames: List[Name])(using Context) + extends SyntaxMsg(UndefinedNamedTypeParameterID) { + def msg(using Context) = i"Type parameter $undefinedName is undefined. Expected one of ${definedNames.map(_.show).mkString(", ")}." 
+ def explain(using Context) = "" +} + +class IllegalStartOfStatement(what: String, isModifier: Boolean, isStat: Boolean)(using Context) extends SyntaxMsg(IllegalStartOfStatementID) { + def msg(using Context) = + if isStat then + "this kind of statement is not allowed here" + else + val addendum = if isModifier then ": this modifier is not allowed here" else "" + s"Illegal start of $what$addendum" + def explain(using Context) = + i"""A statement is an import or export, a definition or an expression. + |Some statements are only allowed in certain contexts""" +} + +class TraitIsExpected(symbol: Symbol)(using Context) extends SyntaxMsg(TraitIsExpectedID) { + def msg(using Context) = i"$symbol is not a trait" + def explain(using Context) = { + val errorCodeExample = + """class A + |class B + | + |val a = new A with B // will fail with a compile error - class B is not a trait""".stripMargin + val codeExample = + """class A + |trait B + | + |val a = new A with B // compiles normally""".stripMargin - em"""Only traits can be mixed into classes using a ${hl("with")} keyword. - |Consider the following example: - | - |$errorCodeExample - | - |The example mentioned above would fail because B is not a trait. - |But if you make B a trait it will be compiled without any errors: - | - |$codeExample - |""" - } + i"""Only traits can be mixed into classes using a ${hl("with")} keyword. + |Consider the following example: + | + |$errorCodeExample + | + |The example mentioned above would fail because B is not a trait. + |But if you make B a trait it will be compiled without any errors: + | + |$codeExample + |""" } +} - class TraitRedefinedFinalMethodFromAnyRef(method: Symbol)(using Context) extends SyntaxMsg(TraitRedefinedFinalMethodFromAnyRefID) { - def msg = em"Traits cannot redefine final $method from ${hl("class AnyRef")}." 
- def explain = "" - } +class TraitRedefinedFinalMethodFromAnyRef(method: Symbol)(using Context) extends SyntaxMsg(TraitRedefinedFinalMethodFromAnyRefID) { + def msg(using Context) = i"Traits cannot redefine final $method from ${hl("class AnyRef")}." + def explain(using Context) = "" +} - class AlreadyDefined(name: Name, owner: Symbol, conflicting: Symbol)(using Context) extends NamingMsg(AlreadyDefinedID): - private def where: String = +class AlreadyDefined(name: Name, owner: Symbol, conflicting: Symbol)(using Context) +extends NamingMsg(AlreadyDefinedID): + def msg(using Context) = + def where: String = if conflicting.effectiveOwner.is(Package) && conflicting.associatedFile != null then i" in ${conflicting.associatedFile}" else if conflicting.owner == owner then "" else i" in ${conflicting.owner}" - private def note = + def note = if owner.is(Method) || conflicting.is(Method) then "\n\nNote that overloaded methods must all be defined in the same group of toplevel definitions" else "" - def msg = - if conflicting.isTerm != name.isTermName then - em"$name clashes with $conflicting$where; the two must be defined together" - else - em"$name is already defined as $conflicting$where$note" - def explain = "" - - class PackageNameAlreadyDefined(pkg: Symbol)(using Context) extends NamingMsg(PackageNameAlreadyDefinedID) { - lazy val (where, or) = - if pkg.associatedFile == null then ("", "") - else (s" in ${pkg.associatedFile}", " or delete the containing class file") - def msg = em"""${pkg.name} is the name of $pkg$where. - |It cannot be used at the same time as the name of a package.""" - def explain = - em"""An ${hl("object")} or other toplevel definition cannot have the same name as an existing ${hl("package")}. 
- |Rename either one of them$or.""" - } - - class UnapplyInvalidNumberOfArguments(qual: untpd.Tree, argTypes: List[Type])(using Context) - extends SyntaxMsg(UnapplyInvalidNumberOfArgumentsID) { - def msg = em"Wrong number of argument patterns for $qual; expected: ($argTypes%, %)" - def explain = - em"""The Unapply method of $qual was used with incorrect number of arguments. - |Expected usage would be something like: - |case $qual(${argTypes.map(_ => '_')}%, %) => ... - | - |where subsequent arguments would have following types: ($argTypes%, %). - |""".stripMargin - } - - class UnapplyInvalidReturnType(unapplyResult: Type, unapplyName: Name)(using Context) - extends DeclarationMsg(UnapplyInvalidReturnTypeID) { - def msg = - val addendum = - if Feature.migrateTo3 && unapplyName == nme.unapplySeq - then "\nYou might want to try to rewrite the extractor to use `unapply` instead." - else "" - em"""| ${Red(i"$unapplyResult")} is not a valid result type of an $unapplyName method of an ${Magenta("extractor")}.$addendum""" - def explain = if (unapplyName.show == "unapply") - em""" - |To be used as an extractor, an unapply method has to return a type that either: - | - has members ${Magenta("isEmpty: Boolean")} and ${Magenta("get: S")} (usually an ${Green("Option[S]")}) - | - is a ${Green("Boolean")} - | - is a ${Green("Product")} (like a ${Magenta("Tuple2[T1, T2]")}) - | - |class A(val i: Int) - | - |object B { - | def unapply(a: A): ${Green("Option[Int]")} = Some(a.i) - |} - | - |object C { - | def unapply(a: A): ${Green("Boolean")} = a.i == 2 - |} - | - |object D { - | def unapply(a: A): ${Green("(Int, Int)")} = (a.i, a.i) - |} - | - |object Test { - | def test(a: A) = a match { - | ${Magenta("case B(1)")} => 1 - | ${Magenta("case a @ C()")} => 2 - | ${Magenta("case D(3, 3)")} => 3 - | } - |} - """.stripMargin + if conflicting.isTerm != name.isTermName then + i"$name clashes with $conflicting$where; the two must be defined together" else - em""" - |To be used as an 
extractor, an unapplySeq method has to return a type which has members - |${Magenta("isEmpty: Boolean")} and ${Magenta("get: S")} where ${Magenta("S <: Seq[V]")} (usually an ${Green("Option[Seq[V]]")}): - | - |object CharList { - | def unapplySeq(s: String): ${Green("Option[Seq[Char]")} = Some(s.toList) - | - | "example" match { - | ${Magenta("case CharList(c1, c2, c3, c4, _, _, _)")} => - | println(s"$$c1,$$c2,$$c3,$$c4") - | case _ => - | println("Expected *exactly* 7 characters!") - | } - |} - """.stripMargin - } - - class StaticFieldsOnlyAllowedInObjects(member: Symbol)(using Context) extends SyntaxMsg(StaticFieldsOnlyAllowedInObjectsID) { - def msg = em"${hl("@static")} $member in ${member.owner} must be defined inside a static ${hl("object")}." - def explain = - em"${hl("@static")} members are only allowed inside objects." - } - - class StaticFieldsShouldPrecedeNonStatic(member: Symbol, defns: List[tpd.Tree])(using Context) extends SyntaxMsg(StaticFieldsShouldPrecedeNonStaticID) { - def msg = em"${hl("@static")} $member in ${member.owner} must be defined before non-static fields." - def explain = { - val nonStatics = defns.takeWhile(_.symbol != member).take(3).filter(_.isInstanceOf[tpd.ValDef]) - val codeExample = s"""object ${member.owner.name.firstPart} { - | @static ${member} = ... - | ${nonStatics.map(m => s"${m.symbol} = ...").mkString("\n ")} - | ... - |}""" - em"""The fields annotated with @static should precede any non @static fields. - |This ensures that we do not introduce surprises for users in initialization order of this class. - |Static field are initialized when class loading the code of Foo. - |Non static fields are only initialized the first time that Foo is accessed. 
- | - |The definition of ${member.name} should have been before the non ${hl("@static val")}s: - |$codeExample + i"$name is already defined as $conflicting$where$note" + def explain(using Context) = "" + +class PackageNameAlreadyDefined(pkg: Symbol)(using Context) extends NamingMsg(PackageNameAlreadyDefinedID) { + def msg(using Context) = + def where = if pkg.associatedFile == null then "" else s" in ${pkg.associatedFile}" + i"""${pkg.name} is the name of $pkg$where. + |It cannot be used at the same time as the name of a package.""" + def explain(using Context) = + def or = if pkg.associatedFile == null then "" else " or delete the containing class file" + i"""An ${hl("object")} or other toplevel definition cannot have the same name as an existing ${hl("package")}. + |Rename either one of them$or.""" +} + +class UnapplyInvalidNumberOfArguments(qual: untpd.Tree, argTypes: List[Type])(using Context) + extends SyntaxMsg(UnapplyInvalidNumberOfArgumentsID) { + def msg(using Context) = i"Wrong number of argument patterns for $qual; expected: ($argTypes%, %)" + def explain(using Context) = + i"""The Unapply method of $qual was used with incorrect number of arguments. + |Expected usage would be something like: + |case $qual(${argTypes.map(_ => '_')}%, %) => ... + | + |where subsequent arguments would have following types: ($argTypes%, %). |""" - } - } - - class CyclicInheritance(symbol: Symbol, addendum: => String)(using Context) extends SyntaxMsg(CyclicInheritanceID) { - def msg = em"Cyclic inheritance: $symbol extends itself$addendum" - def explain = { - val codeExample = "class A extends A" - - em"""Cyclic inheritance is prohibited in Dotty. 
- |Consider the following example: - | - |$codeExample - | - |The example mentioned above would fail because this type of inheritance hierarchy - |creates a "cycle" where a not yet defined class A extends itself which makes - |impossible to instantiate an object of this class""" - } - } - - class BadSymbolicReference(denot: SymDenotation)(using Context) - extends ReferenceMsg(BadSymbolicReferenceID) { - def msg = { - val denotationOwner = denot.owner - val denotationName = ctx.fresh.setSetting(ctx.settings.YdebugNames, true).printer.nameString(denot.name) - val file = denot.symbol.associatedFile - val (location, src) = - if (file != null) (s" in $file", file.toString) - else ("", "the signature") - - em"""Bad symbolic reference. A signature$location - |refers to $denotationName in ${denotationOwner.showKind} ${denotationOwner.showFullName} which is not available. - |It may be completely missing from the current classpath, or the version on - |the classpath might be incompatible with the version used when compiling $src.""" - } - - def explain = "" - } - - class UnableToExtendSealedClass(pclazz: Symbol)(using Context) extends SyntaxMsg(UnableToExtendSealedClassID) { - def msg = em"Cannot extend ${hl("sealed")} $pclazz in a different source file" - def explain = "A sealed class or trait can only be extended in the same file as its declaration" - } - - class SymbolHasUnparsableVersionNumber(symbol: Symbol, errorMessage: String)(using Context) - extends SyntaxMsg(SymbolHasUnparsableVersionNumberID) { - def msg = em"${symbol.showLocated} has an unparsable version number: $errorMessage" - def explain = - em"""The ${symbol.showLocated} is marked with ${hl("@migration")} indicating it has changed semantics - |between versions and the ${hl("-Xmigration")} settings is used to warn about constructs - |whose behavior may have changed since version change.""" - } - - class SymbolChangedSemanticsInVersion( - symbol: Symbol, - migrationVersion: ScalaVersion, - migrationMessage: 
String - )(using Context) extends SyntaxMsg(SymbolChangedSemanticsInVersionID) { - def msg = em"${symbol.showLocated} has changed semantics in version $migrationVersion: $migrationMessage" - def explain = - em"""The ${symbol.showLocated} is marked with ${hl("@migration")} indicating it has changed semantics - |between versions and the ${hl("-Xmigration")} settings is used to warn about constructs - |whose behavior may have changed since version change.""" - } - - class UnableToEmitSwitch()(using Context) - extends SyntaxMsg(UnableToEmitSwitchID) { - def msg = em"Could not emit switch for ${hl("@switch")} annotated match" - def explain = { - val codeExample = - """val ConstantB = 'B' - |final val ConstantC = 'C' - |def tokenMe(ch: Char) = (ch: @switch) match { - | case '\t' | '\n' => 1 - | case 'A' => 2 - | case ConstantB => 3 // a non-literal may prevent switch generation: this would not compile - | case ConstantC => 4 // a constant value is allowed - | case _ => 5 - |}""".stripMargin - - em"""If annotated with ${hl("@switch")}, the compiler will verify that the match has been compiled to a - |tableswitch or lookupswitch and issue an error if it instead compiles into a series of conditional - |expressions. Example usage: - | - |$codeExample - | - |The compiler will not apply the optimisation if: - |- the matched value is not of type ${hl("Int")}, ${hl("Byte")}, ${hl("Short")} or ${hl("Char")} - |- the matched value is not a constant literal - |- there are less than three cases""" - } - } - - class MissingCompanionForStatic(member: Symbol)(using Context) - extends SyntaxMsg(MissingCompanionForStaticID) { - def msg = em"${member.owner} does not have a companion class" - def explain = - em"An object that contains ${hl("@static")} members must have a companion class." 
- } - - class PolymorphicMethodMissingTypeInParent(rsym: Symbol, parentSym: Symbol)(using Context) - extends SyntaxMsg(PolymorphicMethodMissingTypeInParentID) { - def msg = em"Polymorphic refinement $rsym without matching type in parent $parentSym is no longer allowed" - def explain = - em"""Polymorphic $rsym is not allowed in the structural refinement of $parentSym because - |$rsym does not override any method in $parentSym. Structural refinement does not allow for - |polymorphic methods.""" - } - - class ParamsNoInline(owner: Symbol)(using Context) - extends SyntaxMsg(ParamsNoInlineID) { - def msg = em"""${hl("inline")} modifier can only be used for parameters of inline methods""" - def explain = "" - } - - class JavaSymbolIsNotAValue(symbol: Symbol)(using Context) extends TypeMsg(JavaSymbolIsNotAValueID) { - def msg = { - val kind = - if (symbol is Package) em"$symbol" - else em"Java defined ${hl("class " + symbol.name)}" - - s"$kind is not a value" - } - def explain = "" - } - - class DoubleDefinition(decl: Symbol, previousDecl: Symbol, base: Symbol)(using Context) extends NamingMsg(DoubleDefinitionID) { - def msg = { - def nameAnd = if (decl.name != previousDecl.name) " name and" else "" - def erasedType = if ctx.erasedTypes then i" ${decl.info}" else "" - def details(using Context): String = - if (decl.isRealMethod && previousDecl.isRealMethod) { - import Signature.MatchDegree._ - - // compare the signatures when both symbols represent methods - decl.signature.matchDegree(previousDecl.signature) match { - case NoMatch => - // If the signatures don't match at all at the current phase, then - // they might match after erasure. - if ctx.phase.id <= elimErasedValueTypePhase.id then - atPhase(elimErasedValueTypePhase.next)(details) - else - "" // shouldn't be reachable - case ParamMatch => - "have matching parameter types." - case MethodNotAMethodMatch => - "neither has parameters." 
- case FullMatch => - val hint = - if !decl.hasAnnotation(defn.TargetNameAnnot) - && !previousDecl.hasAnnotation(defn.TargetNameAnnot) - then - i""" - | - |Consider adding a @targetName annotation to one of the conflicting definitions - |for disambiguation.""" - else "" - i"have the same$nameAnd type$erasedType after erasure.$hint" - } +} + +class UnapplyInvalidReturnType(unapplyResult: Type, unapplyName: Name)(using Context) + extends DeclarationMsg(UnapplyInvalidReturnTypeID) { + def msg(using Context) = + val addendum = + if Feature.migrateTo3 && unapplyName == nme.unapplySeq + then "\nYou might want to try to rewrite the extractor to use `unapply` instead." + else "" + i"""| ${Red(i"$unapplyResult")} is not a valid result type of an $unapplyName method of an ${Magenta("extractor")}.$addendum""" + def explain(using Context) = if (unapplyName.show == "unapply") + i""" + |To be used as an extractor, an unapply method has to return a type that either: + | - has members ${Magenta("isEmpty: Boolean")} and ${Magenta("get: S")} (usually an ${Green("Option[S]")}) + | - is a ${Green("Boolean")} + | - is a ${Green("Product")} (like a ${Magenta("Tuple2[T1, T2]")}) of arity i with i >= 1, and has members _1 to _i + | + |See: https://docs.scala-lang.org/scala3/reference/changed-features/pattern-matching.html#fixed-arity-extractors + | + |Examples: + | + |class A(val i: Int) + | + |object B { + | def unapply(a: A): ${Green("Option[Int]")} = Some(a.i) + |} + | + |object C { + | def unapply(a: A): ${Green("Boolean")} = a.i == 2 + |} + | + |object D { + | def unapply(a: A): ${Green("(Int, Int)")} = (a.i, a.i) + |} + | + |object Test { + | def test(a: A) = a match { + | ${Magenta("case B(1)")} => 1 + | ${Magenta("case a @ C()")} => 2 + | ${Magenta("case D(3, 3)")} => 3 + | } + |} + """ + else + i""" + |To be used as an extractor, an unapplySeq method has to return a type which has members + |${Magenta("isEmpty: Boolean")} and ${Magenta("get: S")} where ${Magenta("S <: Seq[V]")} 
(usually an ${Green("Option[Seq[V]]")}): + | + |object CharList { + | def unapplySeq(s: String): ${Green("Option[Seq[Char]")} = Some(s.toList) + | + | "example" match { + | ${Magenta("case CharList(c1, c2, c3, c4, _, _, _)")} => + | println(s"$$c1,$$c2,$$c3,$$c4") + | case _ => + | println("Expected *exactly* 7 characters!") + | } + |} + """ +} + +class StaticFieldsOnlyAllowedInObjects(member: Symbol)(using Context) extends SyntaxMsg(StaticFieldsOnlyAllowedInObjectsID) { + def msg(using Context) = i"${hl("@static")} $member in ${member.owner} must be defined inside a static ${hl("object")}." + def explain(using Context) = + i"${hl("@static")} members are only allowed inside objects." +} + +class StaticFieldsShouldPrecedeNonStatic(member: Symbol, defns: List[tpd.Tree])(using Context) extends SyntaxMsg(StaticFieldsShouldPrecedeNonStaticID) { + def msg(using Context) = i"${hl("@static")} $member in ${member.owner} must be defined before non-static fields." + def explain(using Context) = { + val nonStatics = defns.takeWhile(_.symbol != member).take(3).filter(_.isInstanceOf[tpd.ValDef]) + val codeExample = s"""object ${member.owner.name.firstPart} { + | @static ${member} = ... + | ${nonStatics.map(m => s"${m.symbol} = ...").mkString("\n ")} + | ... + |}""" + i"""The fields annotated with @static should precede any non @static fields. + |This ensures that we do not introduce surprises for users in initialization order of this class. + |Static field are initialized when class loading the code of Foo. + |Non static fields are only initialized the first time that Foo is accessed. 
+ | + |The definition of ${member.name} should have been before the non ${hl("@static val")}s: + |$codeExample + |""" + } +} + +class CyclicInheritance(symbol: Symbol, addendum: => String)(using Context) extends SyntaxMsg(CyclicInheritanceID) { + def msg(using Context) = i"Cyclic inheritance: $symbol extends itself$addendum" + def explain(using Context) = { + val codeExample = "class A extends A" + + i"""Cyclic inheritance is prohibited in Dotty. + |Consider the following example: + | + |$codeExample + | + |The example mentioned above would fail because this type of inheritance hierarchy + |creates a "cycle" where a not yet defined class A extends itself which makes + |impossible to instantiate an object of this class""" + } +} + +class BadSymbolicReference(denot: SymDenotation)(using Context) +extends ReferenceMsg(BadSymbolicReferenceID) { + def msg(using Context) = { + val denotationOwner = denot.owner + val denotationName = ctx.fresh.setSetting(ctx.settings.YdebugNames, true).printer.nameString(denot.name) + val file = denot.symbol.associatedFile + val (location, src) = + if (file != null) (s" in $file", file.toString) + else ("", "the signature") + + i"""Bad symbolic reference. A signature$location + |refers to $denotationName in ${denotationOwner.showKind} ${denotationOwner.showFullName} which is not available. 
+ |It may be completely missing from the current classpath, or the version on + |the classpath might be incompatible with the version used when compiling $src.""" + } + + def explain(using Context) = "" +} + +class UnableToExtendSealedClass(pclazz: Symbol)(using Context) extends SyntaxMsg(UnableToExtendSealedClassID) { + def msg(using Context) = i"Cannot extend ${hl("sealed")} $pclazz in a different source file" + def explain(using Context) = "A sealed class or trait can only be extended in the same file as its declaration" +} + +class SymbolHasUnparsableVersionNumber(symbol: Symbol, errorMessage: String)(using Context) +extends SyntaxMsg(SymbolHasUnparsableVersionNumberID) { + def msg(using Context) = i"${symbol.showLocated} has an unparsable version number: $errorMessage" + def explain(using Context) = + i"""The ${symbol.showLocated} is marked with ${hl("@migration")} indicating it has changed semantics + |between versions and the ${hl("-Xmigration")} settings is used to warn about constructs + |whose behavior may have changed since version change.""" +} + +class SymbolChangedSemanticsInVersion( + symbol: Symbol, + migrationVersion: ScalaVersion, + migrationMessage: String +)(using Context) extends SyntaxMsg(SymbolChangedSemanticsInVersionID) { + def msg(using Context) = i"${symbol.showLocated} has changed semantics in version $migrationVersion: $migrationMessage" + def explain(using Context) = + i"""The ${symbol.showLocated} is marked with ${hl("@migration")} indicating it has changed semantics + |between versions and the ${hl("-Xmigration")} settings is used to warn about constructs + |whose behavior may have changed since version change.""" +} + +class UnableToEmitSwitch()(using Context) +extends SyntaxMsg(UnableToEmitSwitchID) { + def msg(using Context) = i"Could not emit switch for ${hl("@switch")} annotated match" + def explain(using Context) = { + val codeExample = + """val ConstantB = 'B' + |final val ConstantC = 'C' + |def tokenMe(ch: Char) = (ch: 
@switch) match { + | case '\t' | '\n' => 1 + | case 'A' => 2 + | case ConstantB => 3 // a non-literal may prevent switch generation: this would not compile + | case ConstantC => 4 // a constant value is allowed + | case _ => 5 + |}""".stripMargin + + i"""If annotated with ${hl("@switch")}, the compiler will verify that the match has been compiled to a + |tableswitch or lookupswitch and issue an error if it instead compiles into a series of conditional + |expressions. Example usage: + | + |$codeExample + | + |The compiler will not apply the optimisation if: + |- the matched value is not of type ${hl("Int")}, ${hl("Byte")}, ${hl("Short")} or ${hl("Char")} + |- the matched value is not a constant literal + |- there are less than three cases""" + } +} + +class MissingCompanionForStatic(member: Symbol)(using Context) +extends SyntaxMsg(MissingCompanionForStaticID) { + def msg(using Context) = i"${member.owner} does not have a companion class" + def explain(using Context) = + i"An object that contains ${hl("@static")} members must have a companion class." +} + +class PolymorphicMethodMissingTypeInParent(rsym: Symbol, parentSym: Symbol)(using Context) +extends SyntaxMsg(PolymorphicMethodMissingTypeInParentID) { + def msg(using Context) = i"Polymorphic refinement $rsym without matching type in parent $parentSym is no longer allowed" + def explain(using Context) = + i"""Polymorphic $rsym is not allowed in the structural refinement of $parentSym because + |$rsym does not override any method in $parentSym. 
Structural refinement does not allow for + |polymorphic methods.""" +} + +class ParamsNoInline(owner: Symbol)(using Context) + extends SyntaxMsg(ParamsNoInlineID) { + def msg(using Context) = i"""${hl("inline")} modifier can only be used for parameters of inline methods""" + def explain(using Context) = "" +} + +class JavaSymbolIsNotAValue(symbol: Symbol)(using Context) extends TypeMsg(JavaSymbolIsNotAValueID) { + def msg(using Context) = + val kind = + if symbol is Package then i"$symbol" + else i"Java defined ${hl("class " + symbol.name)}" + s"$kind is not a value" + def explain(using Context) = "" +} + +class DoubleDefinition(decl: Symbol, previousDecl: Symbol, base: Symbol)(using Context) +extends NamingMsg(DoubleDefinitionID) { + def msg(using Context) = { + def nameAnd = if (decl.name != previousDecl.name) " name and" else "" + def erasedType = if ctx.erasedTypes then i" ${decl.info}" else "" + def details(using Context): String = + if (decl.isRealMethod && previousDecl.isRealMethod) { + import Signature.MatchDegree._ + + // compare the signatures when both symbols represent methods + decl.signature.matchDegree(previousDecl.signature) match { + case NoMatch => + // If the signatures don't match at all at the current phase, then + // they might match after erasure. + if ctx.phase.id <= elimErasedValueTypePhase.id then + atPhase(elimErasedValueTypePhase.next)(details) + else + "" // shouldn't be reachable + case ParamMatch => + "have matching parameter types." + case MethodNotAMethodMatch => + "neither has parameters." 
+ case FullMatch => + val hint = + if !decl.hasAnnotation(defn.TargetNameAnnot) + && !previousDecl.hasAnnotation(defn.TargetNameAnnot) + then + i""" + | + |Consider adding a @targetName annotation to one of the conflicting definitions + |for disambiguation.""" + else "" + i"have the same$nameAnd type$erasedType after erasure.$hint" } - else "" - def symLocation(sym: Symbol) = { - val lineDesc = - if (sym.span.exists && sym.span != sym.owner.span) - s" at line ${sym.srcPos.line + 1}" - else "" - i"in ${sym.owner}${lineDesc}" } - val clashDescription = - if (decl.owner eq previousDecl.owner) - "Double definition" - else if ((decl.owner eq base) || (previousDecl eq base)) - "Name clash between defined and inherited member" - else - "Name clash between inherited members" - - atPhase(typerPhase) { - em"""$clashDescription: - |${previousDecl.showDcl} ${symLocation(previousDecl)} and - |${decl.showDcl} ${symLocation(decl)} - |""" - } + details + else "" + def symLocation(sym: Symbol) = { + val lineDesc = + if (sym.span.exists && sym.span != sym.owner.span) + s" at line ${sym.srcPos.line + 1}" + else "" + i"in ${sym.owner}${lineDesc}" } - def explain = "" - } - - class ImportRenamedTwice(ident: untpd.Ident)(using Context) extends SyntaxMsg(ImportRenamedTwiceID) { - def msg = s"${ident.show} is renamed twice on the same import line." - def explain = "" - } - - class TypeTestAlwaysDiverges(scrutTp: Type, testTp: Type)(using Context) extends SyntaxMsg(TypeTestAlwaysDivergesID) { - def msg = - s"This type test will never return a result since the scrutinee type ${scrutTp.show} does not contain any value." 
- def explain = "" - } - - // Relative of CyclicReferenceInvolvingImplicit and RecursiveValueNeedsResultType - class TermMemberNeedsResultTypeForImplicitSearch(cycleSym: Symbol)(using Context) - extends CyclicMsg(TermMemberNeedsNeedsResultTypeForImplicitSearchID) { - def msg = em"""$cycleSym needs result type because its right-hand side attempts implicit search""" - def explain = - em"""|The right hand-side of $cycleSym's definition requires an implicit search at the highlighted position. - |To avoid this error, give `$cycleSym` an explicit type. - |""".stripMargin - } - - class ClassCannotExtendEnum(cls: Symbol, parent: Symbol)(using Context) extends SyntaxMsg(ClassCannotExtendEnumID) { - def msg = em"""$cls in ${cls.owner} extends enum ${parent.name}, but extending enums is prohibited.""" - def explain = "" - } - - class NotAnExtractor(tree: untpd.Tree)(using Context) extends SyntaxMsg(NotAnExtractorID) { - def msg = em"$tree cannot be used as an extractor in a pattern because it lacks an unapply or unapplySeq method" - def explain = - em"""|An ${hl("unapply")} method should be defined in an ${hl("object")} as follow: - | - If it is just a test, return a ${hl("Boolean")}. For example ${hl("case even()")} - | - If it returns a single sub-value of type T, return an ${hl("Option[T]")} - | - If it returns several sub-values T1,...,Tn, group them in an optional tuple ${hl("Option[(T1,...,Tn)]")} - | - |Sometimes, the number of sub-values isn't fixed and we would like to return a sequence. - |For this reason, you can also define patterns through ${hl("unapplySeq")} which returns ${hl("Option[Seq[T]]")}. 
- |This mechanism is used for instance in pattern ${hl("case List(x1, ..., xn)")}""".stripMargin - } - - class MemberWithSameNameAsStatic()(using Context) - extends SyntaxMsg(MemberWithSameNameAsStaticID) { - def msg = em"Companion classes cannot define members with same name as a ${hl("@static")} member" - def explain = "" - } - - class PureExpressionInStatementPosition(stat: untpd.Tree, val exprOwner: Symbol)(using Context) - extends Message(PureExpressionInStatementPositionID) { - def kind = MessageKind.PotentialIssue - def msg = "A pure expression does nothing in statement position; you may be omitting necessary parentheses" - def explain = - em"""The pure expression $stat doesn't have any side effect and its result is not assigned elsewhere. - |It can be removed without changing the semantics of the program. This may indicate an error.""".stripMargin - } - - class TraitCompanionWithMutableStatic()(using Context) - extends SyntaxMsg(TraitCompanionWithMutableStaticID) { - def msg = em"Companion of traits cannot define mutable @static fields" - def explain = "" - } - - class LazyStaticField()(using Context) - extends SyntaxMsg(LazyStaticFieldID) { - def msg = em"Lazy @static fields are not supported" - def explain = "" - } - - class StaticOverridingNonStaticMembers()(using Context) - extends SyntaxMsg(StaticOverridingNonStaticMembersID) { - def msg = em"${hl("@static")} members cannot override or implement non-static ones" - def explain = "" - } - - class OverloadInRefinement(rsym: Symbol)(using Context) - extends DeclarationMsg(OverloadInRefinementID) { - def msg = "Refinements cannot introduce overloaded definitions" - def explain = - em"""The refinement `$rsym` introduces an overloaded definition. 
- |Refinements cannot contain overloaded definitions.""".stripMargin - } - - class NoMatchingOverload(val alternatives: List[SingleDenotation], pt: Type)(using Context) - extends TypeMsg(NoMatchingOverloadID) { - def msg = - em"""None of the ${err.overloadedAltsStr(alternatives)} - |match ${err.expectedTypeStr(pt)}""" - def explain = "" - } - class StableIdentPattern(tree: untpd.Tree, pt: Type)(using Context) - extends TypeMsg(StableIdentPatternID) { - def msg = - em"""Stable identifier required, but $tree found""" - def explain = "" - } + val clashDescription = + if (decl.owner eq previousDecl.owner) + "Double definition" + else if ((decl.owner eq base) || (previousDecl eq base)) + "Name clash between defined and inherited member" + else + "Name clash between inherited members" - class IllegalSuperAccessor(base: Symbol, memberName: Name, targetName: Name, - acc: Symbol, accTp: Type, - other: Symbol, otherTp: Type)(using Context) extends DeclarationMsg(IllegalSuperAccessorID) { - def msg = { - // The mixin containing a super-call that requires a super-accessor - val accMixin = acc.owner - // The class or trait that the super-accessor should resolve too in `base` - val otherMixin = other.owner - // The super-call in `accMixin` - val superCall = hl(i"super.$memberName") - // The super-call that the super-accesors in `base` forwards to - val resolvedSuperCall = hl(i"super[${otherMixin.name}].$memberName") - // The super-call that we would have called if `super` in traits behaved like it - // does in classes, i.e. followed the linearization of the trait itself. 
- val staticSuperCall = { - val staticSuper = accMixin.asClass.info.parents.reverse - .find(_.nonPrivateMember(memberName) - .matchingDenotation(accMixin.thisType, acc.info, targetName).exists) - val staticSuperName = staticSuper match { - case Some(parent) => - parent.classSymbol.name.show - case None => // Might be reachable under separate compilation - "SomeParent" - } - hl(i"super[$staticSuperName].$memberName") + atPhase(typerPhase) { + i"""$clashDescription: + |${previousDecl.showDcl} ${symLocation(previousDecl)} and + |${decl.showDcl} ${symLocation(decl)} + |""" + } + details + } + def explain(using Context) = "" +} + +class ImportRenamedTwice(ident: untpd.Ident)(using Context) extends SyntaxMsg(ImportRenamedTwiceID) { + def msg(using Context) = s"${ident.show} is renamed twice on the same import line." + def explain(using Context) = "" +} + +class TypeTestAlwaysDiverges(scrutTp: Type, testTp: Type)(using Context) extends SyntaxMsg(TypeTestAlwaysDivergesID) { + def msg(using Context) = + s"This type test will never return a result since the scrutinee type ${scrutTp.show} does not contain any value." + def explain(using Context) = "" +} + +// Relative of CyclicReferenceInvolvingImplicit and RecursiveValueNeedsResultType +class TermMemberNeedsResultTypeForImplicitSearch(cycleSym: Symbol)(using Context) + extends CyclicMsg(TermMemberNeedsNeedsResultTypeForImplicitSearchID) { + def msg(using Context) = i"""$cycleSym needs result type because its right-hand side attempts implicit search""" + def explain(using Context) = + i"""|The right hand-side of $cycleSym's definition requires an implicit search at the highlighted position. + |To avoid this error, give `$cycleSym` an explicit type. 
+ |""" +} + +class ClassCannotExtendEnum(cls: Symbol, parent: Symbol)(using Context) extends SyntaxMsg(ClassCannotExtendEnumID) { + def msg(using Context) = i"""$cls in ${cls.owner} extends enum ${parent.name}, but extending enums is prohibited.""" + def explain(using Context) = "" +} + +class NotAnExtractor(tree: untpd.Tree)(using Context) extends SyntaxMsg(NotAnExtractorID) { + def msg(using Context) = i"$tree cannot be used as an extractor in a pattern because it lacks an unapply or unapplySeq method" + def explain(using Context) = + i"""|An ${hl("unapply")} method should be defined in an ${hl("object")} as follow: + | - If it is just a test, return a ${hl("Boolean")}. For example ${hl("case even()")} + | - If it returns a single sub-value of type T, return an ${hl("Option[T]")} + | - If it returns several sub-values T1,...,Tn, group them in an optional tuple ${hl("Option[(T1,...,Tn)]")} + | + |Sometimes, the number of sub-values isn't fixed and we would like to return a sequence. + |For this reason, you can also define patterns through ${hl("unapplySeq")} which returns ${hl("Option[Seq[T]]")}. + |This mechanism is used for instance in pattern ${hl("case List(x1, ..., xn)")}""" +} + +class MemberWithSameNameAsStatic()(using Context) + extends SyntaxMsg(MemberWithSameNameAsStaticID) { + def msg(using Context) = i"Companion classes cannot define members with same name as a ${hl("@static")} member" + def explain(using Context) = "" +} + +class PureExpressionInStatementPosition(stat: untpd.Tree, val exprOwner: Symbol)(using Context) + extends Message(PureExpressionInStatementPositionID) { + def kind = MessageKind.PotentialIssue + def msg(using Context) = "A pure expression does nothing in statement position; you may be omitting necessary parentheses" + def explain(using Context) = + i"""The pure expression $stat doesn't have any side effect and its result is not assigned elsewhere. + |It can be removed without changing the semantics of the program. 
This may indicate an error.""" +} + +class UnqualifiedCallToAnyRefMethod(stat: untpd.Tree, method: Symbol)(using Context) + extends Message(UnqualifiedCallToAnyRefMethodID) { + def kind = MessageKind.PotentialIssue + def msg(using Context) = i"Suspicious top-level unqualified call to ${hl(method.name.toString)}" + def explain(using Context) = + i"""Top-level unqualified calls to ${hl("AnyRef")} or ${hl("Any")} methods such as ${hl(method.name.toString)} are + |resolved to calls on ${hl("Predef")} or on imported methods. This might not be what + |you intended.""" +} + +class TraitCompanionWithMutableStatic()(using Context) + extends SyntaxMsg(TraitCompanionWithMutableStaticID) { + def msg(using Context) = i"Companion of traits cannot define mutable @static fields" + def explain(using Context) = "" +} + +class LazyStaticField()(using Context) + extends SyntaxMsg(LazyStaticFieldID) { + def msg(using Context) = i"Lazy @static fields are not supported" + def explain(using Context) = "" +} + +class StaticOverridingNonStaticMembers()(using Context) + extends SyntaxMsg(StaticOverridingNonStaticMembersID) { + def msg(using Context) = i"${hl("@static")} members cannot override or implement non-static ones" + def explain(using Context) = "" +} + +class OverloadInRefinement(rsym: Symbol)(using Context) + extends DeclarationMsg(OverloadInRefinementID) { + def msg(using Context) = "Refinements cannot introduce overloaded definitions" + def explain(using Context) = + i"""The refinement `$rsym` introduces an overloaded definition. 
+ |Refinements cannot contain overloaded definitions.""" +} + +class NoMatchingOverload(val alternatives: List[SingleDenotation], pt: Type)(using Context) + extends TypeMsg(NoMatchingOverloadID) { + def msg(using Context) = + i"""None of the ${err.overloadedAltsStr(alternatives)} + |match ${err.expectedTypeStr(pt)}""" + def explain(using Context) = "" +} +class StableIdentPattern(tree: untpd.Tree, pt: Type)(using Context) + extends TypeMsg(StableIdentPatternID) { + def msg(using Context) = + i"""Stable identifier required, but $tree found""" + def explain(using Context) = "" +} + +class IllegalSuperAccessor(base: Symbol, memberName: Name, targetName: Name, + acc: Symbol, accTp: Type, + other: Symbol, otherTp: Type)(using Context) extends DeclarationMsg(IllegalSuperAccessorID) { + def msg(using Context) = { + // The mixin containing a super-call that requires a super-accessor + val accMixin = acc.owner + // The class or trait that the super-accessor should resolve too in `base` + val otherMixin = other.owner + // The super-call in `accMixin` + val superCall = hl(i"super.$memberName") + // The super-call that the super-accesors in `base` forwards to + val resolvedSuperCall = hl(i"super[${otherMixin.name}].$memberName") + // The super-call that we would have called if `super` in traits behaved like it + // does in classes, i.e. followed the linearization of the trait itself. + val staticSuperCall = { + val staticSuper = accMixin.asClass.info.parents.reverse + .find(_.nonPrivateMember(memberName) + .matchingDenotation(accMixin.thisType, acc.info, targetName).exists) + val staticSuperName = staticSuper match { + case Some(parent) => + parent.classSymbol.name.show + case None => // Might be reachable under separate compilation + "SomeParent" } - ex"""$base cannot be defined due to a conflict between its parents when - |implementing a super-accessor for $memberName in $accMixin: - | - |1. 
One of its parent (${accMixin.name}) contains a call $superCall in its body, - | and when a super-call in a trait is written without an explicit parent - | listed in brackets, it is implemented by a generated super-accessor in - | the class that extends this trait based on the linearization order of - | the class. - |2. Because ${otherMixin.name} comes before ${accMixin.name} in the linearization - | order of ${base.name}, and because ${otherMixin.name} overrides $memberName, - | the super-accessor in ${base.name} is implemented as a call to - | $resolvedSuperCall. - |3. However, - | ${otherTp.widenExpr} (the type of $resolvedSuperCall in ${base.name}) - | is not a subtype of - | ${accTp.widenExpr} (the type of $memberName in $accMixin). - | Hence, the super-accessor that needs to be generated in ${base.name} - | is illegal. - | - |Here are two possible ways to resolve this: - | - |1. Change the linearization order of ${base.name} such that - | ${accMixin.name} comes before ${otherMixin.name}. - |2. Alternatively, replace $superCall in the body of $accMixin by a - | super-call to a specific parent, e.g. $staticSuperCall - |""".stripMargin + hl(i"super[$staticSuperName].$memberName") } - def explain = "" - } + i"""$base cannot be defined due to a conflict between its parents when + |implementing a super-accessor for $memberName in $accMixin: + | + |1. One of its parent (${accMixin.name}) contains a call $superCall in its body, + | and when a super-call in a trait is written without an explicit parent + | listed in brackets, it is implemented by a generated super-accessor in + | the class that extends this trait based on the linearization order of + | the class. + |2. Because ${otherMixin.name} comes before ${accMixin.name} in the linearization + | order of ${base.name}, and because ${otherMixin.name} overrides $memberName, + | the super-accessor in ${base.name} is implemented as a call to + | $resolvedSuperCall. + |3. 
However, + | ${otherTp.widenExpr} (the type of $resolvedSuperCall in ${base.name}) + | is not a subtype of + | ${accTp.widenExpr} (the type of $memberName in $accMixin). + | Hence, the super-accessor that needs to be generated in ${base.name} + | is illegal. + | + |Here are two possible ways to resolve this: + | + |1. Change the linearization order of ${base.name} such that + | ${accMixin.name} comes before ${otherMixin.name}. + |2. Alternatively, replace $superCall in the body of $accMixin by a + | super-call to a specific parent, e.g. $staticSuperCall + |""" + } + def explain(using Context) = "" +} + +class TraitParameterUsedAsParentPrefix(cls: Symbol)(using Context) + extends DeclarationMsg(TraitParameterUsedAsParentPrefixID) { + def msg(using Context) = + s"${cls.show} cannot extend from a parent that is derived via its own parameters" + def explain(using Context) = + i""" + |The parent class/trait that ${cls.show} extends from is obtained from + |the parameter of ${cls.show}. This is disallowed in order to prevent + |outer-related Null Pointer Exceptions in Scala. + | + |In order to fix this issue consider directly extending from the parent rather + |than obtaining it from the parameters of ${cls.show}. + |""" +} + +class UnknownNamedEnclosingClassOrObject(name: TypeName)(using Context) + extends ReferenceMsg(UnknownNamedEnclosingClassOrObjectID) { + def msg(using Context) = + i"""no enclosing class or object is named '${hl(name.show)}'""" + def explain(using Context) = + i""" + |The class or object named '${hl(name.show)}' was used as a visibility + |modifier, but could not be resolved. Make sure that + |'${hl(name.show)}' is not misspelled and has been imported into the + |current scope. + """ + } + +class IllegalCyclicTypeReference(sym: Symbol, where: String, lastChecked: Type)(using Context) + extends CyclicMsg(IllegalCyclicTypeReferenceID) { + def msg(using Context) = + val lastCheckedStr = + try lastChecked.show + catch case ex: CyclicReference => "..." 
+ i"illegal cyclic type reference: ${where} ${hl(lastCheckedStr)} of $sym refers back to the type itself" + def explain(using Context) = "" +} + +class ErasedTypesCanOnlyBeFunctionTypes()(using Context) + extends SyntaxMsg(ErasedTypesCanOnlyBeFunctionTypesID) { + def msg(using Context) = "Types with erased keyword can only be function types `(erased ...) => ...`" + def explain(using Context) = "" +} + +class CaseClassMissingNonImplicitParamList(cdef: untpd.TypeDef)(using Context) + extends SyntaxMsg(CaseClassMissingNonImplicitParamListID) { + def msg(using Context) = + i"""|A ${hl("case class")} must have at least one leading non-implicit parameter list""" + + def explain(using Context) = + i"""|${cdef.name} must have at least one leading non-implicit parameter list, + | if you're aiming to have a case class parametrized only by implicit ones, you should + | add an explicit ${hl("()")} as the first parameter list to ${cdef.name}.""" +} + +class EnumerationsShouldNotBeEmpty(cdef: untpd.TypeDef)(using Context) + extends SyntaxMsg(EnumerationsShouldNotBeEmptyID) { + def msg(using Context) = "Enumerations must contain at least one case" + + def explain(using Context) = + i"""|Enumeration ${cdef.name} must contain at least one case + |Example Usage: + | ${hl("enum")} ${cdef.name} { + | ${hl("case")} Option1, Option2 + | } + |""" +} + +class TypedCaseDoesNotExplicitlyExtendTypedEnum(enumDef: Symbol, caseDef: untpd.TypeDef)(using Context) + extends SyntaxMsg(TypedCaseDoesNotExplicitlyExtendTypedEnumID) { + def msg(using Context) = i"explicit extends clause needed because both enum case and enum class have type parameters" + + def explain(using Context) = + i"""Enumerations where the enum class as well as the enum case have type parameters need + |an explicit extends. 
+ |for example: + | ${hl("enum")} ${enumDef.name}[T] { + | ${hl("case")} ${caseDef.name}[U](u: U) ${hl("extends")} ${enumDef.name}[U] + | } + |""" +} + +class IllegalRedefinitionOfStandardKind(kindType: String, name: Name)(using Context) + extends SyntaxMsg(IllegalRedefinitionOfStandardKindID) { + def msg(using Context) = i"illegal redefinition of standard $kindType $name" + def explain(using Context) = + i"""| "$name" is a standard Scala core `$kindType` + | Please choose a different name to avoid conflicts + |""" +} + +class NoExtensionMethodAllowed(mdef: untpd.DefDef)(using Context) + extends SyntaxMsg(NoExtensionMethodAllowedID) { + def msg(using Context) = i"No extension method allowed here, since collective parameters are given" + def explain(using Context) = + i"""|Extension method: + | `${mdef}` + |is defined inside an extension clause which has collective parameters. + |""" +} - class TraitParameterUsedAsParentPrefix(cls: Symbol)(using Context) - extends DeclarationMsg(TraitParameterUsedAsParentPrefixID) { - def msg = - s"${cls.show} cannot extend from a parent that is derived via its own parameters" - def explain = - ex""" - |The parent class/trait that ${cls.show} extends from is obtained from - |the parameter of ${cls.show}. This is disallowed in order to prevent - |outer-related Null Pointer Exceptions in Scala. - | - |In order to fix this issue consider directly extending from the parent rather - |than obtaining it from the parameters of ${cls.show}. 
- |""".stripMargin - } +class ExtensionMethodCannotHaveTypeParams(mdef: untpd.DefDef)(using Context) + extends SyntaxMsg(ExtensionMethodCannotHaveTypeParamsID) { + def msg(using Context) = i"Extension method cannot have type parameters since some were already given previously" - class UnknownNamedEnclosingClassOrObject(name: TypeName)(using Context) - extends ReferenceMsg(UnknownNamedEnclosingClassOrObjectID) { - def msg = - em"""no enclosing class or object is named '${hl(name.show)}'""" - def explain = - ex""" - |The class or object named '${hl(name.show)}' was used as a visibility - |modifier, but could not be resolved. Make sure that - |'${hl(name.show)}' is not misspelled and has been imported into the - |current scope. - """.stripMargin + def explain(using Context) = + i"""|Extension method: + | `${mdef}` + |has type parameters `[${mdef.leadingTypeParams.map(_.show).mkString(",")}]`, while the extension clause has + |it's own type parameters. Please consider moving these to the extension clause's type parameter list. + |""" +} + +class ExtensionCanOnlyHaveDefs(mdef: untpd.Tree)(using Context) + extends SyntaxMsg(ExtensionCanOnlyHaveDefsID) { + def msg(using Context) = i"Only methods allowed here, since collective parameters are given" + def explain(using Context) = + i"""Extension clauses can only have `def`s + | `${mdef.show}` is not a valid expression here. + |""" +} + +class UnexpectedPatternForSummonFrom(tree: Tree[_])(using Context) + extends SyntaxMsg(UnexpectedPatternForSummonFromID) { + def msg(using Context) = i"Unexpected pattern for summonFrom. Expected ${hl("`x: T`")} or ${hl("`_`")}" + def explain(using Context) = + i"""|The pattern "${tree.show}" provided in the ${hl("case")} expression of the ${hl("summonFrom")}, + | needs to be of the form ${hl("`x: T`")} or ${hl("`_`")}. + | + | Example usage: + | inline def a = summonFrom { + | case x: T => ??? + | } + | + | or + | inline def a = summonFrom { + | case _ => ??? 
+ | } + |""" +} + +class AnonymousInstanceCannotBeEmpty(impl: untpd.Template)(using Context) + extends SyntaxMsg(AnonymousInstanceCannotBeEmptyID) { + def msg(using Context) = i"anonymous instance must implement a type or have at least one extension method" + def explain(using Context) = + i"""|Anonymous instances cannot be defined with an empty body. The block + |`${impl.show}` should either contain an implemented type or at least one extension method. + |""" +} + +class ModifierNotAllowedForDefinition(flag: Flag)(using Context) + extends SyntaxMsg(ModifierNotAllowedForDefinitionID) { + def msg(using Context) = i"Modifier ${hl(flag.flagsString)} is not allowed for this definition" + def explain(using Context) = "" +} + +class RedundantModifier(flag: Flag)(using Context) + extends SyntaxMsg(RedundantModifierID) { + def msg(using Context) = i"Modifier ${hl(flag.flagsString)} is redundant for this definition" + def explain(using Context) = "" +} + +class InvalidReferenceInImplicitNotFoundAnnotation(typeVar: String, owner: String)(using Context) + extends ReferenceMsg(InvalidReferenceInImplicitNotFoundAnnotationID) { + def msg(using Context) = i"""|Invalid reference to a type variable ${hl(typeVar)} found in the annotation argument. + |The variable does not occur as a parameter in the scope of ${hl(owner)}. + |""" + def explain(using Context) = "" +} + +class CaseClassInInlinedCode(tree: tpd.Tree)(using Context) + extends SyntaxMsg(CaseClassInInlinedCodeID) { + + def defKind = if tree.symbol.is(Module) then "object" else "class" + def msg(using Context) = s"Case $defKind definitions are not allowed in inline methods or quoted code. Use a normal $defKind instead." + def explain(using Context) = + i"""Case class/object definitions generate a considerable footprint in code size. + |Inlining such definition would multiply this footprint for each call site. 
+ |""" +} + +class ImplicitSearchTooLargeWarning(limit: Int, openSearchPairs: List[(Candidate, Type)])(using Context) + extends TypeMsg(ImplicitSearchTooLargeID): + override def showAlways = true + def showQuery(query: (Candidate, Type))(using Context): String = + i" ${query._1.ref.symbol.showLocated} for ${query._2}}" + def msg(using Context) = + i"""Implicit search problem too large. + |an implicit search was terminated with failure after trying $limit expressions. + |The root candidate for the search was: + | + |${showQuery(openSearchPairs.last)} + | + |You can change the behavior by setting the `-Ximplicit-search-limit` value. + |Smaller values cause the search to fail faster. + |Larger values might make a very large search problem succeed. + |""" + def explain(using Context) = + i"""The overflow happened with the following lists of tried expressions and target types, + |starting with the root query: + | + |${openSearchPairs.reverse.map(showQuery)}%\n% + """ + +class TargetNameOnTopLevelClass(symbol: Symbol)(using Context) +extends SyntaxMsg(TargetNameOnTopLevelClassID): + def msg(using Context) = i"${hl("@targetName")} annotation not allowed on top-level $symbol" + def explain(using Context) = + val annot = symbol.getAnnotation(defn.TargetNameAnnot).get + i"""The @targetName annotation may be applied to a top-level ${hl("val")} or ${hl("def")}, but not + |a top-level ${hl("class")}, ${hl("trait")}, or ${hl("object")}. + | + |This restriction is due to the naming convention of Java classfiles, whose filenames + |are based on the name of the class defined within. If @targetName were permitted + |here, the name of the classfile would be based on the target name, and the compiler + |could not associate that classfile with the Scala-visible defined name of the class. 
+ | + |If your use case requires @targetName, consider wrapping $symbol in an ${hl("object")} + |(and possibly exporting it), as in the following example: + | + |${hl("object Wrapper:")} + | $annot $symbol { ... } + | + |${hl("export")} Wrapper.${symbol.name} ${hl("// optional")}""" + +class NotClassType(tp: Type)(using Context) +extends TypeMsg(NotClassTypeID), ShowMatchTrace(tp): + def msg(using Context) = i"$tp is not a class type" + def explain(using Context) = "" + +class MissingImplicitArgument( + arg: tpd.Tree, + pt: Type, + where: String, + paramSymWithMethodCallTree: Option[(Symbol, tpd.Tree)] = None, + ignoredInstanceNormalImport: => Option[SearchSuccess], + ignoredConvertibleImplicits: => Iterable[TermRef] + )(using Context) extends TypeMsg(MissingImplicitArgumentID), ShowMatchTrace(pt): + + arg.tpe match + case ambi: AmbiguousImplicits => withoutDisambiguation() + case _ => + + /** Format `raw` implicitNotFound or implicitAmbiguous argument, replacing + * all occurrences of `${X}` where `X` is in `paramNames` with the + * corresponding shown type in `args`. + */ + def userDefinedErrorString(raw: String, paramNames: List[String], args: List[Type])(using Context): String = + def translate(name: String): Option[String] = + val idx = paramNames.indexOf(name) + if (idx >= 0) Some(i"${args(idx)}") else None + """\$\{\s*([^}\s]+)\s*\}""".r.replaceAllIn(raw, (_: Regex.Match) match + case Regex.Groups(v) => quoteReplacement(translate(v).getOrElse("")).nn + ) + + /** @param rawMsg Message template with variables, e.g. 
"Variable A is ${A}" + * @param sym Symbol of the annotated type or of the method whose parameter was annotated + * @param substituteType Function substituting specific types for abstract types associated with variables, e.g A -> Int + */ + def formatAnnotationMessage(rawMsg: String, sym: Symbol, substituteType: Type => Type)(using Context): String = + val substitutableTypesSymbols = substitutableTypeSymbolsInScope(sym) + userDefinedErrorString( + rawMsg, + paramNames = substitutableTypesSymbols.map(_.name.unexpandedName.toString), + args = substitutableTypesSymbols.map(_.typeRef).map(substituteType) + ) + + /** Extract a user defined error message from a symbol `sym` + * with an annotation matching the given class symbol `cls`. + */ + def userDefinedMsg(sym: Symbol, cls: Symbol)(using Context) = + for + ann <- sym.getAnnotation(cls) + msg <- ann.argumentConstantString(0) + yield msg + + def userDefinedImplicitNotFoundTypeMessageFor(sym: Symbol)(using Context): Option[String] = + for + rawMsg <- userDefinedMsg(sym, defn.ImplicitNotFoundAnnot) + if Feature.migrateTo3 || sym != defn.Function1 + // Don't inherit "No implicit view available..." message if subtypes of Function1 are not treated as implicit conversions anymore + yield + val substituteType = (_: Type).asSeenFrom(pt, sym) + formatAnnotationMessage(rawMsg, sym, substituteType) + + /** Extracting the message from a method parameter, e.g. in + * + * trait Foo + * + * def foo(implicit @annotation.implicitNotFound("Foo is missing") foo: Foo): Any = ??? 
+ */ + def userDefinedImplicitNotFoundParamMessage(using Context): Option[String] = + paramSymWithMethodCallTree.flatMap: (sym, applTree) => + userDefinedMsg(sym, defn.ImplicitNotFoundAnnot).map: rawMsg => + val fn = tpd.funPart(applTree) + val targs = tpd.typeArgss(applTree).flatten + val methodOwner = fn.symbol.owner + val methodOwnerType = tpd.qualifier(fn).tpe + val methodTypeParams = fn.symbol.paramSymss.flatten.filter(_.isType) + val methodTypeArgs = targs.map(_.tpe) + val substituteType = (_: Type).asSeenFrom(methodOwnerType, methodOwner).subst(methodTypeParams, methodTypeArgs) + formatAnnotationMessage(rawMsg, sym.owner, substituteType) + + def userDefinedImplicitNotFoundTypeMessage(using Context): Option[String] = + def recur(tp: Type): Option[String] = tp match + case tp: TypeRef => + val sym = tp.symbol + userDefinedImplicitNotFoundTypeMessageFor(sym).orElse(recur(tp.info)) + case tp: ClassInfo => + tp.baseClasses.iterator + .map(userDefinedImplicitNotFoundTypeMessageFor) + .find(_.isDefined).flatten + case tp: TypeProxy => + recur(tp.superType) + case tp: AndType => + recur(tp.tp1).orElse(recur(tp.tp2)) + case _ => + None + recur(pt) + + /** The implicitNotFound annotation on the parameter, or else on the type. + * implicitNotFound message strings starting with `explain=` are intended for + * additional explanations, not the message proper. The leading `explain=` is + * dropped in this case. + * @param explain The message is used for an additional explanation, not + * the message proper. 
+ */ + def userDefinedImplicitNotFoundMessage(explain: Boolean)(using Context): Option[String] = + val explainTag = "explain=" + def filter(msg: Option[String]) = msg match + case Some(str) => + if str.startsWith(explainTag) then + if explain then Some(str.drop(explainTag.length)) else None + else if explain then None + else msg + case None => None + filter(userDefinedImplicitNotFoundParamMessage) + .orElse(filter(userDefinedImplicitNotFoundTypeMessage)) + + object AmbiguousImplicitMsg { + def unapply(search: SearchSuccess): Option[String] = + userDefinedMsg(search.ref.symbol, defn.ImplicitAmbiguousAnnot) + } + + def msg(using Context): String = + + def formatMsg(shortForm: String)(headline: String = shortForm) = arg match + case arg: Trees.SearchFailureIdent[?] => + arg.tpe match + case _: NoMatchingImplicits => headline + case tpe: SearchFailureType => + i"$headline. ${tpe.explanation}" + case _ => headline + case _ => + arg.tpe match + case tpe: SearchFailureType => + val original = arg match + case Inlined(call, _, _) => call + case _ => arg + i"""$headline. + |I found: + | + | ${original.show.replace("\n", "\n ")} + | + |But ${tpe.explanation}.""" + case _ => headline + + def location(preposition: String) = if (where.isEmpty) "" else s" $preposition $where" + + def defaultAmbiguousImplicitMsg(ambi: AmbiguousImplicits) = + s"Ambiguous given instances: ${ambi.explanation}${location("of")}" + + def defaultImplicitNotFoundMessage = + i"No given instance of type $pt was found${location("for")}" + + /** Construct a custom error message given an ambiguous implicit + * candidate `alt` and a user defined message `raw`. 
+ */ + def userDefinedAmbiguousImplicitMsg(alt: SearchSuccess, raw: String) = { + val params = alt.ref.underlying match { + case p: PolyType => p.paramNames.map(_.toString) + case _ => Nil + } + def resolveTypes(targs: List[tpd.Tree])(using Context) = + targs.map(a => Inferencing.fullyDefinedType(a.tpe, "type argument", a.srcPos)) + + // We can extract type arguments from: + // - a function call: + // @implicitAmbiguous("msg A=${A}") + // implicit def f[A](): String = ... + // implicitly[String] // found: f[Any]() + // + // - an eta-expanded function: + // @implicitAmbiguous("msg A=${A}") + // implicit def f[A](x: Int): String = ... + // implicitly[Int => String] // found: x => f[Any](x) + + val call = tpd.closureBody(alt.tree) // the tree itself if not a closure + val targs = tpd.typeArgss(call).flatten + val args = resolveTypes(targs)(using ctx.fresh.setTyperState(alt.tstate)) + userDefinedErrorString(raw, params, args) } - class IllegalCyclicTypeReference(sym: Symbol, where: String, lastChecked: Type)(using Context) - extends CyclicMsg(IllegalCyclicTypeReferenceID) { - def msg = - val lastCheckedStr = - try lastChecked.show - catch case ex: CyclicReference => "..." - i"illegal cyclic type reference: ${where} ${hl(lastCheckedStr)} of $sym refers back to the type itself" - def explain = "" - } - - class ErasedTypesCanOnlyBeFunctionTypes()(using Context) - extends SyntaxMsg(ErasedTypesCanOnlyBeFunctionTypesID) { - def msg = "Types with erased keyword can only be function types `(erased ...) 
=> ...`" - def explain = "" - } - - class CaseClassMissingNonImplicitParamList(cdef: untpd.TypeDef)(using Context) - extends SyntaxMsg(CaseClassMissingNonImplicitParamListID) { - def msg = - em"""|A ${hl("case class")} must have at least one leading non-implicit parameter list""" - - def explain = - em"""|${cdef.name} must have at least one leading non-implicit parameter list, - | if you're aiming to have a case class parametrized only by implicit ones, you should - | add an explicit ${hl("()")} as the first parameter list to ${cdef.name}.""".stripMargin - } - - class EnumerationsShouldNotBeEmpty(cdef: untpd.TypeDef)(using Context) - extends SyntaxMsg(EnumerationsShouldNotBeEmptyID) { - def msg = "Enumerations must contain at least one case" - - def explain = - em"""|Enumeration ${cdef.name} must contain at least one case - |Example Usage: - | ${hl("enum")} ${cdef.name} { - | ${hl("case")} Option1, Option2 - | } - |""".stripMargin - } - - class TypedCaseDoesNotExplicitlyExtendTypedEnum(enumDef: Symbol, caseDef: untpd.TypeDef)(using Context) - extends SyntaxMsg(TypedCaseDoesNotExplicitlyExtendTypedEnumID) { - def msg = i"explicit extends clause needed because both enum case and enum class have type parameters" - - def explain = - em"""Enumerations where the enum class as well as the enum case have type parameters need - |an explicit extends. 
- |for example: - | ${hl("enum")} ${enumDef.name}[T] { - | ${hl("case")} ${caseDef.name}[U](u: U) ${hl("extends")} ${enumDef.name}[U] - | } - |""".stripMargin - } - - class IllegalRedefinitionOfStandardKind(kindType: String, name: Name)(using Context) - extends SyntaxMsg(IllegalRedefinitionOfStandardKindID) { - def msg = em"illegal redefinition of standard $kindType $name" - def explain = - em"""| "$name" is a standard Scala core `$kindType` - | Please choose a different name to avoid conflicts - |""".stripMargin - } - - class NoExtensionMethodAllowed(mdef: untpd.DefDef)(using Context) - extends SyntaxMsg(NoExtensionMethodAllowedID) { - def msg = em"No extension method allowed here, since collective parameters are given" - def explain = - em"""|Extension method: - | `${mdef}` - |is defined inside an extension clause which has collective parameters. - |""".stripMargin - } - - class ExtensionMethodCannotHaveTypeParams(mdef: untpd.DefDef)(using Context) - extends SyntaxMsg(ExtensionMethodCannotHaveTypeParamsID) { - def msg = i"Extension method cannot have type parameters since some were already given previously" - - def explain = - em"""|Extension method: - | `${mdef}` - |has type parameters `[${mdef.leadingTypeParams.map(_.show).mkString(",")}]`, while the extension clause has - |it's own type parameters. Please consider moving these to the extension clause's type parameter list. - |""".stripMargin - } - - class ExtensionCanOnlyHaveDefs(mdef: untpd.Tree)(using Context) - extends SyntaxMsg(ExtensionCanOnlyHaveDefsID) { - def msg = em"Only methods allowed here, since collective parameters are given" - def explain = - em"""Extension clauses can only have `def`s - | `${mdef.show}` is not a valid expression here. - |""".stripMargin - } - - class UnexpectedPatternForSummonFrom(tree: Tree[_])(using Context) - extends SyntaxMsg(UnexpectedPatternForSummonFromID) { - def msg = em"Unexpected pattern for summonFrom. 
Expected ${hl("`x: T`")} or ${hl("`_`")}" - def explain = - em"""|The pattern "${tree.show}" provided in the ${hl("case")} expression of the ${hl("summonFrom")}, - | needs to be of the form ${hl("`x: T`")} or ${hl("`_`")}. - | - | Example usage: - | inline def a = summonFrom { - | case x: T => ??? - | } - | - | or - | inline def a = summonFrom { - | case _ => ??? - | } - |""".stripMargin - } - - class AnonymousInstanceCannotBeEmpty(impl: untpd.Template)(using Context) - extends SyntaxMsg(AnonymousInstanceCannotBeEmptyID) { - def msg = i"anonymous instance must implement a type or have at least one extension method" - def explain = - em"""|Anonymous instances cannot be defined with an empty body. The block - |`${impl.show}` should either contain an implemented type or at least one extension method. - |""".stripMargin - } - - class ModifierNotAllowedForDefinition(flag: Flag)(using Context) - extends SyntaxMsg(ModifierNotAllowedForDefinitionID) { - def msg = em"Modifier ${hl(flag.flagsString)} is not allowed for this definition" - def explain = "" - } - - class RedundantModifier(flag: Flag)(using Context) - extends SyntaxMsg(RedundantModifierID) { - def msg = em"Modifier ${hl(flag.flagsString)} is redundant for this definition" - def explain = "" - } - - class InvalidReferenceInImplicitNotFoundAnnotation(typeVar: String, owner: String)(using Context) - extends ReferenceMsg(InvalidReferenceInImplicitNotFoundAnnotationID) { - def msg = em"""|Invalid reference to a type variable ${hl(typeVar)} found in the annotation argument. - |The variable does not occur as a parameter in the scope of ${hl(owner)}. - |""".stripMargin - def explain = "" - } - - class CaseClassInInlinedCode(tree: tpd.Tree)(using Context) - extends SyntaxMsg(CaseClassInInlinedCodeID) { - - def defKind = if tree.symbol.is(Module) then "object" else "class" - def msg = s"Case $defKind definitions are not allowed in inline methods or quoted code. Use a normal $defKind instead." 
- def explain = - em"""Case class/object definitions generate a considerable footprint in code size. - |Inlining such definition would multiply this footprint for each call site. - |""".stripMargin - } - - class ImplicitSearchTooLargeWarning(limit: Int, openSearchPairs: List[(Candidate, Type)])(using Context) - extends TypeMsg(ImplicitSearchTooLargeID): - override def showAlways = true - def showQuery(query: (Candidate, Type)): String = - i" ${query._1.ref.symbol.showLocated} for ${query._2}}" - def msg = - em"""Implicit search problem too large. - |an implicit search was terminated with failure after trying $limit expressions. - |The root candidate for the search was: - | - |${showQuery(openSearchPairs.last)} - | - |You can change the behavior by setting the `-Ximplicit-search-limit` value. - |Smaller values cause the search to fail faster. - |Larger values might make a very large search problem succeed. - |""" - def explain = - em"""The overflow happened with the following lists of tried expressions and target types, - |starting with the root query: - | - |${openSearchPairs.reverse.map(showQuery)}%\n% - """ - - class TargetNameOnTopLevelClass(symbol: Symbol)(using Context) - extends SyntaxMsg(TargetNameOnTopLevelClassID): - def msg = em"${hl("@targetName")} annotation not allowed on top-level $symbol" - def explain = - val annot = symbol.getAnnotation(defn.TargetNameAnnot).get - em"""The @targetName annotation may be applied to a top-level ${hl("val")} or ${hl("def")}, but not - |a top-level ${hl("class")}, ${hl("trait")}, or ${hl("object")}. - | - |This restriction is due to the naming convention of Java classfiles, whose filenames - |are based on the name of the class defined within. If @targetName were permitted - |here, the name of the classfile would be based on the target name, and the compiler - |could not associate that classfile with the Scala-visible defined name of the class. 
- | - |If your use case requires @targetName, consider wrapping $symbol in an ${hl("object")} - |(and possibly exporting it), as in the following example: - | - |${hl("object Wrapper:")} - | $annot $symbol { ... } - | - |${hl("export")} Wrapper.${symbol.name} ${hl("// optional")}""" + /** Extracting the message from a type, e.g. in + * + * @annotation.implicitNotFound("Foo is missing") + * trait Foo + * + * def foo(implicit foo: Foo): Any = ??? + */ + arg.tpe match + case ambi: AmbiguousImplicits => + (ambi.alt1, ambi.alt2) match + case (alt @ AmbiguousImplicitMsg(msg), _) => + userDefinedAmbiguousImplicitMsg(alt, msg) + case (_, alt @ AmbiguousImplicitMsg(msg)) => + userDefinedAmbiguousImplicitMsg(alt, msg) + case _ => + defaultAmbiguousImplicitMsg(ambi) + case ambi @ TooUnspecific(target) => + i"""No implicit search was attempted${location("for")} + |since the expected type $target is not specific enough""" + case _ => + val shortMessage = userDefinedImplicitNotFoundMessage(explain = false) + .getOrElse(defaultImplicitNotFoundMessage) + formatMsg(shortMessage)() + end msg + + override def msgPostscript(using Context) = + arg.tpe match + case _: AmbiguousImplicits => + "" // show no disambiguation + case _: TooUnspecific => + super.msgPostscript // show just disambigutation and match type trace + case _ => + // show all available additional info + def hiddenImplicitNote(s: SearchSuccess) = + i"\n\nNote: ${s.ref.symbol.showLocated} was not considered because it was not imported with `import given`." + def showImplicitAndConversions(imp: TermRef, convs: Iterable[TermRef]) = + i"\n- ${imp.symbol.showDcl}${convs.map(c => "\n - " + c.symbol.showDcl).mkString}" + def noChainConversionsNote(ignoredConvertibleImplicits: Iterable[TermRef]): Option[String] = + Option.when(ignoredConvertibleImplicits.nonEmpty)( + i"\n\nNote: implicit conversions are not automatically applied to arguments of using clauses. 
" + + i"You will have to pass the argument explicitly.\n" + + i"The following implicits in scope can be implicitly converted to ${pt.show}:" + + ignoredConvertibleImplicits.map { imp => s"\n- ${imp.symbol.showDcl}"}.mkString + ) + super.msgPostscript + ++ ignoredInstanceNormalImport.map(hiddenImplicitNote) + .orElse(noChainConversionsNote(ignoredConvertibleImplicits)) + .getOrElse(ctx.typer.importSuggestionAddendum(pt)) + + def explain(using Context) = userDefinedImplicitNotFoundMessage(explain = true) + .getOrElse("") +end MissingImplicitArgument + +class CannotBeAccessed(tpe: NamedType, superAccess: Boolean)(using Context) +extends ReferenceMsg(CannotBeAccessedID): + def msg(using Context) = + val pre = tpe.prefix + val name = tpe.name + val alts = tpe.denot.alternatives.map(_.symbol).filter(_.exists) + val whatCanNot = alts match + case Nil => + i"$name cannot" + case sym :: Nil => + i"${if (sym.owner == pre.typeSymbol) sym.show else sym.showLocated} cannot" + case _ => + i"none of the overloaded alternatives named $name can" + val where = if (ctx.owner.exists) s" from ${ctx.owner.enclosingClass}" else "" + val whyNot = new StringBuffer + alts.foreach(_.isAccessibleFrom(pre, superAccess, whyNot)) + i"$whatCanNot be accessed as a member of $pre$where.$whyNot" + def explain(using Context) = "" + +class InlineGivenShouldNotBeFunction()(using Context) +extends SyntaxMsg(InlineGivenShouldNotBeFunctionID): + def msg(using Context) = + i"""An inline given alias with a function value as right-hand side can significantly increase + |generated code size. You should either drop the `inline` or rewrite the given with an + |explicit `apply` method.""" + def explain(using Context) = + i"""A function value on the right-hand side of an inline given alias expands to + |an anonymous class. Each application of the inline given will then create a + |fresh copy of that class, which can increase code size in surprising ways. 
+ |For that reason, functions are discouraged as right hand sides of inline given aliases. + |You should either drop `inline` or rewrite to an explicit `apply` method. E.g. + | + | inline given Conversion[A, B] = x => x.toB + | + |should be re-formulated as + | + | given Conversion[A, B] with + | inline def apply(x: A) = x.toB + """ + +class ValueDiscarding(tp: Type)(using Context) + extends Message(ValueDiscardingID): + def kind = MessageKind.PotentialIssue + def msg(using Context) = i"discarded non-Unit value of type $tp" + def explain(using Context) = "" - class NotClassType(tp: Type)(using Context) - extends TypeMsg(NotClassTypeID), ShowMatchTrace(tp): - def msg = ex"$tp is not a class type" - def explain = "" +class UnusedNonUnitValue(tp: Type)(using Context) + extends Message(UnusedNonUnitValueID): + def kind = MessageKind.PotentialIssue + def msg(using Context) = i"unused value of type $tp" + def explain(using Context) = "" +class MatchTypeScrutineeCannotBeHigherKinded(tp: Type)(using Context) + extends TypeMsg(MatchTypeScrutineeCannotBeHigherKindedID) : + def msg(using Context) = i"the scrutinee of a match type cannot be higher-kinded" + def explain(using Context) = "" diff --git a/compiler/src/dotty/tools/dotc/reporting/trace.scala b/compiler/src/dotty/tools/dotc/reporting/trace.scala index 7c114b51ed21..8e8d3efb8b40 100644 --- a/compiler/src/dotty/tools/dotc/reporting/trace.scala +++ b/compiler/src/dotty/tools/dotc/reporting/trace.scala @@ -4,10 +4,11 @@ package reporting import scala.language.unsafeNulls -import core.Contexts._ -import config.Config -import config.Printers -import core.Mode +import core.*, Contexts.*, Decorators.* +import config.* +import printing.Formatting.* + +import scala.compiletime.* /** Exposes the {{{ trace("question") { op } }}} syntax. 
* @@ -51,9 +52,20 @@ trait TraceSyntax: else op inline def apply[T](inline question: String, inline printer: Printers.Printer, inline show: Boolean)(inline op: T)(using Context): T = - inline if isEnabled then - doTrace[T](question, printer, if show then showShowable(_) else alwaysToString)(op) - else op + apply(question, printer, { + val showOp: T => String = inline if show == true then + val showT = summonInline[Show[T]] + { + given Show[T] = showT + t => i"$t" + } + else + summonFrom { + case given Show[T] => t => i"$t" + case _ => alwaysToString + } + showOp + })(op) inline def apply[T](inline question: String, inline printer: Printers.Printer)(inline op: T)(using Context): T = apply[T](question, printer, false)(op) @@ -64,15 +76,11 @@ trait TraceSyntax: inline def apply[T](inline question: String)(inline op: T)(using Context): T = apply[T](question, false)(op) - private def showShowable(x: Any)(using Context) = x match - case x: printing.Showable => x.show - case _ => String.valueOf(x) - private val alwaysToString = (x: Any) => String.valueOf(x) private def doTrace[T](question: => String, printer: Printers.Printer = Printers.default, - showOp: T => String = alwaysToString) + showOp: T => String) (op: => T)(using Context): T = if ctx.mode.is(Mode.Printing) || !isForced && (printer eq Printers.noPrinter) then op else diff --git a/compiler/src/dotty/tools/dotc/rewrites/Rewrites.scala b/compiler/src/dotty/tools/dotc/rewrites/Rewrites.scala index 96e88e5c68ae..f2dfac88d464 100644 --- a/compiler/src/dotty/tools/dotc/rewrites/Rewrites.scala +++ b/compiler/src/dotty/tools/dotc/rewrites/Rewrites.scala @@ -23,10 +23,7 @@ object Rewrites { private[Rewrites] val pbuf = new mutable.ListBuffer[Patch]() def addPatch(span: Span, replacement: String): Unit = - pbuf.indexWhere(p => p.span.start == span.start && p.span.end == span.end) match { - case i if i >= 0 => pbuf.update(i, Patch(span, replacement)) - case _ => pbuf += Patch(span, replacement) - } + pbuf += Patch(span, 
replacement) def apply(cs: Array[Char]): Array[Char] = { val delta = pbuf.map(_.delta).sum diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala index e561b26abf6d..f54baeb7256c 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala @@ -737,8 +737,7 @@ private class ExtractAPICollector(using Context) extends ThunkHolder { var h = initHash p match - case p: WithLazyField[?] => - p.forceIfLazy + case p: WithLazyFields => p.forceFields() case _ => if inlineOrigin.exists then diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala index f7b15dc21eb0..fe5c8d061c78 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala @@ -143,34 +143,7 @@ class ExtractDependencies extends Phase { def allowLocal = dep.context == DependencyByInheritance || dep.context == LocalDependencyByInheritance if (depFile.extension == "class") { // Dependency is external -- source is undefined - - // The fully qualified name on the JVM of the class corresponding to `dep.to` - val binaryClassName = { - val builder = new StringBuilder - val pkg = dep.to.enclosingPackageClass - if (!pkg.isEffectiveRoot) { - builder.append(pkg.fullName.mangledString) - builder.append(".") - } - val flatName = dep.to.flatName - // Some companion objects are fake (that is, they're a compiler fiction - // that doesn't correspond to a class that exists at runtime), this - // can happen in two cases: - // - If a Java class has static members. - // - If we create constructor proxies for a class (see NamerOps#addConstructorProxies). 
- // - // In both cases it's vital that we don't send the object name to - // zinc: when sbt is restarted, zinc will inspect the binary - // dependencies to see if they're still on the classpath, if it - // doesn't find them it will invalidate whatever referenced them, so - // any reference to a fake companion will lead to extra recompilations. - // Instead, use the class name since it's guaranteed to exist at runtime. - val clsFlatName = if (dep.to.isOneOf(JavaDefined | ConstructorProxy)) flatName.stripModuleClassSuffix else flatName - builder.append(clsFlatName.mangledString) - builder.toString - } - - processExternalDependency(depFile, binaryClassName) + processExternalDependency(depFile, dep.to.binaryClassName) } else if (allowLocal || depFile.file != sourceFile) { // We cannot ignore dependencies coming from the same source file because // the dependency info needs to propagate. See source-dependencies/trait-trait-211. @@ -190,7 +163,7 @@ object ExtractDependencies { /** Report an internal error in incremental compilation. */ def internalError(msg: => String, pos: SrcPos = NoSourcePosition)(using Context): Unit = - report.error(s"Internal error in the incremental compiler while compiling ${ctx.compilationUnit.source}: $msg", pos) + report.error(em"Internal error in the incremental compiler while compiling ${ctx.compilationUnit.source}: $msg", pos) } private case class ClassDependency(from: Symbol, to: Symbol, context: DependencyContext) @@ -333,6 +306,13 @@ private class ExtractDependenciesCollector extends tpd.TreeTraverser { thisTreeT } } + private def addInheritanceDependencies(tree: Closure)(using Context): Unit = + // If the tpt is empty, this is a non-SAM lambda, so no need to register + // an inheritance relationship. 
+ if !tree.tpt.isEmpty then + val from = resolveDependencySource + _dependencies += ClassDependency(from, tree.tpt.tpe.classSymbol, LocalDependencyByInheritance) + private def addInheritanceDependencies(tree: Template)(using Context): Unit = if (tree.parents.nonEmpty) { val depContext = depContextOf(tree.symbol.owner) @@ -396,6 +376,8 @@ private class ExtractDependenciesCollector extends tpd.TreeTraverser { thisTreeT case ref: RefTree => addMemberRefDependency(ref.symbol) addTypeDependency(ref.tpe) + case t: Closure => + addInheritanceDependencies(t) case t: Template => addInheritanceDependencies(t) case _ => diff --git a/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala b/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala index 071efb1fb91c..91614aaccad2 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala @@ -24,6 +24,7 @@ import scala.annotation.{ threadUnsafe => tu, tailrec } import scala.PartialFunction.condOpt import dotty.tools.dotc.{semanticdb => s} +import dotty.tools.io.{AbstractFile, JarArchive} /** Extract symbol references and uses to semanticdb files. 
* See https://scalameta.org/docs/semanticdb/specification.html#symbol-1 @@ -38,7 +39,9 @@ class ExtractSemanticDB extends Phase: override val description: String = ExtractSemanticDB.description override def isRunnable(using Context) = - super.isRunnable && ctx.settings.Xsemanticdb.value + import ExtractSemanticDB.{semanticdbTarget, outputDirectory} + def writesToOutputJar = semanticdbTarget.isEmpty && outputDirectory.isInstanceOf[JarArchive] + super.isRunnable && ctx.settings.Xsemanticdb.value && !writesToOutputJar // Check not needed since it does not transform trees override def isCheckable: Boolean = false @@ -187,7 +190,7 @@ class ExtractSemanticDB extends Phase: registerUseGuarded(None, privateWithin, spanOfSymbol(privateWithin, tree.span, tree.source), tree.source) else if !excludeSymbol(tree.symbol) then registerSymbol(tree.symbol, symbolKinds(tree)) - case tree: Template if tree.symbol.owner.is(Invisible) => + case tree: Template if tree.symbol != NoSymbol && tree.symbol.owner.is(Invisible) => // do nothing // exclude the symbols and synthetics generated by @main annotation // (main class generated by @main has `Invisible` flag, see `MainProxies.scala`). 
@@ -198,7 +201,7 @@ class ExtractSemanticDB extends Phase: val selfSpan = tree.self.span if selfSpan.exists && selfSpan.hasLength then traverse(tree.self) - if tree.symbol.owner.isEnumClass then + if tree.symbol != NoSymbol && tree.symbol.owner.isEnumClass then tree.body.foreachUntilImport(traverse).foreach(traverse) // the first import statement else tree.body.foreach(traverse) @@ -475,6 +478,13 @@ object ExtractSemanticDB: val name: String = "extractSemanticDB" val description: String = "extract info into .semanticdb files" + private def semanticdbTarget(using Context): Option[Path] = + Option(ctx.settings.semanticdbTarget.value) + .filterNot(_.isEmpty) + .map(Paths.get(_)) + + private def outputDirectory(using Context): AbstractFile = ctx.settings.outputDir.value + def write( source: SourceFile, occurrences: List[SymbolOccurrence], @@ -482,14 +492,8 @@ object ExtractSemanticDB: synthetics: List[Synthetic], )(using Context): Unit = def absolutePath(path: Path): Path = path.toAbsolutePath.normalize - val semanticdbTarget = - val semanticdbTargetSetting = ctx.settings.semanticdbTarget.value - absolutePath( - if semanticdbTargetSetting.isEmpty then ctx.settings.outputDir.value.jpath - else Paths.get(semanticdbTargetSetting) - ) val relPath = SourceFile.relativePath(source, ctx.settings.sourceroot.value) - val outpath = semanticdbTarget + val outpath = absolutePath(semanticdbTarget.getOrElse(outputDirectory.jpath)) .resolve("META-INF") .resolve("semanticdb") .resolve(relPath) diff --git a/compiler/src/dotty/tools/dotc/semanticdb/PPrint.scala b/compiler/src/dotty/tools/dotc/semanticdb/PPrint.scala index 6814d923a062..b53ee787f501 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/PPrint.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/PPrint.scala @@ -196,6 +196,10 @@ class SymbolInformationPrinter (symtab: PrinterSymtab): s"${pprint(caseType.key)} => ${pprint(caseType.body)}" }.mkString(", ") s"${pprint(scrutinee)} match { ${casesStr} }" + case 
LambdaType(tparams, res) => + val params = tparams.infos.map(_.displayName).mkString("[", ", ", "]") + val resType = normal(res) + s"$params =>> $resType" case x => "" diff --git a/compiler/src/dotty/tools/dotc/semanticdb/SemanticSymbolBuilder.scala b/compiler/src/dotty/tools/dotc/semanticdb/SemanticSymbolBuilder.scala index c825032373f8..c7b0dfd437db 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/SemanticSymbolBuilder.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/SemanticSymbolBuilder.scala @@ -74,7 +74,9 @@ class SemanticSymbolBuilder: def addOwner(owner: Symbol): Unit = if !owner.isRoot then addSymName(b, owner) - def addOverloadIdx(sym: Symbol): Unit = + def addOverloadIdx(initSym: Symbol): Unit = + // revert from the compiler-generated overload of the signature polymorphic method + val sym = initSym.originalSignaturePolymorphic.symbol.orElse(initSym) val decls = val decls0 = sym.owner.info.decls.lookupAll(sym.name) if sym.owner.isAllOf(JavaModule) then diff --git a/compiler/src/dotty/tools/dotc/semanticdb/TypeOps.scala b/compiler/src/dotty/tools/dotc/semanticdb/TypeOps.scala index 0ccaab48889a..b0d032c7d83b 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/TypeOps.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/TypeOps.scala @@ -483,9 +483,23 @@ class TypeOps: case NoPrefix => s.Type.Empty - // Not yet supported - case _: HKTypeLambda => - s.Type.Empty + case lambda: HKTypeLambda => + val paramSyms: List[SemanticSymbol] = lambda.paramNames.zip(lambda.paramInfos).map { (paramName, bounds) => + // def x[T[_]] = ??? 
+ if paramName.isWildcard then + WildcardTypeSymbol(sym, bounds).tap(registerFakeSymbol) + else + paramRefSymtab.lookup(lambda, paramName).getOrElse { + TypeParamRefSymbol(sym, paramName, bounds).tap(registerFakeSymbol) + } + } + val parameters = + paramSyms.sscopeOpt(using LinkMode.HardlinkChildren) + val resType = loop(lambda.resType) + s.LambdaType( + parameters, + resType + ) case tvar: TypeVar => loop(tvar.stripped) diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Type.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Type.scala index da24b4847e19..be9cc6034f2c 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Type.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Type.scala @@ -39,6 +39,7 @@ object Type { case __v: dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.ByNameType => __v.value case __v: dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.RepeatedType => __v.value case __v: dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.MatchType => __v.value + case __v: dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.LambdaType => __v.value case dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.Empty => Empty } override def toBase(__custom: dotty.tools.dotc.semanticdb.Type): dotty.tools.dotc.semanticdb.TypeMessage = dotty.tools.dotc.semanticdb.TypeMessage(__custom match { @@ -57,6 +58,7 @@ object Type { case __v: dotty.tools.dotc.semanticdb.ByNameType => dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.ByNameType(__v) case __v: dotty.tools.dotc.semanticdb.RepeatedType => dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.RepeatedType(__v) case __v: dotty.tools.dotc.semanticdb.MatchType => dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.MatchType(__v) + case __v: dotty.tools.dotc.semanticdb.LambdaType => dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.LambdaType(__v) case Empty => dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.Empty }) } @@ -129,6 +131,10 @@ final case class 
TypeMessage( val __value = sealedValue.matchType.get __size += 2 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize }; + if (sealedValue.lambdaType.isDefined) { + val __value = sealedValue.lambdaType.get + __size += 2 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize + }; __size } override def serializedSize: _root_.scala.Int = { @@ -231,6 +237,12 @@ final case class TypeMessage( _output__.writeUInt32NoTag(__m.serializedSize) __m.writeTo(_output__) }; + sealedValue.lambdaType.foreach { __v => + val __m = __v + _output__.writeTag(26, 2) + _output__.writeUInt32NoTag(__m.serializedSize) + __m.writeTo(_output__) + }; } def getTypeRef: dotty.tools.dotc.semanticdb.TypeRef = sealedValue.typeRef.getOrElse(dotty.tools.dotc.semanticdb.TypeRef.defaultInstance) def withTypeRef(__v: dotty.tools.dotc.semanticdb.TypeRef): TypeMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.TypeRef(__v)) @@ -262,6 +274,8 @@ final case class TypeMessage( def withRepeatedType(__v: dotty.tools.dotc.semanticdb.RepeatedType): TypeMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.RepeatedType(__v)) def getMatchType: dotty.tools.dotc.semanticdb.MatchType = sealedValue.matchType.getOrElse(dotty.tools.dotc.semanticdb.MatchType.defaultInstance) def withMatchType(__v: dotty.tools.dotc.semanticdb.MatchType): TypeMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.MatchType(__v)) + def getLambdaType: dotty.tools.dotc.semanticdb.LambdaType = sealedValue.lambdaType.getOrElse(dotty.tools.dotc.semanticdb.LambdaType.defaultInstance) + def withLambdaType(__v: dotty.tools.dotc.semanticdb.LambdaType): TypeMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.LambdaType(__v)) def clearSealedValue: TypeMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.Empty) def 
withSealedValue(__v: dotty.tools.dotc.semanticdb.TypeMessage.SealedValue): TypeMessage = copy(sealedValue = __v) @@ -311,6 +325,8 @@ object TypeMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc __sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.RepeatedType(__sealedValue.repeatedType.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.RepeatedType](_input__))(LiteParser.readMessage(_input__, _))) case 202 => __sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.MatchType(__sealedValue.matchType.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.MatchType](_input__))(LiteParser.readMessage(_input__, _))) + case 210 => + __sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.LambdaType(__sealedValue.lambdaType.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.LambdaType](_input__))(LiteParser.readMessage(_input__, _))) case tag => _input__.skipField(tag) } } @@ -345,6 +361,7 @@ object TypeMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc def isByNameType: _root_.scala.Boolean = false def isRepeatedType: _root_.scala.Boolean = false def isMatchType: _root_.scala.Boolean = false + def isLambdaType: _root_.scala.Boolean = false def typeRef: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeRef] = _root_.scala.None def singleType: _root_.scala.Option[dotty.tools.dotc.semanticdb.SingleType] = _root_.scala.None def thisType: _root_.scala.Option[dotty.tools.dotc.semanticdb.ThisType] = _root_.scala.None @@ -360,6 +377,7 @@ object TypeMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc def byNameType: _root_.scala.Option[dotty.tools.dotc.semanticdb.ByNameType] = _root_.scala.None def repeatedType: _root_.scala.Option[dotty.tools.dotc.semanticdb.RepeatedType] = _root_.scala.None def matchType: _root_.scala.Option[dotty.tools.dotc.semanticdb.MatchType] = _root_.scala.None + def lambdaType: _root_.scala.Option[dotty.tools.dotc.semanticdb.LambdaType] = 
_root_.scala.None } object SealedValue { @SerialVersionUID(0L) @@ -476,6 +494,13 @@ object TypeMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc override def matchType: _root_.scala.Option[dotty.tools.dotc.semanticdb.MatchType] = Some(value) override def number: _root_.scala.Int = 25 } + @SerialVersionUID(0L) + final case class LambdaType(value: dotty.tools.dotc.semanticdb.LambdaType) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue derives CanEqual { + type ValueType = dotty.tools.dotc.semanticdb.LambdaType + override def isLambdaType: _root_.scala.Boolean = true + override def lambdaType: _root_.scala.Option[dotty.tools.dotc.semanticdb.LambdaType] = Some(value) + override def number: _root_.scala.Int = 26 + } } final val TYPE_REF_FIELD_NUMBER = 2 final val SINGLE_TYPE_FIELD_NUMBER = 20 @@ -492,6 +517,7 @@ object TypeMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc final val BY_NAME_TYPE_FIELD_NUMBER = 13 final val REPEATED_TYPE_FIELD_NUMBER = 14 final val MATCH_TYPE_FIELD_NUMBER = 25 + final val LAMBDA_TYPE_FIELD_NUMBER = 26 def of( sealedValue: dotty.tools.dotc.semanticdb.TypeMessage.SealedValue ): _root_.dotty.tools.dotc.semanticdb.TypeMessage = _root_.dotty.tools.dotc.semanticdb.TypeMessage( @@ -2034,3 +2060,107 @@ object MatchType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.s ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.MatchType]) } + +@SerialVersionUID(0L) +final case class LambdaType( + parameters: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope] = _root_.scala.None, + returnType: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.LambdaType._typemapper_returnType.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) + ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { + @transient @sharable + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + 
private[this] def __computeSerializedSize(): _root_.scala.Int = { + var __size = 0 + if (parameters.isDefined) { + val __value = parameters.get + __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize + }; + + { + val __value = dotty.tools.dotc.semanticdb.LambdaType._typemapper_returnType.toBase(returnType) + if (__value.serializedSize != 0) { + __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize + } + }; + __size + } + override def serializedSize: _root_.scala.Int = { + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size + } + __size - 1 + + } + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + parameters.foreach { __v => + val __m = __v + _output__.writeTag(1, 2) + _output__.writeUInt32NoTag(__m.serializedSize) + __m.writeTo(_output__) + }; + { + val __v = dotty.tools.dotc.semanticdb.LambdaType._typemapper_returnType.toBase(returnType) + if (__v.serializedSize != 0) { + _output__.writeTag(2, 2) + _output__.writeUInt32NoTag(__v.serializedSize) + __v.writeTo(_output__) + } + }; + } + def getParameters: dotty.tools.dotc.semanticdb.Scope = parameters.getOrElse(dotty.tools.dotc.semanticdb.Scope.defaultInstance) + def clearParameters: LambdaType = copy(parameters = _root_.scala.None) + def withParameters(__v: dotty.tools.dotc.semanticdb.Scope): LambdaType = copy(parameters = Option(__v)) + def withReturnType(__v: dotty.tools.dotc.semanticdb.Type): LambdaType = copy(returnType = __v) + + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.LambdaType]) +} + +object LambdaType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.LambdaType] { + implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.LambdaType] = this + def parseFrom(`_input__`: SemanticdbInputStream): 
dotty.tools.dotc.semanticdb.LambdaType = { + var __parameters: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope] = _root_.scala.None + var __returnType: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeMessage] = _root_.scala.None + var _done__ = false + while (!_done__) { + val _tag__ = _input__.readTag() + _tag__ match { + case 0 => _done__ = true + case 10 => + __parameters = Option(__parameters.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.Scope](_input__))(LiteParser.readMessage(_input__, _))) + case 18 => + __returnType = _root_.scala.Some(__returnType.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeMessage](_input__))(LiteParser.readMessage(_input__, _))) + case tag => _input__.skipField(tag) + } + } + dotty.tools.dotc.semanticdb.LambdaType( + parameters = __parameters, + returnType = dotty.tools.dotc.semanticdb.LambdaType._typemapper_returnType.toCustom(__returnType.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)) + ) + } + + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.LambdaType( + parameters = _root_.scala.None, + returnType = dotty.tools.dotc.semanticdb.LambdaType._typemapper_returnType.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) + ) + final val PARAMETERS_FIELD_NUMBER = 1 + final val RETURN_TYPE_FIELD_NUMBER = 2 + @transient @sharable + private[semanticdb] val _typemapper_returnType: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type] = implicitly[SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type]] + def of( + parameters: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope], + returnType: dotty.tools.dotc.semanticdb.Type + ): _root_.dotty.tools.dotc.semanticdb.LambdaType = _root_.dotty.tools.dotc.semanticdb.LambdaType( + parameters, + returnType + ) + // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.LambdaType]) +} diff --git 
a/compiler/src/dotty/tools/dotc/staging/CrossStageSafety.scala b/compiler/src/dotty/tools/dotc/staging/CrossStageSafety.scala new file mode 100644 index 000000000000..8360d8e08211 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/staging/CrossStageSafety.scala @@ -0,0 +1,240 @@ +package dotty.tools.dotc +package staging + +import dotty.tools.dotc.ast.{tpd, untpd} +import dotty.tools.dotc.core.Annotations._ +import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Decorators._ +import dotty.tools.dotc.core.Flags._ +import dotty.tools.dotc.core.NameKinds._ +import dotty.tools.dotc.core.StdNames._ +import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.core.Types._ +import dotty.tools.dotc.staging.StagingLevel.* +import dotty.tools.dotc.staging.QuoteTypeTags.* +import dotty.tools.dotc.util.Property +import dotty.tools.dotc.util.Spans._ +import dotty.tools.dotc.util.SrcPos + +/** Checks that staging level consistency holds and heals staged types. + * + * Local term references are level consistent if and only if they are used at the same level as their definition. + * + * Local type references can be used at the level of their definition or lower. If used used at a higher level, + * it will be healed if possible, otherwise it is inconsistent. + * + * Healing a type consists in replacing locally defined types defined at staging level 0 and used in higher levels. + * For each type local `T` that is defined at level 0 and used in a quote, we summon a tag `t: Type[T]`. This `t` + * tag must be defined at level 0. The tags will be listed in the `tags` of the level 0 quote (`'{ ... }`) and + * each reference to `T` will be replaced by `t.Underlying` in the body of the quote. + * + * We delay the healing of types in quotes at level 1 or higher until those quotes reach level 0. At this point + * more types will be statically known and fewer types will need to be healed. 
This also keeps the nested quotes + * in their original form, we do not want macro users to see any artifacts of this phase in quoted expressions + * they might inspect. + * + * Type heal example: + * + * '{ + * val x: List[T] = List[T]() + * '{ .. T .. } + * () + * } + * + * is transformed to + * + * '{ // where `t` is a given term of type `Type[T]` + * val x: List[t.Underlying] = List[t.Underlying](); + * '{ .. t.Underlying .. } + * () + * } + * + */ +class CrossStageSafety extends TreeMapWithStages { + import tpd._ + + private val InAnnotation = Property.Key[Unit]() + + override def transform(tree: Tree)(using Context): Tree = + if (tree.source != ctx.source && tree.source.exists) + transform(tree)(using ctx.withSource(tree.source)) + else tree match + case CancelledQuote(tree) => + transform(tree) // Optimization: `'{ $x }` --> `x` + case tree: Quote => + if (ctx.property(InAnnotation).isDefined) + report.error("Cannot have a quote in an annotation", tree.srcPos) + + val tree1 = + val stripAnnotationsDeep: TypeMap = new TypeMap: + def apply(tp: Type): Type = mapOver(tp.stripAnnots) + val bodyType1 = healType(tree.srcPos)(stripAnnotationsDeep(tree.bodyType)) + tree.withBodyType(bodyType1) + + if level == 0 then + val (tags, body1) = inContextWithQuoteTypeTags { transform(tree1.body)(using quoteContext) } + cpy.Quote(tree1)(body1, tags) + else + super.transform(tree1) + + case CancelledSplice(tree) => + transform(tree) // Optimization: `${ 'x }` --> `x` + case tree: Splice => + val body1 = transform(tree.expr)(using spliceContext) + val tpe1 = + if level == 0 then tree.tpe + else healType(tree.srcPos)(tree.tpe.widenTermRefExpr) + untpd.cpy.Splice(tree)(body1).withType(tpe1) + + case tree @ QuotedTypeOf(body) => + if (ctx.property(InAnnotation).isDefined) + report.error("Cannot have a quote in an annotation", tree.srcPos) + + if level == 0 then + val (tags, body1) = inContextWithQuoteTypeTags { transform(body)(using quoteContext) } + val quotes = 
transform(tree.args.head) + tags match + case tag :: Nil if body1.isType && body1.tpe =:= tag.tpe.select(tpnme.Underlying) => + tag // Optimization: `quoted.Type.of[x.Underlying](quotes)` --> `x` + case _ => + // `quoted.Type.of[]()` --> `'[].apply()` + tpd.Quote(body1, tags).select(nme.apply).appliedTo(quotes).withSpan(tree.span) + else + super.transform(tree) + case _: DefDef if tree.symbol.isInlineMethod => + tree + + case _ if !inQuoteOrSpliceScope => + checkAnnotations(tree) // Check quotes in annotations + super.transform(tree) + + case _: TypeTree => + val tp1 = transformTypeAnnotationSplices(tree.tpe) + val healedType = healType(tree.srcPos)(tp1) + if healedType == tree.tpe then tree + else TypeTree(healedType).withSpan(tree.span) + case _: RefTree | _: SingletonTypeTree if tree.isType => + val healedType = healType(tree.srcPos)(tree.tpe) + if healedType == tree.tpe then tree + else TypeTree(healedType).withSpan(tree.span) + case tree: Ident if isWildcardArg(tree) => + tree.withType(healType(tree.srcPos)(tree.tpe)) + case tree: Ident => // this is a term Ident + checkLevelConsistency(tree) + tree + case tree: This => + checkLevelConsistency(tree) + tree + case _: AppliedTypeTree => + super.transform(tree) match + case tree1: AppliedTypeTree if tree1 ne tree => + // propagate healed types + tree1.withType(tree1.tpt.tpe.appliedTo(tree1.args.map(_.tpe))) + case tree1 => tree1 + case tree: ValOrDefDef => + checkAnnotations(tree) + healInfo(tree, tree.tpt.srcPos) + super.transform(tree) + case tree: Bind => + checkAnnotations(tree) + healInfo(tree, tree.srcPos) + super.transform(tree) + case tree: UnApply => + super.transform(tree).withType(healType(tree.srcPos)(tree.tpe)) + case tree: TypeDef if tree.symbol.is(Case) && level > 0 => + report.error(reporting.CaseClassInInlinedCode(tree), tree) + super.transform(tree) + case _ => + super.transform(tree) + end transform + + def transformTypeAnnotationSplices(tp: Type)(using Context) = new TypeMap { + def apply(tp: 
Type): Type = tp match + case tp: AnnotatedType => + val newAnnotTree = transform(tp.annot.tree) + derivedAnnotatedType(tp, apply(tp.parent), tp.annot.derivedAnnotation(newAnnotTree)) + case _ => + mapOver(tp) + }.apply(tp) + + /** Check that annotations do not contain quotes and and that splices are valid */ + private def checkAnnotations(tree: Tree)(using Context): Unit = + tree match + case tree: DefTree => + lazy val annotCtx = ctx.fresh.setProperty(InAnnotation, true).withOwner(tree.symbol) + for (annot <- tree.symbol.annotations) annot match + case annot: BodyAnnotation => annot // already checked in PrepareInlineable before the creation of the BodyAnnotation + case annot => transform(annot.tree)(using annotCtx) + case _ => + + /** Heal types in the info of the given tree */ + private def healInfo(tree: Tree, pos: SrcPos)(using Context): Unit = + tree.symbol.info = healType(pos)(tree.symbol.info) + + /** If the type refers to a locally defined symbol (either directly, or in a pickled type), + * check that its staging level matches the current level. + * - Static types and term are allowed at any level. + * - If a type reference is used a higher level, then it is inconsistent. + * Will attempt to heal before failing. + * - If a term reference is used a higher level, then it is inconsistent. + * It cannot be healed because the term will not exist in any future stage. + * + * If `T` is a reference to a type at the wrong level, try to heal it by replacing it with + * a type tag of type `quoted.Type[T]`. + * The tag is generated by an instance of `QuoteTypeTags` directly if the splice is explicit + * or indirectly by `tryHeal`. 
+ */ + protected def healType(pos: SrcPos)(tpe: Type)(using Context) = + new HealType(pos).apply(tpe) + + /** Check level consistency of terms references */ + private def checkLevelConsistency(tree: Ident | This)(using Context): Unit = + new TypeTraverser { + def traverse(tp: Type): Unit = + tp match + case tp @ TermRef(NoPrefix, _) if !tp.symbol.isStatic && level != levelOf(tp.symbol) => + levelError(tp.symbol, tp, tree.srcPos) + case tp: ThisType if level != -1 && level != levelOf(tp.cls) => + levelError(tp.cls, tp, tree.srcPos) + case tp: AnnotatedType => + traverse(tp.parent) + case _ if tp.typeSymbol.is(Package) => + // OK + case _ => + traverseChildren(tp) + }.traverse(tree.tpe) + + private def levelError(sym: Symbol, tp: Type, pos: SrcPos)(using Context): tp.type = { + def symStr = + if (!tp.isInstanceOf[ThisType]) sym.show + else if (sym.is(ModuleClass)) sym.sourceModule.show + else i"${sym.name}.this" + val hint = + if sym.is(Inline) && levelOf(sym) < level then + "\n\n" + + "Hint: Staged references to inline definition in quotes are only inlined after the quote is spliced into level 0 code by a macro. " + + "Try moving this inline definition in a statically accessible location such as an object (this definition can be private)." 
+ else "" + report.error( + em"""access to $symStr from wrong staging level: + | - the definition is at level ${levelOf(sym)}, + | - but the access is at level $level.$hint""", pos) + tp + } + + private object CancelledQuote: + def unapply(tree: Quote): Option[Tree] = + def rec(tree: Tree): Option[Tree] = tree match + case Block(Nil, expr) => rec(expr) + case Splice(inner) => Some(inner) + case _ => None + rec(tree.body) + + private object CancelledSplice: + def unapply(tree: Splice): Option[Tree] = + def rec(tree: Tree): Option[Tree] = tree match + case Block(Nil, expr) => rec(expr) + case Quote(inner, _) => Some(inner) + case _ => None + rec(tree.expr) +} diff --git a/compiler/src/dotty/tools/dotc/staging/HealType.scala b/compiler/src/dotty/tools/dotc/staging/HealType.scala new file mode 100644 index 000000000000..7d3ca0ad2f63 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/staging/HealType.scala @@ -0,0 +1,111 @@ +package dotty.tools.dotc +package staging + +import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Decorators._ +import dotty.tools.dotc.core.Flags._ +import dotty.tools.dotc.core.StdNames._ +import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.core.Types._ +import dotty.tools.dotc.staging.StagingLevel.* +import dotty.tools.dotc.staging.QuoteTypeTags.* +import dotty.tools.dotc.transform.SymUtils._ +import dotty.tools.dotc.typer.Implicits.SearchFailureType +import dotty.tools.dotc.util.SrcPos + +class HealType(pos: SrcPos)(using Context) extends TypeMap { + + /** If the type refers to a locally defined symbol (either directly, or in a pickled type), + * check that its staging level matches the current level. + * - Static types and term are allowed at any level. + * - If a type reference is used a higher level, then it is inconsistent. + * Will attempt to heal before failing. + * - If a term reference is used a higher level, then it is inconsistent. + * It cannot be healed because the term will not exist in any future stage. 
+ * + * If `T` is a reference to a type at the wrong level, try to heal it by replacing it with + * a type tag of type `quoted.Type[T]`. + * The tag is recorded by an instance of `QuoteTypeTags` directly if the splice is explicit + * or indirectly by `tryHeal`. + */ + def apply(tp: Type): Type = + tp match + case NonSpliceAlias(aliased) => this.apply(aliased) + case tp: TypeRef => healTypeRef(tp) + case tp: TermRef => + val inconsistentRoot = levelInconsistentRootOfPath(tp) + if inconsistentRoot.exists then levelError(inconsistentRoot, tp, pos) + else tp + case tp: AnnotatedType => + derivedAnnotatedType(tp, apply(tp.parent), tp.annot) + case _ => + mapOver(tp) + + private def healTypeRef(tp: TypeRef): Type = + tp.prefix match + case prefix: TermRef if tp.symbol.isTypeSplice => + checkNotWildcardSplice(tp) + if level == 0 then tp else getTagRef(prefix) + case _: TermRef | _: ThisType | NoPrefix => + if levelInconsistentRootOfPath(tp).exists then + tryHeal(tp) + else + tp + case _ => + mapOver(tp) + + private object NonSpliceAlias: + def unapply(tp: TypeRef)(using Context): Option[Type] = tp.underlying match + case TypeAlias(alias) if !tp.symbol.isTypeSplice => Some(alias) + case _ => None + + private def checkNotWildcardSplice(splice: TypeRef): Unit = + splice.prefix.termSymbol.info.argInfos match + case (tb: TypeBounds) :: _ => report.error(em"Cannot stage $splice because it is an alias to a wildcard type", pos) + case _ => + + /** Return the root of this path if it is a variable defined in a previous level. + * If the path is consistent, return NoSymbol. 
+ */ + private def levelInconsistentRootOfPath(tp: Type)(using Context): Symbol = + tp match + case tp @ NamedType(NoPrefix, _) if level > levelOf(tp.symbol) => tp.symbol + case tp: NamedType if !tp.symbol.isStatic => levelInconsistentRootOfPath(tp.prefix) + case tp: ThisType if level > levelOf(tp.cls) => tp.cls + case _ => NoSymbol + + /** Try to heal reference to type `T` used in a higher level than its definition. + * Returns a reference to a type tag generated by `QuoteTypeTags` that contains a + * reference to a type alias containing the equivalent of `${summon[quoted.Type[T]]}.Underlying`. + * Emits an error if `T` cannot be healed and returns `T`. + */ + protected def tryHeal(tp: TypeRef): Type = { + val reqType = defn.QuotedTypeClass.typeRef.appliedTo(tp) + val tag = ctx.typer.inferImplicitArg(reqType, pos.span) + tag.tpe match + case tp: TermRef => + ctx.typer.checkStable(tp, pos, "type witness") + if levelOf(tp.symbol) > 0 then tp.select(tpnme.Underlying) + else getTagRef(tp) + case _: SearchFailureType => + report.error( + ctx.typer.missingArgMsg(tag, reqType, "") + .prepend(i"Reference to $tp within quotes requires a given $reqType in scope.\n") + .append("\n"), + pos) + tp + case _ => + report.error(em"""Reference to $tp within quotes requires a given $reqType in scope. 
+ | + |""", pos) + tp + } + + private def levelError(sym: Symbol, tp: Type, pos: SrcPos): tp.type = { + report.error( + em"""access to $sym from wrong staging level: + | - the definition is at level ${levelOf(sym)}, + | - but the access is at level $level""", pos) + tp + } +} diff --git a/compiler/src/dotty/tools/dotc/staging/QuoteTypeTags.scala b/compiler/src/dotty/tools/dotc/staging/QuoteTypeTags.scala new file mode 100644 index 000000000000..0b5032ea5a6d --- /dev/null +++ b/compiler/src/dotty/tools/dotc/staging/QuoteTypeTags.scala @@ -0,0 +1,24 @@ +package dotty.tools.dotc.staging + +import dotty.tools.dotc.ast.tpd +import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Decorators._ +import dotty.tools.dotc.core.StdNames._ +import dotty.tools.dotc.core.Types._ +import dotty.tools.dotc.staging.StagingLevel.* +import dotty.tools.dotc.util.Property + +import scala.collection.mutable.LinkedHashSet + +object QuoteTypeTags: + + private val TaggedTypes = new Property.Key[LinkedHashSet[TermRef]] + + def inContextWithQuoteTypeTags(body: Context ?=> tpd.Tree)(using Context): (List[tpd.Tree], tpd.Tree) = + val tags = LinkedHashSet.empty[TermRef] + val transformed = body(using ctx.fresh.setProperty(TaggedTypes, tags)) + (tags.toList.map(tpd.ref(_)), transformed) + + def getTagRef(spliced: TermRef)(using Context): Type = + ctx.property(TaggedTypes).get += spliced + spliced.select(tpnme.Underlying) diff --git a/compiler/src/dotty/tools/dotc/staging/StagingLevel.scala b/compiler/src/dotty/tools/dotc/staging/StagingLevel.scala new file mode 100644 index 000000000000..05b3efab408c --- /dev/null +++ b/compiler/src/dotty/tools/dotc/staging/StagingLevel.scala @@ -0,0 +1,52 @@ +package dotty.tools.dotc +package staging + +import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Decorators._ +import dotty.tools.dotc.core.Flags._ +import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.core.Types._ +import dotty.tools.dotc.util.Property +import 
dotty.tools.dotc.util.SrcPos + +import scala.collection.mutable + +object StagingLevel { + + /** A key to be used in a context property that tracks the staging level */ + private val LevelKey = new Property.Key[Int] + + /** A key to be used in a context property that caches the `levelOf` mapping */ + private val LevelOfKey = new Property.Key[Map[Symbol, Int]] + + /** All enclosing calls that are currently inlined, from innermost to outermost. */ + def level(using Context): Int = + ctx.property(LevelKey).getOrElse(0) + + /** Context with an incremented staging level. */ + def quoteContext(using Context): FreshContext = + ctx.fresh.setProperty(LevelKey, level + 1) + + /** Context with a decremented staging level. */ + def spliceContext(using Context): FreshContext = + ctx.fresh.setProperty(LevelKey, level - 1) + + /** If we are inside a quote or a splice */ + def inQuoteOrSpliceScope(using Context): Boolean = + ctx.property(LevelKey).isDefined + + /** The quotation level of the definition of the locally defined symbol */ + def levelOf(sym: Symbol)(using Context): Int = + ctx.property(LevelOfKey) match + case Some(map) => map.getOrElse(sym, 0) + case None => 0 + + /** Context with the current staging level set for the symbols */ + def symbolsInCurrentLevel(syms: List[Symbol])(using Context): Context = + if level == 0 then ctx + else + val levelOfMap = ctx.property(LevelOfKey).getOrElse(Map.empty) + val syms1 = syms//.filter(sym => !levelOfMap.contains(sym)) + val newMap = syms1.foldLeft(levelOfMap)((acc, sym) => acc.updated(sym, level)) + ctx.fresh.setProperty(LevelOfKey, newMap) +} diff --git a/compiler/src/dotty/tools/dotc/staging/TreeMapWithStages.scala b/compiler/src/dotty/tools/dotc/staging/TreeMapWithStages.scala new file mode 100644 index 000000000000..674dfff2f642 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/staging/TreeMapWithStages.scala @@ -0,0 +1,49 @@ +package dotty.tools.dotc +package staging + +import dotty.tools.dotc.ast.{TreeMapWithImplicits, 
tpd} +import dotty.tools.dotc.config.Printers.staging +import dotty.tools.dotc.core.Decorators._ +import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.staging.StagingLevel.* + +import scala.collection.mutable + +/** TreeMap that keeps track of staging levels using StagingLevel. */ +abstract class TreeMapWithStages extends TreeMapWithImplicits { + import tpd._ + + override def transform(tree: Tree)(using Context): Tree = + if (tree.source != ctx.source && tree.source.exists) + transform(tree)(using ctx.withSource(tree.source)) + else reporting.trace(i"TreeMapWithStages.transform $tree at $level", staging, show = true) { + tree match { + case Block(stats, _) => + val defSyms = stats.collect { case defTree: DefTree => defTree.symbol } + super.transform(tree)(using symbolsInCurrentLevel(defSyms)) + + case CaseDef(pat, guard, body) => + super.transform(tree)(using symbolsInCurrentLevel(tpd.patVars(pat))) + + case (_:Import | _:Export) => + tree + + case _: Template => + val decls = tree.symbol.owner.info.decls.toList + super.transform(tree)(using symbolsInCurrentLevel(decls)) + + case LambdaTypeTree(tparams, body) => + super.transform(tree)(using symbolsInCurrentLevel(tparams.map(_.symbol))) + + case tree: DefTree => + val paramSyms = tree match + case tree: DefDef => tree.paramss.flatten.map(_.symbol) + case _ => Nil + super.transform(tree)(using symbolsInCurrentLevel(tree.symbol :: paramSyms)) + + case _ => + super.transform(tree) + } + } +} diff --git a/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala b/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala index 14362260d032..3175ffceae49 100644 --- a/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala +++ b/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala @@ -71,7 +71,7 @@ abstract class AccessProxies { def needsAccessor(sym: Symbol)(using Context): Boolean def ifNoHost(reference: RefTree)(using Context): Tree = { - assert(false, 
"no host found for $reference with ${reference.symbol.showLocated} from ${ctx.owner}") + assert(false, i"no host found for $reference with ${reference.symbol.showLocated} from ${ctx.owner}") reference } diff --git a/compiler/src/dotty/tools/dotc/transform/BeanProperties.scala b/compiler/src/dotty/tools/dotc/transform/BeanProperties.scala index 0d464d319848..0c1f40d4f2bd 100644 --- a/compiler/src/dotty/tools/dotc/transform/BeanProperties.scala +++ b/compiler/src/dotty/tools/dotc/transform/BeanProperties.scala @@ -5,7 +5,8 @@ import core._ import ast.tpd._ import Annotations._ import Contexts._ -import Symbols.newSymbol +import Symbols.* +import SymUtils.* import Decorators._ import Flags._ import Names._ @@ -23,8 +24,6 @@ class BeanProperties(thisPhase: DenotTransformer): } ::: origBody) def generateAccessors(valDef: ValDef)(using Context): List[Tree] = - import Symbols.defn - def generateGetter(valDef: ValDef, annot: Annotation)(using Context) : Tree = val prefix = if annot matches defn.BooleanBeanPropertyAnnot then "is" else "get" val meth = newSymbol( @@ -34,9 +33,9 @@ class BeanProperties(thisPhase: DenotTransformer): info = MethodType(Nil, valDef.denot.info), coord = annot.tree.span ).enteredAfter(thisPhase).asTerm - meth.addAnnotations(valDef.symbol.annotations) + .withAnnotationsCarrying(valDef.symbol, defn.BeanGetterMetaAnnot) val body: Tree = ref(valDef.symbol) - DefDef(meth, body) + DefDef(meth, body).withSpan(meth.span) def maybeGenerateSetter(valDef: ValDef, annot: Annotation)(using Context): Option[Tree] = Option.when(valDef.denot.asSymDenotation.flags.is(Mutable)) { @@ -48,9 +47,9 @@ class BeanProperties(thisPhase: DenotTransformer): info = MethodType(valDef.name :: Nil, valDef.denot.info :: Nil, defn.UnitType), coord = annot.tree.span ).enteredAfter(thisPhase).asTerm - meth.addAnnotations(valDef.symbol.annotations) + .withAnnotationsCarrying(valDef.symbol, defn.BeanSetterMetaAnnot) def body(params: List[List[Tree]]): Tree = Assign(ref(valDef.symbol), 
params.head.head) - DefDef(meth, body) + DefDef(meth, body).withSpan(meth.span) } def prefixedName(prefix: String, valName: Name) = diff --git a/compiler/src/dotty/tools/dotc/transform/BetaReduce.scala b/compiler/src/dotty/tools/dotc/transform/BetaReduce.scala index 90c0207ebb6d..b8cbb4367db4 100644 --- a/compiler/src/dotty/tools/dotc/transform/BetaReduce.scala +++ b/compiler/src/dotty/tools/dotc/transform/BetaReduce.scala @@ -9,15 +9,18 @@ import Symbols._, Contexts._, Types._, Decorators._ import StdNames.nme import ast.TreeTypeMap +import scala.collection.mutable.ListBuffer + /** Rewrite an application * - * (((x1, ..., xn) => b): T)(y1, ..., yn) + * (([X1, ..., Xm] => (x1, ..., xn) => b): T)[T1, ..., Tm](y1, ..., yn) * * where * * - all yi are pure references without a prefix * - the closure can also be contextual or erased, but cannot be a SAM type - * _ the type ascription ...: T is optional + * - the type parameters Xi and type arguments Ti are optional + * - the type ascription ...: T is optional * * to * @@ -36,14 +39,10 @@ class BetaReduce extends MiniPhase: override def description: String = BetaReduce.description - override def transformApply(app: Apply)(using Context): Tree = app.fun match - case Select(fn, nme.apply) if defn.isFunctionType(fn.tpe) => - val app1 = BetaReduce(app, fn, app.args) - if app1 ne app then report.log(i"beta reduce $app -> $app1") - app1 - case _ => - app - + override def transformApply(app: Apply)(using Context): Tree = + val app1 = BetaReduce(app) + if app1 ne app then report.log(i"beta reduce $app -> $app1") + app1 object BetaReduce: import ast.tpd._ @@ -51,30 +50,77 @@ object BetaReduce: val name: String = "betaReduce" val description: String = "reduce closure applications" - /** Beta-reduces a call to `fn` with arguments `argSyms` or returns `tree` */ - def apply(original: Tree, fn: Tree, args: List[Tree])(using Context): Tree = - fn match - case Typed(expr, _) => - BetaReduce(original, expr, args) - case Block((anonFun: 
DefDef) :: Nil, closure: Closure) => - BetaReduce(anonFun, args) - case Block(stats, expr) => - val tree = BetaReduce(original, expr, args) - if tree eq original then original - else cpy.Block(fn)(stats, tree) - case Inlined(call, bindings, expr) => - val tree = BetaReduce(original, expr, args) - if tree eq original then original - else cpy.Inlined(fn)(call, bindings, tree) + /** Rewrite an application + * + * ((x1, ..., xn) => b)(e1, ..., en) + * + * to + * + * val/def x1 = e1; ...; val/def xn = en; b + * + * where `def` is used for call-by-name parameters. However, we shortcut any NoPrefix + * refs among the ei's directly without creating an intermediate binding. + * + * Similarly, rewrites type applications + * + * ([X1, ..., Xm] => (x1, ..., xn) => b).apply[T1, .., Tm](e1, ..., en) + * + * to + * + * type X1 = T1; ...; type Xm = Tm;val/def x1 = e1; ...; val/def xn = en; b + * + * This beta-reduction preserves the integrity of `Inlined` tree nodes. + */ + def apply(tree: Tree)(using Context): Tree = + val bindingsBuf = new ListBuffer[DefTree] + def recur(fn: Tree, argss: List[List[Tree]]): Option[Tree] = fn match + case Block((ddef : DefDef) :: Nil, closure: Closure) if ddef.symbol == closure.meth.symbol => + Some(reduceApplication(ddef, argss, bindingsBuf)) + case Block((TypeDef(_, template: Template)) :: Nil, Typed(Apply(Select(New(_), _), _), _)) if template.constr.rhs.isEmpty => + template.body match + case (ddef: DefDef) :: Nil => Some(reduceApplication(ddef, argss, bindingsBuf)) + case _ => None + case Block(stats, expr) if stats.forall(isPureBinding) => + recur(expr, argss).map(cpy.Block(fn)(stats, _)) + case Inlined(call, bindings, expr) if bindings.forall(isPureBinding) => + recur(expr, argss).map(cpy.Inlined(fn)(call, bindings, _)) + case Typed(expr, tpt) => + recur(expr, argss) + case TypeApply(Select(expr, nme.asInstanceOfPM), List(tpt)) => + recur(expr, argss) + case _ => None + tree match + case Apply(Select(fn, nme.apply), args) if 
defn.isFunctionType(fn.tpe) => + recur(fn, List(args)) match + case Some(reduced) => + seq(bindingsBuf.result(), reduced).withSpan(tree.span) + case None => + tree + case Apply(TypeApply(Select(fn, nme.apply), targs), args) if fn.tpe.typeSymbol eq dotc.core.Symbols.defn.PolyFunctionClass => + recur(fn, List(targs, args)) match + case Some(reduced) => + seq(bindingsBuf.result(), reduced).withSpan(tree.span) + case None => + tree case _ => - original - end apply + tree + + /** Beta-reduces a call to `ddef` with arguments `args` and registers new bindings */ + def reduceApplication(ddef: DefDef, argss: List[List[Tree]], bindings: ListBuffer[DefTree])(using Context): Tree = + val (targs, args) = argss.flatten.partition(_.isType) + val tparams = ddef.leadingTypeParams + val vparams = ddef.termParamss.flatten + + val targSyms = + for (targ, tparam) <- targs.zip(tparams) yield + targ.tpe.dealias match + case ref @ TypeRef(NoPrefix, _) => + ref.symbol + case _ => + val binding = TypeDef(newSymbol(ctx.owner, tparam.name, EmptyFlags, TypeAlias(targ.tpe), coord = targ.span)).withSpan(targ.span) + bindings += binding + binding.symbol - /** Beta-reduces a call to `ddef` with arguments `argSyms` */ - def apply(ddef: DefDef, args: List[Tree])(using Context) = - val bindings = List.newBuilder[ValDef] - val vparams = ddef.termParamss.iterator.flatten.toList - assert(args.hasSameLengthAs(vparams)) val argSyms = for (arg, param) <- args.zip(vparams) yield arg.tpe.dealias match @@ -82,16 +128,20 @@ object BetaReduce: ref.symbol case _ => val flags = Synthetic | (param.symbol.flags & Erased) - val tpe = if arg.tpe.dealias.isInstanceOf[ConstantType] then arg.tpe.dealias else arg.tpe.widen + val tpe = + if arg.tpe.isBottomType then param.tpe.widenTermRefExpr + else if arg.tpe.dealias.isInstanceOf[ConstantType] then arg.tpe.dealias + else arg.tpe.widen val binding = ValDef(newSymbol(ctx.owner, param.name, flags, tpe, coord = arg.span), arg).withSpan(arg.span) - bindings += binding + if 
!(tpe.isInstanceOf[ConstantType] && isPureExpr(arg)) then + bindings += binding binding.symbol val expansion = TreeTypeMap( oldOwners = ddef.symbol :: Nil, newOwners = ctx.owner :: Nil, - substFrom = vparams.map(_.symbol), - substTo = argSyms + substFrom = (tparams ::: vparams).map(_.symbol), + substTo = targSyms ::: argSyms ).transform(ddef.rhs) val expansion1 = new TreeMap { @@ -99,8 +149,5 @@ object BetaReduce: case ConstantType(const) if isPureExpr(tree) => cpy.Literal(tree)(const) case _ => super.transform(tree) }.transform(expansion) - val bindings1 = - bindings.result().filterNot(vdef => vdef.tpt.tpe.isInstanceOf[ConstantType] && isPureExpr(vdef.rhs)) - seq(bindings1, expansion1) - end apply + expansion1 diff --git a/compiler/src/dotty/tools/dotc/transform/Bridges.scala b/compiler/src/dotty/tools/dotc/transform/Bridges.scala index e302170991f9..569b16681cde 100644 --- a/compiler/src/dotty/tools/dotc/transform/Bridges.scala +++ b/compiler/src/dotty/tools/dotc/transform/Bridges.scala @@ -129,9 +129,12 @@ class Bridges(root: ClassSymbol, thisPhase: DenotTransformer)(using Context) { assert(ctx.typer.isInstanceOf[Erasure.Typer]) ctx.typer.typed(untpd.cpy.Apply(ref)(ref, args), member.info.finalResultType) else - val defn.ContextFunctionType(argTypes, resType, isErased) = tp: @unchecked + val defn.ContextFunctionType(argTypes, resType, erasedParams) = tp: @unchecked val anonFun = newAnonFun(ctx.owner, - MethodType(if isErased then Nil else argTypes, resType), + MethodType( + argTypes.zip(erasedParams.padTo(argTypes.length, false)) + .flatMap((t, e) => if e then None else Some(t)), + resType), coord = ctx.owner.coord) anonFun.info = transformInfo(anonFun, anonFun.info) diff --git a/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala b/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala index 6b0a4c3e9737..b63773687f74 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala +++ 
b/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala @@ -67,8 +67,8 @@ class CheckReentrant extends MiniPhase { if (sym.isTerm && !sym.isSetter && !isIgnored(sym)) if (sym.is(Mutable)) { report.error( - i"""possible data race involving globally reachable ${sym.showLocated}: ${sym.info} - | use -Ylog:checkReentrant+ to find out more about why the variable is reachable.""") + em"""possible data race involving globally reachable ${sym.showLocated}: ${sym.info} + | use -Ylog:checkReentrant+ to find out more about why the variable is reachable.""") shared += sym } else if (!sym.is(Method) || sym.isOneOf(Accessor | ParamAccessor)) diff --git a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala new file mode 100644 index 000000000000..bd521c8679d0 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala @@ -0,0 +1,786 @@ +package dotty.tools.dotc.transform + +import dotty.tools.dotc.ast.tpd +import dotty.tools.dotc.ast.tpd.{Inlined, TreeTraverser} +import dotty.tools.dotc.ast.untpd +import dotty.tools.dotc.ast.untpd.ImportSelector +import dotty.tools.dotc.config.ScalaSettings +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Decorators.{em, i} +import dotty.tools.dotc.core.Flags.* +import dotty.tools.dotc.core.Phases.Phase +import dotty.tools.dotc.core.StdNames +import dotty.tools.dotc.report +import dotty.tools.dotc.reporting.Message +import dotty.tools.dotc.typer.ImportInfo +import dotty.tools.dotc.util.{Property, SrcPos} +import dotty.tools.dotc.core.Mode +import dotty.tools.dotc.core.Types.{AnnotatedType, ConstantType, NoType, TermRef, Type, TypeTraverser} +import dotty.tools.dotc.core.Flags.flagsString +import dotty.tools.dotc.core.Flags +import dotty.tools.dotc.core.Names.Name +import dotty.tools.dotc.transform.MegaPhase.MiniPhase +import dotty.tools.dotc.core.Annotations +import dotty.tools.dotc.core.Definitions +import 
dotty.tools.dotc.core.NameKinds.WildcardParamName +import dotty.tools.dotc.core.Symbols.Symbol +import dotty.tools.dotc.core.StdNames.nme +import scala.math.Ordering + + +/** + * A compiler phase that checks for unused imports or definitions + * + * Basically, it gathers definition/imports and their usage. If a + * definition/imports does not have any usage, then it is reported. + */ +class CheckUnused private (phaseMode: CheckUnused.PhaseMode, suffix: String, _key: Property.Key[CheckUnused.UnusedData]) extends MiniPhase: + import CheckUnused.* + import UnusedData.* + + private def unusedDataApply[U](f: UnusedData => U)(using Context): Context = + ctx.property(_key).foreach(f) + ctx + + override def phaseName: String = CheckUnused.phaseNamePrefix + suffix + + override def description: String = CheckUnused.description + + override def isRunnable(using Context): Boolean = + super.isRunnable && + ctx.settings.Wunused.value.nonEmpty && + !ctx.isJava + + // ========== SETUP ============ + + override def prepareForUnit(tree: tpd.Tree)(using Context): Context = + val data = UnusedData() + tree.getAttachment(_key).foreach(oldData => + data.unusedAggregate = oldData.unusedAggregate + ) + val fresh = ctx.fresh.setProperty(_key, data) + tree.putAttachment(_key, data) + fresh + + // ========== END + REPORTING ========== + + override def transformUnit(tree: tpd.Tree)(using Context): tpd.Tree = + unusedDataApply { ud => + ud.finishAggregation() + if(phaseMode == PhaseMode.Report) then + ud.unusedAggregate.foreach(reportUnused) + } + tree + + // ========== MiniPhase Prepare ========== + override def prepareForOther(tree: tpd.Tree)(using Context): Context = + // A standard tree traverser covers cases not handled by the Mega/MiniPhase + traverser.traverse(tree) + ctx + + override def prepareForInlined(tree: tpd.Inlined)(using Context): Context = + traverser.traverse(tree.call) + ctx + + override def prepareForIdent(tree: tpd.Ident)(using Context): Context = + if tree.symbol.exists 
then + val prefixes = LazyList.iterate(tree.typeOpt.normalizedPrefix)(_.normalizedPrefix).takeWhile(_ != NoType) + .take(10) // Failsafe for the odd case if there was an infinite cycle + for prefix <- prefixes do + unusedDataApply(_.registerUsed(prefix.classSymbol, None)) + unusedDataApply(_.registerUsed(tree.symbol, Some(tree.name))) + else if tree.hasType then + unusedDataApply(_.registerUsed(tree.tpe.classSymbol, Some(tree.name))) + else + ctx + + override def prepareForSelect(tree: tpd.Select)(using Context): Context = + unusedDataApply(_.registerUsed(tree.symbol, Some(tree.name))) + + override def prepareForBlock(tree: tpd.Block)(using Context): Context = + pushInBlockTemplatePackageDef(tree) + + override def prepareForTemplate(tree: tpd.Template)(using Context): Context = + pushInBlockTemplatePackageDef(tree) + + override def prepareForPackageDef(tree: tpd.PackageDef)(using Context): Context = + pushInBlockTemplatePackageDef(tree) + + override def prepareForValDef(tree: tpd.ValDef)(using Context): Context = + unusedDataApply{ud => + // do not register the ValDef generated for `object` + traverseAnnotations(tree.symbol) + if !tree.symbol.is(Module) then + ud.registerDef(tree) + if tree.name.mangledString.startsWith(nme.derived.mangledString + "$") + && tree.typeOpt != NoType then + ud.registerUsed(tree.typeOpt.typeSymbol, None, true) + ud.addIgnoredUsage(tree.symbol) + } + + override def prepareForDefDef(tree: tpd.DefDef)(using Context): Context = + unusedDataApply{ ud => + if !tree.symbol.is(Private) then + tree.termParamss.flatten.foreach { p => + ud.addIgnoredParam(p.symbol) + } + import ud.registerTrivial + tree.registerTrivial + traverseAnnotations(tree.symbol) + ud.registerDef(tree) + ud.addIgnoredUsage(tree.symbol) + } + + override def prepareForTypeDef(tree: tpd.TypeDef)(using Context): Context = + unusedDataApply{ ud => + if !tree.symbol.is(Param) then // Ignore type parameter (as Scala 2) + traverseAnnotations(tree.symbol) + ud.registerDef(tree) + 
ud.addIgnoredUsage(tree.symbol) + } + + override def prepareForBind(tree: tpd.Bind)(using Context): Context = + traverseAnnotations(tree.symbol) + unusedDataApply(_.registerPatVar(tree)) + + override def prepareForTypeTree(tree: tpd.TypeTree)(using Context): Context = + if !tree.isInstanceOf[tpd.InferredTypeTree] then typeTraverser(unusedDataApply).traverse(tree.tpe) + ctx + + override def prepareForAssign(tree: tpd.Assign)(using Context): Context = + unusedDataApply{ ud => + val sym = tree.lhs.symbol + if sym.exists then + ud.registerSetVar(sym) + } + + // ========== MiniPhase Transform ========== + + override def transformBlock(tree: tpd.Block)(using Context): tpd.Tree = + popOutBlockTemplatePackageDef() + tree + + override def transformTemplate(tree: tpd.Template)(using Context): tpd.Tree = + popOutBlockTemplatePackageDef() + tree + + override def transformPackageDef(tree: tpd.PackageDef)(using Context): tpd.Tree = + popOutBlockTemplatePackageDef() + tree + + override def transformValDef(tree: tpd.ValDef)(using Context): tpd.Tree = + unusedDataApply(_.removeIgnoredUsage(tree.symbol)) + tree + + override def transformDefDef(tree: tpd.DefDef)(using Context): tpd.Tree = + unusedDataApply(_.removeIgnoredUsage(tree.symbol)) + tree + + override def transformTypeDef(tree: tpd.TypeDef)(using Context): tpd.Tree = + unusedDataApply(_.removeIgnoredUsage(tree.symbol)) + tree + + + // ---------- MiniPhase HELPERS ----------- + + private def pushInBlockTemplatePackageDef(tree: tpd.Block | tpd.Template | tpd.PackageDef)(using Context): Context = + unusedDataApply { ud => + ud.pushScope(UnusedData.ScopeType.fromTree(tree)) + } + ctx + + private def popOutBlockTemplatePackageDef()(using Context): Context = + unusedDataApply { ud => + ud.popScope() + } + ctx + + private def newCtx(tree: tpd.Tree)(using Context) = + if tree.symbol.exists then ctx.withOwner(tree.symbol) else ctx + + /** + * This traverse is the **main** component of this phase + * + * It traverse the tree the tree 
and gather the data in the + * corresponding context property + */ + private def traverser = new TreeTraverser: + import tpd._ + import UnusedData.ScopeType + + /* Register every imports, definition and usage */ + override def traverse(tree: tpd.Tree)(using Context): Unit = + val newCtx = if tree.symbol.exists then ctx.withOwner(tree.symbol) else ctx + tree match + case imp: tpd.Import => + unusedDataApply(_.registerImport(imp)) + imp.selectors.filter(_.isGiven).map(_.bound).collect { + case untpd.TypedSplice(tree1) => tree1 + }.foreach(traverse(_)(using newCtx)) + traverseChildren(tree)(using newCtx) + case ident: Ident => + prepareForIdent(ident) + traverseChildren(tree)(using newCtx) + case sel: Select => + prepareForSelect(sel) + traverseChildren(tree)(using newCtx) + case tree: (tpd.Block | tpd.Template | tpd.PackageDef) => + //! DIFFERS FROM MINIPHASE + pushInBlockTemplatePackageDef(tree) + traverseChildren(tree)(using newCtx) + popOutBlockTemplatePackageDef() + case t:tpd.ValDef => + prepareForValDef(t) + traverseChildren(tree)(using newCtx) + transformValDef(t) + case t:tpd.DefDef => + prepareForDefDef(t) + traverseChildren(tree)(using newCtx) + transformDefDef(t) + case t:tpd.TypeDef => + prepareForTypeDef(t) + traverseChildren(tree)(using newCtx) + transformTypeDef(t) + case t: tpd.Bind => + prepareForBind(t) + traverseChildren(tree)(using newCtx) + case t:tpd.Assign => + prepareForAssign(t) + traverseChildren(tree) + case _: tpd.InferredTypeTree => + case t@tpd.TypeTree() => + //! DIFFERS FROM MINIPHASE + typeTraverser(unusedDataApply).traverse(t.tpe) + traverseChildren(tree)(using newCtx) + case _ => + //! 
DIFFERS FROM MINIPHASE + traverseChildren(tree)(using newCtx) + end traverse + end traverser + + /** This is a type traverser which catch some special Types not traversed by the term traverser above */ + private def typeTraverser(dt: (UnusedData => Any) => Unit)(using Context) = new TypeTraverser: + override def traverse(tp: Type): Unit = + if tp.typeSymbol.exists then dt(_.registerUsed(tp.typeSymbol, Some(tp.typeSymbol.name))) + tp match + case AnnotatedType(_, annot) => + dt(_.registerUsed(annot.symbol, None)) + traverseChildren(tp) + case _ => + traverseChildren(tp) + + /** This traverse the annotations of the symbol */ + private def traverseAnnotations(sym: Symbol)(using Context): Unit = + sym.denot.annotations.foreach(annot => traverser.traverse(annot.tree)) + + + /** Do the actual reporting given the result of the anaylsis */ + private def reportUnused(res: UnusedData.UnusedResult)(using Context): Unit = + res.warnings.toList.sortBy(_.pos.line)(using Ordering[Int]).foreach { s => + s match + case UnusedSymbol(t, _, WarnTypes.Imports) => + report.warning(s"unused import", t) + case UnusedSymbol(t, _, WarnTypes.LocalDefs) => + report.warning(s"unused local definition", t) + case UnusedSymbol(t, _, WarnTypes.ExplicitParams) => + report.warning(s"unused explicit parameter", t) + case UnusedSymbol(t, _, WarnTypes.ImplicitParams) => + report.warning(s"unused implicit parameter", t) + case UnusedSymbol(t, _, WarnTypes.PrivateMembers) => + report.warning(s"unused private member", t) + case UnusedSymbol(t, _, WarnTypes.PatVars) => + report.warning(s"unused pattern variable", t) + case UnusedSymbol(t, _, WarnTypes.UnsetLocals) => + report.warning(s"unset local variable", t) + case UnusedSymbol(t, _, WarnTypes.UnsetPrivates) => + report.warning(s"unset private variable", t) + } + +end CheckUnused + +object CheckUnused: + val phaseNamePrefix: String = "checkUnused" + val description: String = "check for unused elements" + + enum PhaseMode: + case Aggregate + case Report 
+ + private enum WarnTypes: + case Imports + case LocalDefs + case ExplicitParams + case ImplicitParams + case PrivateMembers + case PatVars + case UnsetLocals + case UnsetPrivates + + /** + * The key used to retrieve the "unused entity" analysis metadata, + * from the compilation `Context` + */ + private val _key = Property.StickyKey[UnusedData] + + class PostTyper extends CheckUnused(PhaseMode.Aggregate, "PostTyper", _key) + + class PostInlining extends CheckUnused(PhaseMode.Report, "PostInlining", _key) + + /** + * A stateful class gathering the infos on : + * - imports + * - definitions + * - usage + */ + private class UnusedData: + import collection.mutable.{Set => MutSet, Map => MutMap, Stack => MutStack} + import UnusedData.* + + /** The current scope during the tree traversal */ + val currScopeType: MutStack[ScopeType] = MutStack(ScopeType.Other) + + var unusedAggregate: Option[UnusedResult] = None + + /* IMPORTS */ + private val impInScope = MutStack(MutSet[tpd.Import]()) + /** + * We store the symbol along with their accessibility without import. 
+ * Accessibility to their definition in outer context/scope + * + * See the `isAccessibleAsIdent` extension method below in the file + */ + private val usedInScope = MutStack(MutSet[(Symbol,Boolean, Option[Name], Boolean)]()) + private val usedInPosition = MutSet[(SrcPos, Name)]() + /* unused import collected during traversal */ + private val unusedImport = MutSet[ImportSelector]() + + /* LOCAL DEF OR VAL / Private Def or Val / Pattern variables */ + private val localDefInScope = MutSet[tpd.MemberDef]() + private val privateDefInScope = MutSet[tpd.MemberDef]() + private val explicitParamInScope = MutSet[tpd.MemberDef]() + private val implicitParamInScope = MutSet[tpd.MemberDef]() + private val patVarsInScope = MutSet[tpd.Bind]() + + /** All variables sets*/ + private val setVars = MutSet[Symbol]() + + /** All used symbols */ + private val usedDef = MutSet[Symbol]() + /** Do not register as used */ + private val doNotRegister = MutSet[Symbol]() + + /** Trivial definitions, avoid registering params */ + private val trivialDefs = MutSet[Symbol]() + + private val paramsToSkip = MutSet[Symbol]() + + + def finishAggregation(using Context)(): Unit = + val unusedInThisStage = this.getUnused + this.unusedAggregate match { + case None => + this.unusedAggregate = Some(unusedInThisStage) + case Some(prevUnused) => + val intersection = unusedInThisStage.warnings.intersect(prevUnused.warnings) + this.unusedAggregate = Some(UnusedResult(intersection)) + } + + + /** + * Register a found (used) symbol along with its name + * + * The optional name will be used to target the right import + * as the same element can be imported with different renaming + */ + def registerUsed(sym: Symbol, name: Option[Name], isDerived: Boolean = false)(using Context): Unit = + if !isConstructorOfSynth(sym) && !doNotRegister(sym) then + if sym.isConstructor && sym.exists then + registerUsed(sym.owner, None) // constructor are "implicitly" imported with the class + else + usedInScope.top += ((sym, 
sym.isAccessibleAsIdent, name, isDerived)) + usedInScope.top += ((sym.companionModule, sym.isAccessibleAsIdent, name, isDerived)) + usedInScope.top += ((sym.companionClass, sym.isAccessibleAsIdent, name, isDerived)) + if sym.sourcePos.exists then + name.map(n => usedInPosition += ((sym.sourcePos, n))) + + /** Register a symbol that should be ignored */ + def addIgnoredUsage(sym: Symbol)(using Context): Unit = + doNotRegister ++= sym.everySymbol + + /** Remove a symbol that shouldn't be ignored anymore */ + def removeIgnoredUsage(sym: Symbol)(using Context): Unit = + doNotRegister --= sym.everySymbol + + def addIgnoredParam(sym: Symbol)(using Context): Unit = + paramsToSkip += sym + + /** Register an import */ + def registerImport(imp: tpd.Import)(using Context): Unit = + if !tpd.languageImport(imp.expr).nonEmpty && !imp.isGeneratedByEnum && !isTransparentAndInline(imp) then + impInScope.top += imp + unusedImport ++= imp.selectors.filter { s => + !shouldSelectorBeReported(imp, s) && !isImportExclusion(s) + } + + /** Register (or not) some `val` or `def` according to the context, scope and flags */ + def registerDef(memDef: tpd.MemberDef)(using Context): Unit = + if memDef.isValidMemberDef then + if memDef.isValidParam then + if memDef.symbol.isOneOf(GivenOrImplicit) then + if !paramsToSkip.contains(memDef.symbol) then + implicitParamInScope += memDef + else if !paramsToSkip.contains(memDef.symbol) then + explicitParamInScope += memDef + else if currScopeType.top == ScopeType.Local then + localDefInScope += memDef + else if memDef.shouldReportPrivateDef then + privateDefInScope += memDef + + /** Register pattern variable */ + def registerPatVar(patvar: tpd.Bind)(using Context): Unit = + if !patvar.symbol.isUnusedAnnot then + patVarsInScope += patvar + + /** enter a new scope */ + def pushScope(newScopeType: ScopeType): Unit = + // unused imports : + currScopeType.push(newScopeType) + impInScope.push(MutSet()) + usedInScope.push(MutSet()) + + def registerSetVar(sym: 
Symbol): Unit = + setVars += sym + + /** + * leave the current scope and do : + * + * - If there are imports in this scope check for unused ones + */ + def popScope()(using Context): Unit = + // used symbol in this scope + val used = usedInScope.pop().toSet + // used imports in this scope + val imports = impInScope.pop() + val kept = used.filterNot { (sym, isAccessible, optName, isDerived) => + // keep the symbol for outer scope, if it matches **no** import + // This is the first matching wildcard selector + var selWildCard: Option[ImportSelector] = None + + val matchedExplicitImport = imports.exists { imp => + sym.isInImport(imp, isAccessible, optName, isDerived) match + case None => false + case optSel@Some(sel) if sel.isWildcard => + if selWildCard.isEmpty then selWildCard = optSel + // We keep wildcard symbol for the end as they have the least precedence + false + case Some(sel) => + unusedImport -= sel + true + } + if !matchedExplicitImport && selWildCard.isDefined then + unusedImport -= selWildCard.get + true // a matching import exists so the symbol won't be kept for outer scope + else + matchedExplicitImport + } + + // if there's an outer scope + if usedInScope.nonEmpty then + // we keep the symbols not referencing an import in this scope + // as it can be the only reference to an outer import + usedInScope.top ++= kept + // register usage in this scope for other warnings at the end of the phase + usedDef ++= used.map(_._1) + // retrieve previous scope type + currScopeType.pop + end popScope + + /** + * Leave the scope and return a `List` of unused `ImportSelector`s + * + * The given `List` is sorted by line and then column of the position + */ + + def getUnused(using Context): UnusedResult = + popScope() + + val sortedImp = + if ctx.settings.WunusedHas.imports || ctx.settings.WunusedHas.strictNoImplicitWarn then + unusedImport.map(d => UnusedSymbol(d.srcPos, d.name, WarnTypes.Imports)).toList + else + Nil + // Partition to extract unset local variables 
from usedLocalDefs + val (usedLocalDefs, unusedLocalDefs) = + if ctx.settings.WunusedHas.locals then + localDefInScope.partition(d => d.symbol.usedDefContains) + else + (Nil, Nil) + val sortedLocalDefs = + unusedLocalDefs + .filterNot(d => usedInPosition.exists { case (pos, name) => d.span.contains(pos.span) && name == d.symbol.name}) + .filterNot(d => containsSyntheticSuffix(d.symbol)) + .map(d => UnusedSymbol(d.namePos, d.name, WarnTypes.LocalDefs)).toList + val unsetLocalDefs = usedLocalDefs.filter(isUnsetVarDef).map(d => UnusedSymbol(d.namePos, d.name, WarnTypes.UnsetLocals)).toList + + val sortedExplicitParams = + if ctx.settings.WunusedHas.explicits then + explicitParamInScope + .filterNot(d => d.symbol.usedDefContains) + .filterNot(d => usedInPosition.exists { case (pos, name) => d.span.contains(pos.span) && name == d.symbol.name}) + .filterNot(d => containsSyntheticSuffix(d.symbol)) + .map(d => UnusedSymbol(d.namePos, d.name, WarnTypes.ExplicitParams)).toList + else + Nil + val sortedImplicitParams = + if ctx.settings.WunusedHas.implicits then + implicitParamInScope + .filterNot(d => d.symbol.usedDefContains) + .filterNot(d => containsSyntheticSuffix(d.symbol)) + .map(d => UnusedSymbol(d.namePos, d.name, WarnTypes.ImplicitParams)).toList + else + Nil + // Partition to extract unset private variables from usedPrivates + val (usedPrivates, unusedPrivates) = + if ctx.settings.WunusedHas.privates then + privateDefInScope.partition(d => d.symbol.usedDefContains) + else + (Nil, Nil) + val sortedPrivateDefs = unusedPrivates.filterNot(d => containsSyntheticSuffix(d.symbol)).map(d => UnusedSymbol(d.namePos, d.name, WarnTypes.PrivateMembers)).toList + val unsetPrivateDefs = usedPrivates.filter(isUnsetVarDef).map(d => UnusedSymbol(d.namePos, d.name, WarnTypes.UnsetPrivates)).toList + val sortedPatVars = + if ctx.settings.WunusedHas.patvars then + patVarsInScope + .filterNot(d => d.symbol.usedDefContains) + .filterNot(d => containsSyntheticSuffix(d.symbol)) + 
.filterNot(d => usedInPosition.exists { case (pos, name) => d.span.contains(pos.span) && name == d.symbol.name}) + .map(d => UnusedSymbol(d.namePos, d.name, WarnTypes.PatVars)).toList + else + Nil + val warnings = + List(sortedImp, sortedLocalDefs, sortedExplicitParams, sortedImplicitParams, + sortedPrivateDefs, sortedPatVars, unsetLocalDefs, unsetPrivateDefs).flatten.sortBy { s => + val pos = s.pos.sourcePos + (pos.line, pos.column) + } + UnusedResult(warnings.toSet) + end getUnused + //============================ HELPERS ==================================== + + + /** + * Checks if import selects a def that is transparent and inline + */ + private def isTransparentAndInline(imp: tpd.Import)(using Context): Boolean = + imp.selectors.exists { sel => + val qual = imp.expr + val importedMembers = qual.tpe.member(sel.name).alternatives.map(_.symbol) + importedMembers.exists(s => s.is(Transparent) && s.is(Inline)) + } + + /** + * Heuristic to detect synthetic suffixes in names of symbols + */ + private def containsSyntheticSuffix(symbol: Symbol)(using Context): Boolean = + symbol.name.mangledString.contains("$") + + /** + * Is the the constructor of synthetic package object + * Should be ignored as it is always imported/used in package + * Trigger false negative on used import + * + * Without this check example: + * + * --- WITH PACKAGE : WRONG --- + * {{{ + * package a: + * val x: Int = 0 + * package b: + * import a._ // no warning + * }}} + * --- WITH OBJECT : OK --- + * {{{ + * object a: + * val x: Int = 0 + * object b: + * import a._ // unused warning + * }}} + */ + private def isConstructorOfSynth(sym: Symbol)(using Context): Boolean = + sym.exists && sym.isConstructor && sym.owner.isPackageObject && sym.owner.is(Synthetic) + + /** + * This is used to avoid reporting the parameters of the synthetic main method + * generated by `@main` + */ + private def isSyntheticMainParam(sym: Symbol)(using Context): Boolean = + sym.exists && ctx.platform.isMainMethod(sym.owner) 
&& sym.owner.is(Synthetic) + + /** + * This is used to ignore exclusion imports (i.e. import `qual`.{`member` => _}) + */ + private def isImportExclusion(sel: ImportSelector): Boolean = sel.renamed match + case untpd.Ident(name) => name == StdNames.nme.WILDCARD + case _ => false + + /** + * If -Wunused:strict-no-implicit-warn import and this import selector could potentially import implicit. + * return true + */ + private def shouldSelectorBeReported(imp: tpd.Import, sel: ImportSelector)(using Context): Boolean = + ctx.settings.WunusedHas.strictNoImplicitWarn && ( + sel.isWildcard || + imp.expr.tpe.member(sel.name.toTermName).alternatives.exists(_.symbol.isOneOf(GivenOrImplicit)) || + imp.expr.tpe.member(sel.name.toTypeName).alternatives.exists(_.symbol.isOneOf(GivenOrImplicit)) + ) + + extension (tree: ImportSelector) + def boundTpe: Type = tree.bound match { + case untpd.TypedSplice(tree1) => tree1.tpe + case _ => NoType + } + + extension (sym: Symbol) + /** is accessible without import in current context */ + private def isAccessibleAsIdent(using Context): Boolean = + sym.exists && + ctx.outersIterator.exists{ c => + c.owner == sym.owner + || sym.owner.isClass && c.owner.isClass + && c.owner.thisType.baseClasses.contains(sym.owner) + && c.owner.thisType.member(sym.name).alternatives.contains(sym) + } + + /** Given an import and accessibility, return selector that matches import<->symbol */ + private def isInImport(imp: tpd.Import, isAccessible: Boolean, symName: Option[Name], isDerived: Boolean)(using Context): Option[ImportSelector] = + val tpd.Import(qual, sels) = imp + val dealiasedSym = dealias(sym) + val simpleSelections = qual.tpe.member(sym.name).alternatives + val typeSelections = sels.flatMap(n => qual.tpe.member(n.name.toTypeName).alternatives) + val termSelections = sels.flatMap(n => qual.tpe.member(n.name.toTermName).alternatives) + val selectionsToDealias = typeSelections ::: termSelections + val qualHasSymbol = 
simpleSelections.map(_.symbol).contains(sym) || (simpleSelections ::: selectionsToDealias).map(_.symbol).map(dealias).contains(dealiasedSym) + def selector = sels.find(sel => (sel.name.toTermName == sym.name || sel.name.toTypeName == sym.name) && symName.map(n => n.toTermName == sel.rename).getOrElse(true)) + def dealiasedSelector = if(isDerived) sels.flatMap(sel => selectionsToDealias.map(m => (sel, m.symbol))).collect { + case (sel, sym) if dealias(sym) == dealiasedSym => sel + }.headOption else None + def givenSelector = if sym.is(Given) || sym.is(Implicit) + then sels.filter(sel => sel.isGiven && !sel.bound.isEmpty).find(sel => sel.boundTpe =:= sym.info) + else None + def wildcard = sels.find(sel => sel.isWildcard && ((sym.is(Given) == sel.isGiven && sel.bound.isEmpty) || sym.is(Implicit))) + if qualHasSymbol && (!isAccessible || sym.isRenamedSymbol(symName)) && sym.exists then + selector.orElse(dealiasedSelector).orElse(givenSelector).orElse(wildcard) // selector with name or wildcard (or given) + else + None + + private def isRenamedSymbol(symNameInScope: Option[Name])(using Context) = + sym.name != nme.NO_NAME && symNameInScope.exists(_.toSimpleName != sym.name.toSimpleName) + + private def dealias(symbol: Symbol)(using Context): Symbol = + if(symbol.isType && symbol.asType.denot.isAliasType) then + symbol.asType.typeRef.dealias.typeSymbol + else symbol + /** Annotated with @unused */ + private def isUnusedAnnot(using Context): Boolean = + sym.annotations.exists(a => a.symbol == ctx.definitions.UnusedAnnot) + + private def shouldNotReportParamOwner(using Context): Boolean = + if sym.exists then + val owner = sym.owner + trivialDefs(owner) || // is a trivial def + owner.isPrimaryConstructor || + owner.annotations.exists ( // @depreacated + _.symbol == ctx.definitions.DeprecatedAnnot + ) || + owner.isAllOf(Synthetic | PrivateLocal) || + owner.is(Accessor) || + owner.isOverriden + else + false + + private def usedDefContains(using Context): Boolean = + 
sym.everySymbol.exists(usedDef.apply) + + private def everySymbol(using Context): List[Symbol] = + List(sym, sym.companionClass, sym.companionModule, sym.moduleClass).filter(_.exists) + + /** A function is overriden. Either has `override flags` or parent has a matching member (type and name) */ + private def isOverriden(using Context): Boolean = + sym.is(Flags.Override) || + (sym.exists && sym.owner.thisType.parents.exists(p => sym.matchingMember(p).exists)) + + end extension + + extension (defdef: tpd.DefDef) + // so trivial that it never consumes params + private def isTrivial(using Context): Boolean = + val rhs = defdef.rhs + rhs.symbol == ctx.definitions.Predef_undefined || + rhs.tpe =:= ctx.definitions.NothingType || + defdef.symbol.is(Deferred) || + (rhs match { + case _: tpd.Literal => true + case _ => rhs.tpe match + case ConstantType(_) => true + case tp: TermRef => + // Detect Scala 2 SingleType + tp.underlying.classSymbol.is(Flags.Module) + case _ => + false + }) + def registerTrivial(using Context): Unit = + if defdef.isTrivial then + trivialDefs += defdef.symbol + + extension (memDef: tpd.MemberDef) + private def isValidMemberDef(using Context): Boolean = + memDef.symbol.exists + && !memDef.symbol.isUnusedAnnot + && !memDef.symbol.isAllOf(Flags.AccessorCreationFlags) + && !memDef.name.isWildcard + && !memDef.symbol.owner.is(ExtensionMethod) + + private def isValidParam(using Context): Boolean = + val sym = memDef.symbol + (sym.is(Param) || sym.isAllOf(PrivateParamAccessor | Local, butNot = CaseAccessor)) && + !isSyntheticMainParam(sym) && + !sym.shouldNotReportParamOwner + + private def shouldReportPrivateDef(using Context): Boolean = + currScopeType.top == ScopeType.Template && !memDef.symbol.isConstructor && memDef.symbol.is(Private, butNot = SelfName | Synthetic | CaseAccessor) + + private def isUnsetVarDef(using Context): Boolean = + val sym = memDef.symbol + sym.is(Mutable) && !setVars(sym) + + extension (imp: tpd.Import) + /** Enum generate an 
import for its cases (but outside them), which should be ignored */ + def isGeneratedByEnum(using Context): Boolean = + imp.symbol.exists && imp.symbol.owner.is(Flags.Enum, butNot = Flags.Case) + + extension (thisName: Name) + private def isWildcard: Boolean = + thisName == StdNames.nme.WILDCARD || thisName.is(WildcardParamName) + + end UnusedData + + private object UnusedData: + enum ScopeType: + case Local + case Template + case Other + + object ScopeType: + /** return the scope corresponding to the enclosing scope of the given tree */ + def fromTree(tree: tpd.Tree): ScopeType = tree match + case _:tpd.Template => Template + case _:tpd.Block => Local + case _ => Other + + case class UnusedSymbol(pos: SrcPos, name: Name, warnType: WarnTypes) + /** A container for the results of the used elements analysis */ + case class UnusedResult(warnings: Set[UnusedSymbol]) + object UnusedResult: + val Empty = UnusedResult(Set.empty) + +end CheckUnused + diff --git a/compiler/src/dotty/tools/dotc/transform/CompleteJavaEnums.scala b/compiler/src/dotty/tools/dotc/transform/CompleteJavaEnums.scala index be454281bcbb..b7e8ccf4e7e1 100644 --- a/compiler/src/dotty/tools/dotc/transform/CompleteJavaEnums.scala +++ b/compiler/src/dotty/tools/dotc/transform/CompleteJavaEnums.scala @@ -80,7 +80,7 @@ class CompleteJavaEnums extends MiniPhase with InfoTransformer { thisPhase => parents.map { case app @ Apply(fn, args0) if fn.symbol.owner == targetCls => if args0.nonEmpty && targetCls == defn.JavaEnumClass then - report.error("the constructor of java.lang.Enum cannot be called explicitly", app.sourcePos) + report.error(em"the constructor of java.lang.Enum cannot be called explicitly", app.sourcePos) cpy.Apply(app)(fn, args0 ++ args) case p => p } @@ -110,7 +110,7 @@ class CompleteJavaEnums extends MiniPhase with InfoTransformer { thisPhase => yield { def forwarderSym(flags: FlagSet, info: Type): Symbol { type ThisName = TermName } = val sym = newSymbol(clazz, enumValue.name.asTermName, 
flags, info) - sym.addAnnotation(Annotations.Annotation(defn.ScalaStaticAnnot)) + sym.addAnnotation(Annotations.Annotation(defn.ScalaStaticAnnot, sym.span)) sym val body = moduleRef.select(enumValue) if ctx.settings.scalajs.value then diff --git a/compiler/src/dotty/tools/dotc/transform/Constructors.scala b/compiler/src/dotty/tools/dotc/transform/Constructors.scala index 59b90ff7f084..4dd7205e4ee0 100644 --- a/compiler/src/dotty/tools/dotc/transform/Constructors.scala +++ b/compiler/src/dotty/tools/dotc/transform/Constructors.scala @@ -226,31 +226,39 @@ class Constructors extends MiniPhase with IdentityDenotTransformer { thisPhase = constrStats += intoConstr(stat, sym) } else dropped += sym - case stat @ DefDef(name, _, tpt, _) - if stat.symbol.isGetter && stat.symbol.owner.is(Trait) && !stat.symbol.is(Lazy) && !stat.symbol.isConstExprFinalVal => + case stat @ DefDef(name, _, tpt, _) if stat.symbol.isGetter && !stat.symbol.is(Lazy) => val sym = stat.symbol assert(isRetained(sym), sym) - if !stat.rhs.isEmpty && !isWildcardArg(stat.rhs) then - /* !!! Work around #9390 - * This should really just be `sym.setter`. However, if we do that, we'll miss - * setters for mixed in `private var`s. Even though the scope clearly contains the - * setter symbol with the correct Name structure (since the `find` finds it), - * `.decl(setterName)` used by `.setter` through `.accessorNamed` will *not* find it. - * Could it be that the hash table of the `Scope` is corrupted? - * We still try `sym.setter` first as an optimization, since it will work for all - * public vars in traits and all (public or private) vars in classes. 
- */ - val symSetter = - if sym.setter.exists then - sym.setter - else - val setterName = sym.asTerm.name.setterName - sym.owner.info.decls.find(d => d.is(Accessor) && d.name == setterName) - val setter = - if (symSetter.exists) symSetter - else sym.accessorNamed(Mixin.traitSetterName(sym.asTerm)) - constrStats += Apply(ref(setter), intoConstr(stat.rhs, sym).withSpan(stat.span) :: Nil) - clsStats += cpy.DefDef(stat)(rhs = EmptyTree) + if sym.isConstExprFinalVal then + if stat.rhs.isInstanceOf[Literal] then + clsStats += stat + else + constrStats += intoConstr(stat.rhs, sym) + clsStats += cpy.DefDef(stat)(rhs = Literal(sym.constExprFinalValConstantType.value).withSpan(stat.span)) + else if !sym.owner.is(Trait) then + clsStats += stat + else + if !stat.rhs.isEmpty && !isWildcardArg(stat.rhs) then + /* !!! Work around #9390 + * This should really just be `sym.setter`. However, if we do that, we'll miss + * setters for mixed in `private var`s. Even though the scope clearly contains the + * setter symbol with the correct Name structure (since the `find` finds it), + * `.decl(setterName)` used by `.setter` through `.accessorNamed` will *not* find it. + * Could it be that the hash table of the `Scope` is corrupted? + * We still try `sym.setter` first as an optimization, since it will work for all + * public vars in traits and all (public or private) vars in classes. 
+ */ + val symSetter = + if sym.setter.exists then + sym.setter + else + val setterName = sym.asTerm.name.setterName + sym.owner.info.decls.find(d => d.is(Accessor) && d.name == setterName) + val setter = + if (symSetter.exists) symSetter + else sym.accessorNamed(Mixin.traitSetterName(sym.asTerm)) + constrStats += Apply(ref(setter), intoConstr(stat.rhs, sym).withSpan(stat.span) :: Nil) + clsStats += cpy.DefDef(stat)(rhs = EmptyTree) case DefDef(nme.CONSTRUCTOR, ((outerParam @ ValDef(nme.OUTER, _, _)) :: _) :: Nil, _, _) => clsStats += mapOuter(outerParam.symbol).transform(stat) case _: DefTree => diff --git a/compiler/src/dotty/tools/dotc/transform/ContextFunctionResults.scala b/compiler/src/dotty/tools/dotc/transform/ContextFunctionResults.scala index be58fb41f1da..b4eb71c541d3 100644 --- a/compiler/src/dotty/tools/dotc/transform/ContextFunctionResults.scala +++ b/compiler/src/dotty/tools/dotc/transform/ContextFunctionResults.scala @@ -39,7 +39,7 @@ object ContextFunctionResults: val count = contextResultCount(mdef.rhs, mdef.tpt.tpe) if Config.flattenContextFunctionResults && count != 0 && !disabled then - val countAnnot = Annotation(defn.ContextResultCountAnnot, Literal(Constant(count))) + val countAnnot = Annotation(defn.ContextResultCountAnnot, Literal(Constant(count)), mdef.symbol.span) mdef.symbol.addAnnotation(countAnnot) end annotateContextResults @@ -58,7 +58,7 @@ object ContextFunctionResults: */ def contextResultsAreErased(sym: Symbol)(using Context): Boolean = def allErased(tp: Type): Boolean = tp.dealias match - case defn.ContextFunctionType(_, resTpe, isErased) => isErased && allErased(resTpe) + case defn.ContextFunctionType(_, resTpe, erasedParams) => !erasedParams.contains(false) && allErased(resTpe) case _ => true contextResultCount(sym) > 0 && allErased(sym.info.finalResultType) @@ -72,10 +72,8 @@ object ContextFunctionResults: integrateContextResults(rt, crCount) case tp: MethodOrPoly => tp.derivedLambdaType(resType = 
integrateContextResults(tp.resType, crCount)) - case defn.ContextFunctionType(argTypes, resType, isErased) => - val methodType: MethodTypeCompanion = - if isErased then ErasedMethodType else MethodType - methodType(argTypes, integrateContextResults(resType, crCount - 1)) + case defn.ContextFunctionType(argTypes, resType, erasedParams) => + MethodType(argTypes, integrateContextResults(resType, crCount - 1)) /** The total number of parameters of method `sym`, not counting * erased parameters, but including context result parameters. @@ -85,14 +83,16 @@ object ContextFunctionResults: def contextParamCount(tp: Type, crCount: Int): Int = if crCount == 0 then 0 else - val defn.ContextFunctionType(params, resTpe, isErased) = tp: @unchecked + val defn.ContextFunctionType(params, resTpe, erasedParams) = tp: @unchecked val rest = contextParamCount(resTpe, crCount - 1) - if isErased then rest else params.length + rest + if erasedParams.contains(true) then erasedParams.count(_ == false) + rest else params.length + rest def normalParamCount(tp: Type): Int = tp.widenExpr.stripPoly match case mt @ MethodType(pnames) => val rest = normalParamCount(mt.resType) - if mt.isErasedMethod then rest else pnames.length + rest + if mt.hasErasedParams then + mt.erasedParams.count(_ == false) + rest + else pnames.length + rest case _ => contextParamCount(tp, contextResultCount(sym)) normalParamCount(sym.info) @@ -116,8 +116,14 @@ object ContextFunctionResults: atPhase(erasurePhase)(integrateSelect(tree, n)) else tree match case Select(qual, name) => - if name == nme.apply && defn.isContextFunctionClass(tree.symbol.maybeOwner) then - integrateSelect(qual, n + 1) + if name == nme.apply then + qual.tpe match + case defn.ContextFunctionType(_, _, _) => + integrateSelect(qual, n + 1) + case _ if defn.isContextFunctionClass(tree.symbol.maybeOwner) => // for TermRefs + integrateSelect(qual, n + 1) + case _ => + n > 0 && contextResultCount(tree.symbol) >= n else n > 0 && 
contextResultCount(tree.symbol) >= n case Ident(name) => @@ -133,4 +139,4 @@ object ContextFunctionResults: case _ => false -end ContextFunctionResults \ No newline at end of file +end ContextFunctionResults diff --git a/compiler/src/dotty/tools/dotc/transform/DropBreaks.scala b/compiler/src/dotty/tools/dotc/transform/DropBreaks.scala new file mode 100644 index 000000000000..3081bd5c2b20 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/transform/DropBreaks.scala @@ -0,0 +1,251 @@ +package dotty.tools +package dotc +package transform + +import ast.{Trees, tpd} +import core.* +import Decorators.* +import NameKinds.BoundaryName +import MegaPhase._ +import Types._, Contexts._, Flags._, DenotTransformers._ +import Symbols._, StdNames._, Trees._ +import util.Property +import Constants.Constant +import Flags.MethodOrLazy + +object DropBreaks: + val name: String = "dropBreaks" + val description: String = "replace local Break throws by labeled returns" + + /** Usage data and other info associated with a Label symbol. + * @param goto the return-label to use for a labeled return. + * @param enclMeth the enclosing method + */ + class LabelUsage(val goto: TermSymbol, val enclMeth: Symbol): + /** The number of references to associated label that come from labeled returns */ + var returnRefs: Int = 0 + /** The number of other references to associated label */ + var otherRefs: Int = 0 + + private val LabelUsages = new Property.Key[Map[Symbol, LabelUsage]] + private val ShadowedLabels = new Property.Key[Set[Symbol]] + +/** Rewrites local Break throws to labeled returns. + * Drops `try` statements on breaks if no other uses of its label remain. + * A Break throw with a `Label` created by some enclosing boundary is replaced + * with a labeled return if + * + * - the throw and the boundary are in the same method, and + * - there is no try expression inside the boundary that encloses the throw. 
+ */ +class DropBreaks extends MiniPhase: + import DropBreaks.* + + import tpd._ + + override def phaseName: String = DropBreaks.name + + override def description: String = DropBreaks.description + + override def runsAfterGroupsOf: Set[String] = Set(ElimByName.name) + // we want by-name parameters to be converted to closures + + /** The number of boundary nodes enclosing the currently analized tree. */ + private var enclosingBoundaries: Int = 0 + + private object LabelTry: + + object GuardedThrow: + + /** `(ex, local)` provided `expr` matches + * + * if ex.label.eq(local) then ex.value else throw ex + */ + def unapply(expr: Tree)(using Context): Option[(Symbol, Symbol)] = stripTyped(expr) match + case If( + Apply(Select(Select(ex: Ident, label), eq), (lbl @ Ident(local)) :: Nil), + Select(ex2: Ident, value), + Apply(throww, (ex3: Ident) :: Nil)) + if label == nme.label && eq == nme.eq && local == nme.local && value == nme.value + && throww.symbol == defn.throwMethod + && ex.symbol == ex2.symbol && ex.symbol == ex3.symbol => + Some((ex.symbol, lbl.symbol)) + case _ => + None + end GuardedThrow + + /** `(local, body)` provided `tree` matches + * + * try body + * catch case ex: Break => + * if ex.label.eq(local) then ex.value else throw ex + */ + def unapply(tree: Tree)(using Context): Option[(Symbol, Tree)] = stripTyped(tree) match + case Try(body, CaseDef(pat @ Bind(_, Typed(_, tpt)), EmptyTree, GuardedThrow(exc, local)) :: Nil, EmptyTree) + if tpt.tpe.isRef(defn.BreakClass) && exc == pat.symbol => + Some((local, body)) + case _ => + None + end LabelTry + + private object BreakBoundary: + + /** `(local, body)` provided `tree` matches + * + * { val local: Label[...] 
= ...; } + */ + def unapply(tree: Tree)(using Context): Option[(Symbol, Tree)] = stripTyped(tree) match + case Block((vd @ ValDef(nme.local, _, _)) :: Nil, LabelTry(caughtAndRhs)) + if vd.symbol.info.isRef(defn.LabelClass) && vd.symbol == caughtAndRhs._1 => + Some(caughtAndRhs) + case _ => + None + end BreakBoundary + + private object Break: + + private def isBreak(sym: Symbol)(using Context): Boolean = + sym.name == nme.break && sym.owner == defn.boundaryModule.moduleClass + + /** `(local, arg)` provided `tree` matches + * + * break[...](arg)(local) + * + * or `(local, ())` provided `tree` matches + * + * break()(local) + */ + def unapply(tree: Tree)(using Context): Option[(Symbol, Tree)] = tree match + case Apply(Apply(fn, args), id :: Nil) + if isBreak(fn.symbol) => + stripInlined(id) match + case id: Ident => + val arg = (args: @unchecked) match + case arg :: Nil => arg + case Nil => Literal(Constant(())).withSpan(tree.span) + Some((id.symbol, arg)) + case _ => None + case _ => None + end Break + + /** The LabelUsage data associated with `lbl` in the current context */ + private def labelUsage(lbl: Symbol)(using Context): Option[LabelUsage] = + for + usesMap <- ctx.property(LabelUsages) + uses <- usesMap.get(lbl) + yield + uses + + /** If `tree` is a BreakBoundary, associate a fresh `LabelUsage` with its label. */ + override def prepareForBlock(tree: Block)(using Context): Context = tree match + case BreakBoundary(label, _) => + enclosingBoundaries += 1 + val mapSoFar = ctx.property(LabelUsages).getOrElse(Map.empty) + val goto = newSymbol(ctx.owner, BoundaryName.fresh(), Synthetic | Label, tree.tpe) + ctx.fresh.setProperty(LabelUsages, + mapSoFar.updated(label, LabelUsage(goto, ctx.owner.enclosingMethod))) + case _ => + ctx + + /** Include all enclosing labels in the `ShadowedLabels` context property. + * This means that breaks to these labels will not be translated to labeled + * returns while this context is valid. 
+ */ + private def shadowLabels(using Context): Context = + ctx.property(LabelUsages) match + case Some(usesMap) => + val setSoFar = ctx.property(ShadowedLabels).getOrElse(Set.empty) + ctx.fresh.setProperty(ShadowedLabels, setSoFar ++ usesMap.keysIterator) + case _ => ctx + + /** Need to suppress labeled returns if there is an intervening try + */ + override def prepareForTry(tree: Try)(using Context): Context = + if enclosingBoundaries == 0 then ctx + else tree match + case LabelTry(_, _) => ctx + case _ => shadowLabels + + override def prepareForValDef(tree: ValDef)(using Context): Context = + if enclosingBoundaries != 0 + && tree.symbol.is(Lazy) + && tree.symbol.owner == ctx.owner.enclosingMethod + then shadowLabels // RHS be converted to a lambda + else ctx + + /** If `tree` is a BreakBoundary, transform it as follows: + * - Wrap it in a labeled block if its label has local uses + * - Drop the try/catch if its label has no other uses + */ + override def transformBlock(tree: Block)(using Context): Tree = tree match + case BreakBoundary(label, expr) => + enclosingBoundaries -= 1 + val uses = ctx.property(LabelUsages).get(label) + val tree1 = + if uses.otherRefs > 1 then + // one non-local ref is always in the catch clause; this one does not count + tree + else + expr + report.log(i"trans boundary block $label // ${uses.returnRefs}, ${uses.otherRefs}") + if uses.returnRefs > 0 then Labeled(uses.goto, tree1) else tree1 + case _ => + tree + + private def isBreak(sym: Symbol)(using Context): Boolean = + sym.name == nme.break && sym.owner == defn.boundaryModule.moduleClass + + private def transformBreak(tree: Tree, arg: Tree, lbl: Symbol)(using Context): Tree = + report.log(i"transform break $tree/$arg/$lbl") + labelUsage(lbl) match + case Some(uses: LabelUsage) + if uses.enclMeth == ctx.owner.enclosingMethod + && !ctx.property(ShadowedLabels).getOrElse(Set.empty).contains(lbl) + => + uses.otherRefs -= 1 + uses.returnRefs += 1 + Return(arg, 
ref(uses.goto)).withSpan(arg.span) + case _ => + tree + + + /** Rewrite a break call + * + * break.apply[...](value)(using lbl) + * + * where `lbl` is a label defined in the current method and is not included in + * ShadowedLabels to + * + * return[target] arg + * + * where `target` is the `goto` return label associated with `lbl`. + * Adjust associated ref counts accordingly. The local refcount is increased + * and the non-local refcount is decreased, since the `lbl` implicit argument + * to `break` is dropped. + */ + override def transformApply(tree: Apply)(using Context): Tree = + if enclosingBoundaries == 0 then tree + else tree match + case Break(lbl, arg) => + labelUsage(lbl) match + case Some(uses: LabelUsage) + if uses.enclMeth == ctx.owner.enclosingMethod + && !ctx.property(ShadowedLabels).getOrElse(Set.empty).contains(lbl) + => + uses.otherRefs -= 1 + uses.returnRefs += 1 + Return(arg, ref(uses.goto)).withSpan(arg.span) + case _ => tree + case _ => tree + + /** If `tree` refers to an enclosing label, increase its non local recount. + * This increase is corrected in `transformInlined` if the reference turns + * out to be part of a BreakThrow to a local, non-shadowed label. 
+ */ + override def transformIdent(tree: Ident)(using Context): Tree = + if enclosingBoundaries != 0 then + for uses <- labelUsage(tree.symbol) do + uses.otherRefs += 1 + tree + +end DropBreaks diff --git a/compiler/src/dotty/tools/dotc/transform/ElimByName.scala b/compiler/src/dotty/tools/dotc/transform/ElimByName.scala index 479a455b4aea..151e841f0e48 100644 --- a/compiler/src/dotty/tools/dotc/transform/ElimByName.scala +++ b/compiler/src/dotty/tools/dotc/transform/ElimByName.scala @@ -15,6 +15,7 @@ import MegaPhase.* import Decorators.* import typer.RefChecks import reporting.trace +import dotty.tools.dotc.core.Names.Name /** This phase implements the following transformations: * @@ -79,11 +80,14 @@ class ElimByName extends MiniPhase, InfoTransformer: case ExprType(rt) if exprBecomesFunction(sym) => defn.ByNameFunction(rt) case tp: MethodType => - def exprToFun(tp: Type) = tp match - case ExprType(rt) => defn.ByNameFunction(rt) + def exprToFun(tp: Type, name: Name) = tp match + case ExprType(rt) => + if rt.hasAnnotation(defn.ErasedParamAnnot) then + report.error(em"By-name parameter cannot be erased: $name", sym.srcPos) + defn.ByNameFunction(rt) case tp => tp tp.derivedLambdaType( - paramInfos = tp.paramInfos.mapConserve(exprToFun), + paramInfos = tp.paramInfos.zipWithConserve(tp.paramNames)(exprToFun), resType = transformInfo(tp.resType, sym)) case tp: PolyType => tp.derivedLambdaType(resType = transformInfo(tp.resType, sym)) diff --git a/compiler/src/dotty/tools/dotc/transform/ElimRepeated.scala b/compiler/src/dotty/tools/dotc/transform/ElimRepeated.scala index bdc2a268c1f8..359b882ef26b 100644 --- a/compiler/src/dotty/tools/dotc/transform/ElimRepeated.scala +++ b/compiler/src/dotty/tools/dotc/transform/ElimRepeated.scala @@ -51,10 +51,10 @@ class ElimRepeated extends MiniPhase with InfoTransformer { thisPhase => // see https://github.com/scala/bug/issues/11714 val validJava = isValidJavaVarArgs(sym.info) if !validJava then - report.error("""To generate 
java-compatible varargs: + report.error(em"""To generate java-compatible varargs: | - there must be a single repeated parameter | - it must be the last argument in the last parameter list - |""".stripMargin, + |""", sym.sourcePos) else addVarArgsForwarder(sym, isJavaVarargsOverride, hasAnnotation, parentHasAnnotation) @@ -87,7 +87,8 @@ class ElimRepeated extends MiniPhase with InfoTransformer { thisPhase => * signatures of a Java varargs method and a Scala varargs override are not the same. */ private def overridesJava(sym: Symbol)(using Context) = - sym.owner.info.baseClasses.drop(1).exists { bc => + sym.memberCanMatchInheritedSymbols + && sym.owner.info.baseClasses.drop(1).exists { bc => bc.is(JavaDefined) && { val other = bc.info.nonPrivateDecl(sym.name) other.hasAltWith { alt => diff --git a/compiler/src/dotty/tools/dotc/transform/Erasure.scala b/compiler/src/dotty/tools/dotc/transform/Erasure.scala index 84005424e3ec..981dd5f60aea 100644 --- a/compiler/src/dotty/tools/dotc/transform/Erasure.scala +++ b/compiler/src/dotty/tools/dotc/transform/Erasure.scala @@ -500,7 +500,7 @@ object Erasure { if isFunction && !ctx.settings.scalajs.value then val arity = implParamTypes.length val specializedFunctionalInterface = - if defn.isSpecializableFunctionSAM(implParamTypes, implResultType) then + if !implType.hasErasedParams && defn.isSpecializableFunctionSAM(implParamTypes, implResultType) then // Using these subclasses is critical to avoid boxing since their // SAM is a specialized method `apply$mc*$sp` whose default // implementation in FunctionN boxes. @@ -549,28 +549,30 @@ object Erasure { /** Check that Java statics and packages can only be used in selections. 
*/ - private def checkNotErased(tree: Tree)(using Context): tree.type = { - if (!ctx.mode.is(Mode.Type)) { + private def checkNotErased(tree: Tree)(using Context): tree.type = + if !ctx.mode.is(Mode.Type) then if isErased(tree) then val msg = if tree.symbol.is(Flags.Inline) then em"""${tree.symbol} is declared as `inline`, but was not inlined | - |Try increasing `-Xmax-inlines` above ${ctx.settings.XmaxInlines.value}""".stripMargin - else em"${tree.symbol} is declared as `erased`, but is in fact used" + |Try increasing `-Xmax-inlines` above ${ctx.settings.XmaxInlines.value}""" + else + em"${tree.symbol} is declared as `erased`, but is in fact used" report.error(msg, tree.srcPos) - tree.symbol.getAnnotation(defn.CompileTimeOnlyAnnot) match { + tree.symbol.getAnnotation(defn.CompileTimeOnlyAnnot) match case Some(annot) => - def defaultMsg = - i"""Reference to ${tree.symbol.showLocated} should not have survived, - |it should have been processed and eliminated during expansion of an enclosing macro or term erasure.""" - val message = annot.argumentConstant(0).fold(defaultMsg)(_.stringValue) + val message = annot.argumentConstant(0) match + case Some(c) => + c.stringValue.toMessage + case _ => + em"""Reference to ${tree.symbol.showLocated} should not have survived, + |it should have been processed and eliminated during expansion of an enclosing macro or term erasure.""" report.error(message, tree.srcPos) case _ => // OK - } - } + checkNotErasedClass(tree) - } + end checkNotErased private def checkNotErasedClass(tp: Type, tree: untpd.Tree)(using Context): Unit = tp match case JavaArrayType(et) => @@ -614,7 +616,7 @@ object Erasure { * are handled separately by [[typedDefDef]], [[typedValDef]] and [[typedTyped]]. 
*/ override def typedTypeTree(tree: untpd.TypeTree, pt: Type)(using Context): TypeTree = - checkNotErasedClass(tree.withType(erasure(tree.tpe))) + checkNotErasedClass(tree.withType(erasure(tree.typeOpt))) /** This override is only needed to semi-erase type ascriptions */ override def typedTyped(tree: untpd.Typed, pt: Type)(using Context): Tree = @@ -677,6 +679,8 @@ object Erasure { val qualTp = tree.qualifier.typeOpt.widen if qualTp.derivesFrom(defn.PolyFunctionClass) then erasePolyFunctionApply(qualTp.select(nme.apply).widen).classSymbol + else if defn.isErasedFunctionType(qualTp) then + eraseErasedFunctionApply(qualTp.select(nme.apply).widen.asInstanceOf[MethodType]).classSymbol else NoSymbol } @@ -696,18 +700,20 @@ object Erasure { return tree.asInstanceOf[Tree] // we are re-typing a primitive array op val owner = mapOwner(origSym) - var sym = if (owner eq origSym.maybeOwner) origSym else owner.info.decl(tree.name).symbol - if !sym.exists then - // We fail the sym.exists test for pos/i15158.scala, where we pass an infinitely - // recurring match type to an overloaded constructor. An equivalent test - // with regular apply methods succeeds. It's at present unclear whether - // - the program should be rejected, or - // - there is another fix. - // Therefore, we apply the fix to use the pre-erasure symbol, but only - // for constructors, in order not to mask other possible bugs that would - // trigger the assert(sym.exists, ...) below. - val prevSym = tree.symbol(using preErasureCtx) - if prevSym.isConstructor then sym = prevSym + val sym = + (if (owner eq origSym.maybeOwner) origSym else owner.info.decl(tree.name).symbol) + .orElse { + // We fail the sym.exists test for pos/i15158.scala, where we pass an infinitely + // recurring match type to an overloaded constructor. An equivalent test + // with regular apply methods succeeds. It's at present unclear whether + // - the program should be rejected, or + // - there is another fix. 
+ // Therefore, we apply the fix to use the pre-erasure symbol, but only + // for constructors, in order not to mask other possible bugs that would + // trigger the assert(sym.exists, ...) below. + val prevSym = tree.symbol(using preErasureCtx) + if prevSym.isConstructor then prevSym else NoSymbol + } assert(sym.exists, i"no owner from $owner/${origSym.showLocated} in $tree") @@ -770,7 +776,7 @@ object Erasure { select(qual1, sym) else val castTarget = // Avoid inaccessible cast targets, see i8661 - if isJvmAccessible(sym.owner) + if isJvmAccessible(sym.owner) && sym.owner.isType then sym.owner.typeRef else @@ -780,7 +786,7 @@ object Erasure { val tp = originalQual if tp =:= qual1.tpe.widen then return errorTree(qual1, - ex"Unable to emit reference to ${sym.showLocated}, ${sym.owner} is not accessible in ${ctx.owner.enclosingClass}") + em"Unable to emit reference to ${sym.showLocated}, ${sym.owner} is not accessible in ${ctx.owner.enclosingClass}") tp recur(cast(qual1, castTarget)) } @@ -823,7 +829,10 @@ object Erasure { val Apply(fun, args) = tree val origFun = fun.asInstanceOf[tpd.Tree] val origFunType = origFun.tpe.widen(using preErasureCtx) - val ownArgs = if origFunType.isErasedMethod then Nil else args + val ownArgs = origFunType match + case mt: MethodType if mt.hasErasedParams => + args.zip(mt.erasedParams).collect { case (arg, false) => arg } + case _ => args val fun1 = typedExpr(fun, AnyFunctionProto) fun1.tpe.widen match case mt: MethodType => diff --git a/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala b/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala index cd6753eaed69..0bfc444e0997 100644 --- a/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala +++ b/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala @@ -145,15 +145,13 @@ class ExpandSAMs extends MiniPhase: def translateMatch(tree: Match, pfParam: Symbol, cases: List[CaseDef], defaultValue: Tree)(using Context) = { val selector = tree.selector - val selectorTpe = 
selector.tpe.widen - val defaultSym = newSymbol(pfParam.owner, nme.WILDCARD, SyntheticCase, selectorTpe) - val defaultCase = - CaseDef( - Bind(defaultSym, Underscore(selectorTpe)), - EmptyTree, - defaultValue) - val unchecked = selector.annotated(New(ref(defn.UncheckedAnnot.typeRef))) - cpy.Match(tree)(unchecked, cases :+ defaultCase) + val cases1 = if cases.exists(isDefaultCase) then cases + else + val selectorTpe = selector.tpe.widen + val defaultSym = newSymbol(pfParam.owner, nme.WILDCARD, SyntheticCase, selectorTpe) + val defaultCase = CaseDef(Bind(defaultSym, Underscore(selectorTpe)), EmptyTree, defaultValue) + cases :+ defaultCase + cpy.Match(tree)(selector, cases1) .subst(param.symbol :: Nil, pfParam :: Nil) // Needed because a partial function can be written as: // param => param match { case "foo" if foo(param) => param } @@ -186,7 +184,7 @@ class ExpandSAMs extends MiniPhase: private def checkRefinements(tpe: Type, tree: Tree)(using Context): Type = tpe.dealias match { case RefinedType(parent, name, _) => if (name.isTermName && tpe.member(name).symbol.ownersIterator.isEmpty) // if member defined in the refinement - report.error("Lambda does not define " + name, tree.srcPos) + report.error(em"Lambda does not define $name", tree.srcPos) checkRefinements(parent, tree) case tpe => tpe diff --git a/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala b/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala index 00074a6ea81a..deb1f665c022 100644 --- a/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala +++ b/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala @@ -13,6 +13,7 @@ import core.Decorators._ import core.StdNames.nme import core.Names._ import core.NameOps._ +import core.NameKinds.SuperArgName import SymUtils._ import dotty.tools.dotc.ast.tpd @@ -176,8 +177,9 @@ object ExplicitOuter { if prefix == NoPrefix then outerCls.typeRef.appliedTo(outerCls.typeParams.map(_ => TypeBounds.empty)) else prefix.widen) val info = if 
(flags.is(Method)) ExprType(target) else target + val currentNestingLevel = ctx.nestingLevel atPhaseNoEarlier(explicitOuterPhase.next) { // outer accessors are entered at explicitOuter + 1, should not be defined before. - newSymbol(owner, name, SyntheticArtifact | flags, info, coord = cls.coord) + newSymbol(owner, name, SyntheticArtifact | flags, info, coord = cls.coord, nestingLevel = currentNestingLevel) } } @@ -196,11 +198,17 @@ object ExplicitOuter { private def outerAccName(cls: ClassSymbol)(using Context): TermName = nme.OUTER.expandedName(cls) + private def outerOwner(sym: Symbol)(using Context): Symbol = + val owner = sym.effectiveOwner + if owner.name.is(SuperArgName) || owner.isLocalDummy + then owner.enclosingClass + else owner + /** Class needs an outer pointer, provided there is a reference to an outer this in it. */ def needsOuterIfReferenced(cls: ClassSymbol)(using Context): Boolean = - !(cls.isStatic || - cls.owner.enclosingClass.isStaticOwner || - cls.is(PureInterface) + !(cls.isStatic + || outerOwner(cls).isStaticOwner + || cls.is(PureInterface) ) /** Class unconditionally needs an outer pointer. This is the case if @@ -225,7 +233,9 @@ object ExplicitOuter { /** The outer parameter accessor of cass `cls` */ private def outerParamAccessor(cls: ClassSymbol)(using Context): TermSymbol = - cls.info.decl(nme.OUTER).symbol.asTerm + val outer = cls.info.decl(nme.OUTER).symbol + assert(outer.isTerm, i"missing outer accessor in $cls") + outer.asTerm /** The outer accessor of class `cls`. To find it is a bit tricky. 
The * class might have been moved with new owners between ExplicitOuter and Erasure, @@ -255,7 +265,6 @@ object ExplicitOuter { */ def referencesOuter(cls: Symbol, tree: Tree)(using Context): Boolean = - val test = new TreeAccumulator[Boolean]: private var inInline = false @@ -301,19 +310,20 @@ object ExplicitOuter { def containsOuterRefs(t: Tree): Boolean = t match case _: This | _: Ident => isOuterRef(t.tpe) case nw: New => - val newCls = nw.tpe.classSymbol + val newType = nw.tpe.dealias + val newCls = newType.classSymbol isOuterSym(newCls.owner.enclosingClass) || - hasOuterPrefix(nw.tpe) || + hasOuterPrefix(newType) || newCls.owner.isTerm && cls.isProperlyContainedIn(newCls) // newCls might get proxies for free variables. If current class is // properly contained in newCls, it needs an outer path to newCls access the // proxies and forward them to the new instance. case app: TypeApply if app.symbol.isTypeTest => // Type tests of singletons translate to `eq` tests with references, which might require outer pointers - containsOuterRefsAtTopLevel(app.args.head.tpe) + containsOuterRefsAtTopLevel(app.args.head.tpe.dealias) case t: TypeTree if inInline => // Expansions of inline methods must be able to address outer types - containsOuterRefsAnywhere(t.tpe) + containsOuterRefsAnywhere(t.tpe.dealias) case _ => false diff --git a/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala b/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala index 9c580235a2e4..a430f7532066 100644 --- a/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala +++ b/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala @@ -13,7 +13,7 @@ import core._ import Types._, Contexts._, Names._, Flags._, DenotTransformers._, Phases._ import SymDenotations._, Symbols._, StdNames._, Denotations._ import TypeErasure.{ valueErasure, ErasedValueType } -import NameKinds.ExtMethName +import NameKinds.{ExtMethName, BodyRetainerName} import Decorators._ import TypeUtils._ @@ 
-79,7 +79,7 @@ class ExtensionMethods extends MiniPhase with DenotTransformer with FullParamete // because it adds extension methods before pickling. if (!(valueClass.is(Scala2x))) for (decl <- valueClass.classInfo.decls) - if (isMethodWithExtension(decl)) + if isMethodWithExtension(decl) then enterInModuleClass(createExtensionMethod(decl, moduleClassSym.symbol)) // Create synthetic methods to cast values between the underlying type @@ -179,7 +179,10 @@ object ExtensionMethods { /** Name of the extension method that corresponds to given instance method `meth`. */ def extensionName(imeth: Symbol)(using Context): TermName = - ExtMethName(imeth.name.asTermName) + ExtMethName( + imeth.name.asTermName match + case BodyRetainerName(name) => name + case name => name) /** Return the extension method that corresponds to given instance method `meth`. */ def extensionMethod(imeth: Symbol)(using Context): TermSymbol = @@ -188,9 +191,17 @@ object ExtensionMethods { val companion = imeth.owner.companionModule val companionInfo = companion.info val candidates = companionInfo.decl(extensionName(imeth)).alternatives - val matching = - // See the documentation of `memberSignature` to understand why `.stripPoly.ensureMethodic` is needed here. - candidates filter (c => FullParameterization.memberSignature(c.info) == imeth.info.stripPoly.ensureMethodic.signature) + def matches(candidate: SingleDenotation) = + FullParameterization.memberSignature(candidate.info) == imeth.info.stripPoly.ensureMethodic.signature + // See the documentation of `memberSignature` to understand why `.stripPoly.ensureMethodic` is needed here. 
+ && (if imeth.targetName == imeth.name then + // imeth does not have a @targetName annotation, candidate should not have one either + candidate.symbol.targetName == candidate.symbol.name + else + // imeth has a @targetName annotation, candidate's target name must match + imeth.targetName == candidate.symbol.targetName + ) + val matching = candidates.filter(matches) assert(matching.nonEmpty, i"""no extension method found for: | @@ -203,6 +214,9 @@ object ExtensionMethods { | Candidates (signatures normalized): | | ${candidates.map(c => s"${c.name}:${c.info.signature}:${FullParameterization.memberSignature(c.info)}").mkString("\n")}""") + if matching.tail.nonEmpty then + // this case will report a "have the same erasure" error later at erasure pahse + report.log(i"mutiple extension methods match $imeth: ${candidates.map(c => i"${c.name}:${c.info}")}") matching.head.symbol.asTerm } } diff --git a/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala b/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala index a7e0795ce195..03639c8af689 100644 --- a/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala +++ b/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala @@ -18,6 +18,7 @@ import NameKinds.OuterSelectName import StdNames._ import TypeUtils.isErasedValueType import config.Feature +import inlines.Inlines.inInlineMethod object FirstTransform { val name: String = "firstTransform" diff --git a/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala b/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala index 9a6ab233e239..a1baeac272b9 100644 --- a/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala +++ b/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala @@ -5,16 +5,19 @@ package transform import core.Annotations._ import core.Contexts._ import core.Phases._ +import core.Decorators.* import core.Definitions import core.Flags._ import core.Names.Name import core.Symbols._ import 
core.TypeApplications.{EtaExpansion, TypeParamInfo} -import core.TypeErasure.{erasedGlb, erasure, isGenericArrayElement} +import core.TypeErasure.{erasedGlb, erasure, fullErasure, isGenericArrayElement} import core.Types._ import core.classfile.ClassfileConstants import SymUtils._ import TypeUtils._ +import config.Printers.transforms +import reporting.trace import java.lang.StringBuilder import scala.collection.mutable.ListBuffer @@ -130,12 +133,12 @@ object GenericSignatures { else Right(parent)) - def paramSig(param: LambdaParam): Unit = { - builder.append(sanitizeName(param.paramName)) + def paramSig(param: TypeParamInfo): Unit = { + builder.append(sanitizeName(param.paramName.lastPart)) boundsSig(hiBounds(param.paramInfo.bounds)) } - def polyParamSig(tparams: List[LambdaParam]): Unit = + def polyParamSig(tparams: List[TypeParamInfo]): Unit = if (tparams.nonEmpty) { builder.append('<') tparams.foreach(paramSig) @@ -236,7 +239,11 @@ object GenericSignatures { tp match { case ref @ TypeParamRef(_: PolyType, _) => - typeParamSig(ref.paramName.lastPart) + val erasedUnderlying = fullErasure(ref.underlying.bounds.hi) + // don't emit type param name if the param is upper-bounded by a primitive type (including via a value class) + if erasedUnderlying.isPrimitiveValueType then + jsig(erasedUnderlying, toplevel, primitiveOK) + else typeParamSig(ref.paramName.lastPart) case defn.ArrayOf(elemtp) => if (isGenericArrayElement(elemtp, isScala2 = false)) @@ -267,7 +274,7 @@ object GenericSignatures { else if (sym == defn.UnitClass) jsig(defn.BoxedUnitClass.typeRef) else builder.append(defn.typeTag(sym.info)) else if (ValueClasses.isDerivedValueClass(sym)) { - val erasedUnderlying = core.TypeErasure.fullErasure(tp) + val erasedUnderlying = fullErasure(tp) if (erasedUnderlying.isPrimitiveValueType && !primitiveOK) classSig(sym, pre, args) else @@ -304,7 +311,9 @@ object GenericSignatures { case mtpe: MethodType => // erased method parameters do not make it to the bytecode. 
def effectiveParamInfoss(t: Type)(using Context): List[List[Type]] = t match { - case t: MethodType if t.isErasedMethod => effectiveParamInfoss(t.resType) + case t: MethodType if t.hasErasedParams => + t.paramInfos.zip(t.erasedParams).collect{ case (i, false) => i } + :: effectiveParamInfoss(t.resType) case t: MethodType => t.paramInfos :: effectiveParamInfoss(t.resType) case _ => Nil } @@ -334,15 +343,6 @@ object GenericSignatures { jsig(repr, primitiveOK = primitiveOK) case ci: ClassInfo => - def polyParamSig(tparams: List[TypeParamInfo]): Unit = - if (tparams.nonEmpty) { - builder.append('<') - tparams.foreach { tp => - builder.append(sanitizeName(tp.paramName.lastPart)) - boundsSig(hiBounds(tp.paramInfo.bounds)) - } - builder.append('>') - } val tParams = tp.typeParams if (toplevel) polyParamSig(tParams) superSig(ci.typeSymbol, ci.parents) diff --git a/compiler/src/dotty/tools/dotc/transform/HoistSuperArgs.scala b/compiler/src/dotty/tools/dotc/transform/HoistSuperArgs.scala index edbfbd1552c4..9a36d65babe8 100644 --- a/compiler/src/dotty/tools/dotc/transform/HoistSuperArgs.scala +++ b/compiler/src/dotty/tools/dotc/transform/HoistSuperArgs.scala @@ -13,6 +13,7 @@ import collection.mutable import ast.Trees._ import core.NameKinds.SuperArgName import SymUtils._ +import core.Decorators.* object HoistSuperArgs { val name: String = "hoistSuperArgs" @@ -181,7 +182,9 @@ class HoistSuperArgs extends MiniPhase with IdentityDenotTransformer { thisPhase /** Hoist complex arguments in super call out of the class. */ def hoistSuperArgsFromCall(superCall: Tree, cdef: DefDef, lifted: mutable.ListBuffer[Symbol]): Tree = superCall match - case Block(defs, expr) => + case Block(defs, expr) if !expr.symbol.owner.is(Scala2x) => + // MO: The guard avoids the crash for #16351. + // It would be good to dig deeper, but I won't have the time myself to do it. 
cpy.Block(superCall)( stats = defs.mapconserve { case vdef: ValDef => diff --git a/compiler/src/dotty/tools/dotc/transform/InlinePatterns.scala b/compiler/src/dotty/tools/dotc/transform/InlinePatterns.scala index 6edb60a77245..798f34757b35 100644 --- a/compiler/src/dotty/tools/dotc/transform/InlinePatterns.scala +++ b/compiler/src/dotty/tools/dotc/transform/InlinePatterns.scala @@ -8,6 +8,8 @@ import Symbols._, Contexts._, Types._, Decorators._ import NameOps._ import Names._ +import scala.collection.mutable.ListBuffer + /** Rewrite an application * * {new { def unapply(x0: X0)(x1: X1,..., xn: Xn) = b }}.unapply(y0)(y1, ..., yn) @@ -38,7 +40,7 @@ class InlinePatterns extends MiniPhase: if app.symbol.name.isUnapplyName && !app.tpe.isInstanceOf[MethodicType] then app match case App(Select(fn, name), argss) => - val app1 = betaReduce(app, fn, name, argss.flatten) + val app1 = betaReduce(app, fn, name, argss) if app1 ne app then report.log(i"beta reduce $app -> $app1") app1 case _ => @@ -51,11 +53,16 @@ class InlinePatterns extends MiniPhase: case Apply(App(fn, argss), args) => (fn, argss :+ args) case _ => (app, Nil) - private def betaReduce(tree: Apply, fn: Tree, name: Name, args: List[Tree])(using Context): Tree = + // TODO merge with BetaReduce.scala + private def betaReduce(tree: Apply, fn: Tree, name: Name, argss: List[List[Tree]])(using Context): Tree = fn match case Block(TypeDef(_, template: Template) :: Nil, Apply(Select(New(_),_), Nil)) if template.constr.rhs.isEmpty => template.body match - case List(ddef @ DefDef(`name`, _, _, _)) => BetaReduce(ddef, args) + case List(ddef @ DefDef(`name`, _, _, _)) => + val bindings = new ListBuffer[DefTree]() + val expansion1 = BetaReduce.reduceApplication(ddef, argss, bindings) + val bindings1 = bindings.result() + seq(bindings1, expansion1) case _ => tree case _ => tree diff --git a/compiler/src/dotty/tools/dotc/transform/InlineVals.scala b/compiler/src/dotty/tools/dotc/transform/InlineVals.scala index 
65212ec2c0cc..047a187bad68 100644 --- a/compiler/src/dotty/tools/dotc/transform/InlineVals.scala +++ b/compiler/src/dotty/tools/dotc/transform/InlineVals.scala @@ -38,8 +38,8 @@ class InlineVals extends MiniPhase: tpt.tpe.widenTermRefExpr.dealiasKeepOpaques.normalized match case tp: ConstantType => if !isPureExpr(rhs) then - val details = if enclosingInlineds.isEmpty then "" else em"but was: $rhs" - report.error(s"inline value must be pure$details", rhs.srcPos) + def details = if enclosingInlineds.isEmpty then "" else i"but was: $rhs" + report.error(em"inline value must be pure$details", rhs.srcPos) case tp => if tp.typeSymbol.is(Opaque) then report.error(em"The type of an `inline val` cannot be an opaque type.\n\nTo inline, consider using `inline def` instead", rhs) diff --git a/compiler/src/dotty/tools/dotc/transform/Inlining.scala b/compiler/src/dotty/tools/dotc/transform/Inlining.scala index 5ddcf600c63a..10f73fa94e08 100644 --- a/compiler/src/dotty/tools/dotc/transform/Inlining.scala +++ b/compiler/src/dotty/tools/dotc/transform/Inlining.scala @@ -7,14 +7,18 @@ import Contexts._ import Symbols._ import SymUtils._ import dotty.tools.dotc.ast.tpd - -import dotty.tools.dotc.core.StagingContext._ +import dotty.tools.dotc.ast.Trees._ +import dotty.tools.dotc.quoted._ import dotty.tools.dotc.inlines.Inlines import dotty.tools.dotc.ast.TreeMapWithImplicits +import dotty.tools.dotc.core.DenotTransformers.IdentityDenotTransformer +import dotty.tools.dotc.staging.StagingLevel +import scala.collection.mutable.ListBuffer /** Inlines all calls to inline methods that are not in an inline method or a quote */ class Inlining extends MacroTransform { + import tpd._ override def phaseName: String = Inlining.name @@ -23,8 +27,10 @@ class Inlining extends MacroTransform { override def allowsImplicitSearch: Boolean = true + override def changesMembers: Boolean = true + override def run(using Context): Unit = - if ctx.compilationUnit.needsInlining then + if 
ctx.compilationUnit.needsInlining || ctx.compilationUnit.hasMacroAnnotations then try super.run catch case _: CompilationUnit.SuspendException => () @@ -39,11 +45,7 @@ class Inlining extends MacroTransform { new TreeTraverser { def traverse(tree: Tree)(using Context): Unit = tree match - case _: GenericApply if tree.symbol.isQuote => - traverseChildren(tree)(using StagingContext.quoteContext) - case _: GenericApply if tree.symbol.isExprSplice => - traverseChildren(tree)(using StagingContext.spliceContext) - case tree: RefTree if !Inlines.inInlineMethod && StagingContext.level == 0 => + case tree: RefTree if !Inlines.inInlineMethod && StagingLevel.level == 0 => assert(!tree.symbol.isInlineMethod, tree.show) case _ => traverseChildren(tree) @@ -57,10 +59,33 @@ class Inlining extends MacroTransform { } private class InliningTreeMap extends TreeMapWithImplicits { + + /** List of top level classes added by macro annotation in a package object. + * These are added to the PackageDef that owns this particular package object. 
+ */ + private val newTopClasses = MutableSymbolMap[ListBuffer[Tree]]() + override def transform(tree: Tree)(using Context): Tree = { tree match - case tree: DefTree => + case tree: MemberDef => if tree.symbol.is(Inline) then tree + else if tree.symbol.is(Param) then super.transform(tree) + else if + !tree.symbol.isPrimaryConstructor + && StagingLevel.level == 0 + && MacroAnnotations.hasMacroAnnotation(tree.symbol) + then + val trees = (new MacroAnnotations).expandAnnotations(tree) + val trees1 = trees.map(super.transform) + + // Find classes added to the top level from a package object + val (topClasses, trees2) = + if ctx.owner.isPackageObject then trees1.partition(_.symbol.owner == ctx.owner.owner) + else (Nil, trees1) + if topClasses.nonEmpty then + newTopClasses.getOrElseUpdate(ctx.owner.owner, new ListBuffer) ++= topClasses + + flatTree(trees2) else super.transform(tree) case _: Typed | _: Block => super.transform(tree) @@ -68,12 +93,19 @@ class Inlining extends MacroTransform { val tree1 = super.transform(tree) if tree1.tpe.isError then tree1 else Inlines.inlineCall(tree1) - case _: GenericApply if tree.symbol.isQuote => - super.transform(tree)(using StagingContext.quoteContext) - case _: GenericApply if tree.symbol.isExprSplice => - super.transform(tree)(using StagingContext.spliceContext) + case _: PackageDef => + super.transform(tree) match + case tree1: PackageDef => + newTopClasses.get(tree.symbol.moduleClass) match + case Some(topClasses) => + newTopClasses.remove(tree.symbol.moduleClass) + val newStats = tree1.stats ::: topClasses.result() + cpy.PackageDef(tree1)(tree1.pid, newStats) + case _ => tree1 + case tree1 => tree1 case _ => - super.transform(tree) + if tree.isType then tree + else super.transform(tree) } } } diff --git a/compiler/src/dotty/tools/dotc/transform/InstrumentCoverage.scala b/compiler/src/dotty/tools/dotc/transform/InstrumentCoverage.scala index c69b342b9a01..29572a4ae30d 100644 --- 
a/compiler/src/dotty/tools/dotc/transform/InstrumentCoverage.scala +++ b/compiler/src/dotty/tools/dotc/transform/InstrumentCoverage.scala @@ -11,6 +11,7 @@ import core.DenotTransformers.IdentityDenotTransformer import core.Symbols.{defn, Symbol} import core.Constants.Constant import core.NameOps.isContextFunction +import core.StdNames.nme import core.Types.* import coverage.* import typer.LiftCoverage @@ -325,7 +326,11 @@ class InstrumentCoverage extends MacroTransform with IdentityDenotTransformer: // Only transform the params (for the default values) and the rhs, not the name and tpt. val transformedParamss = transformParamss(tree.paramss) val transformedRhs = - if !sym.isOneOf(Accessor | Artifact | Synthetic) && !tree.rhs.isEmpty then + if tree.rhs.isEmpty then + tree.rhs + else if sym.isClassConstructor then + instrumentSecondaryCtor(tree) + else if !sym.isOneOf(Accessor | Artifact | Synthetic) then // If the body can be instrumented, do it (i.e. insert a "coverage call" at the beginning) // This is useful because methods can be stored and called later, or called by reflection, // and if the rhs is too simple to be instrumented (like `def f = this`), @@ -410,6 +415,24 @@ class InstrumentCoverage extends MacroTransform with IdentityDenotTransformer: val coverageCall = createInvokeCall(parent, pos) InstrumentedParts.singleExprTree(coverageCall, body) + /** Instruments the body of a secondary constructor DefDef. + * + * We must preserve the delegate constructor call as the first statement of + * the rhs Block, otherwise `HoistSuperArgs` will not be happy (see #17042). 
+ */ + private def instrumentSecondaryCtor(ctorDef: DefDef)(using Context): Tree = + // compute position like in instrumentBody + val namePos = ctorDef.namePos + val pos = namePos.withSpan(namePos.span.withStart(ctorDef.span.start)) + val coverageCall = createInvokeCall(ctorDef, pos) + + ctorDef.rhs match + case b @ Block(delegateCtorCall :: stats, expr: Literal) => + cpy.Block(b)(transform(delegateCtorCall) :: coverageCall :: stats.mapConserve(transform), expr) + case rhs => + cpy.Block(rhs)(transform(rhs) :: coverageCall :: Nil, unitLiteral) + end instrumentSecondaryCtor + /** * Checks if the apply needs a lift in the coverage phase. * In case of a nested application, we have to lift all arguments @@ -447,9 +470,14 @@ class InstrumentCoverage extends MacroTransform with IdentityDenotTransformer: /** Check if an Apply can be instrumented. Prevents this phase from generating incorrect code. */ private def canInstrumentApply(tree: Apply)(using Context): Boolean = + def isSecondaryCtorDelegateCall: Boolean = tree.fun match + case Select(This(_), nme.CONSTRUCTOR) => true + case _ => false + val sym = tree.symbol !sym.isOneOf(ExcludeMethodFlags) && !isCompilerIntrinsicMethod(sym) + && !(sym.isClassConstructor && isSecondaryCtorDelegateCall) && (tree.typeOpt match case AppliedType(tycon: NamedType, _) => /* If the last expression in a block is a context function, we'll try to diff --git a/compiler/src/dotty/tools/dotc/transform/InterceptedMethods.scala b/compiler/src/dotty/tools/dotc/transform/InterceptedMethods.scala index ad068b84c041..c95500d856be 100644 --- a/compiler/src/dotty/tools/dotc/transform/InterceptedMethods.scala +++ b/compiler/src/dotty/tools/dotc/transform/InterceptedMethods.scala @@ -65,7 +65,7 @@ class InterceptedMethods extends MiniPhase { override def transformApply(tree: Apply)(using Context): Tree = { lazy val qual = tree.fun match { case Select(qual, _) => qual - case ident @ Ident(_) => + case ident: Ident => ident.tpe match { case 
TermRef(prefix: TermRef, _) => tpd.ref(prefix) diff --git a/compiler/src/dotty/tools/dotc/transform/LazyVals.scala b/compiler/src/dotty/tools/dotc/transform/LazyVals.scala index 3b37ef130231..b433e37e39c0 100644 --- a/compiler/src/dotty/tools/dotc/transform/LazyVals.scala +++ b/compiler/src/dotty/tools/dotc/transform/LazyVals.scala @@ -112,7 +112,7 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { appendOffsetDefs.get(cls) match { case None => template case Some(data) => - data.defs.foreach(_.symbol.addAnnotation(Annotation(defn.ScalaStaticAnnot))) + data.defs.foreach(defin => defin.symbol.addAnnotation(Annotation(defn.ScalaStaticAnnot, defin.symbol.span))) cpy.Template(template)(body = addInFront(data.defs, template.body)) } } @@ -283,7 +283,7 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { * * ``` * private @volatile var _x: AnyRef = null - * + * * def x: A = * val result = _x * if result.isInstanceOf[A] then @@ -292,7 +292,7 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { * null // possible unboxing applied here * else * x_compute() // possible unboxing applied here - * + * * private def x_compute(): AnyRef = * while do * val current: AnyRef = _x @@ -448,10 +448,10 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { def transformMemberDefThreadSafe(x: ValOrDefDef)(using Context): Thicket = { assert(!(x.symbol is Mutable)) - if ctx.settings.YlightweightLazyVals.value then - transformMemberDefThreadSafeNew(x) - else + if ctx.settings.YlegacyLazyVals.value then transformMemberDefThreadSafeLegacy(x) + else + transformMemberDefThreadSafeNew(x) } def transformMemberDefThreadSafeNew(x: ValOrDefDef)(using Context): Thicket = { @@ -464,15 +464,10 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { def offsetName(id: Int) = s"${StdNames.nme.LAZY_FIELD_OFFSET}${if (x.symbol.owner.is(Module)) "_m_" else ""}$id".toTermName val containerName = LazyLocalName.fresh(x.name.asTermName) val 
containerSymbol = newSymbol(claz, containerName, x.symbol.flags &~ containerFlagsMask | containerFlags | Private, defn.ObjectType, coord = x.symbol.coord).enteredAfter(this) - containerSymbol.addAnnotation(Annotation(defn.VolatileAnnot)) // private @volatile var _x: AnyRef + containerSymbol.addAnnotation(Annotation(defn.VolatileAnnot, containerSymbol.span)) // private @volatile var _x: AnyRef containerSymbol.addAnnotations(x.symbol.annotations) // pass annotations from original definition - val stat = x.symbol.isStatic - if stat then - containerSymbol.setFlag(JavaStatic) + containerSymbol.removeAnnotation(defn.ScalaStaticAnnot) val getOffset = - if stat then - Select(ref(defn.LazyValsModule), lazyNme.RLazyVals.getStaticFieldOffset) - else Select(ref(defn.LazyValsModule), lazyNme.RLazyVals.getOffsetStatic) val containerTree = ValDef(containerSymbol, nullLiteral) @@ -482,7 +477,7 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { newSymbol(claz, offsetName(info.defs.size), Synthetic, defn.LongType).enteredAfter(this) case None => newSymbol(claz, offsetName(0), Synthetic, defn.LongType).enteredAfter(this) - offsetSymbol.nn.addAnnotation(Annotation(defn.ScalaStaticAnnot)) + offsetSymbol.nn.addAnnotation(Annotation(defn.ScalaStaticAnnot, offsetSymbol.nn.span)) val fieldTree = thizClass.select(lazyNme.RLazyVals.getDeclaredField).appliedTo(Literal(Constant(containerName.mangledString))) val offsetTree = ValDef(offsetSymbol.nn, getOffset.appliedTo(fieldTree)) val offsetInfo = appendOffsetDefs.getOrElseUpdate(claz, new OffsetInfo(Nil)) @@ -490,9 +485,6 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { val offset = ref(offsetSymbol.nn) val swapOver = - if stat then - tpd.clsOf(x.symbol.owner.typeRef) - else This(claz) val (accessorDef, initMethodDef) = mkThreadSafeDef(x, claz, containerSymbol, offset, swapOver) @@ -625,7 +617,7 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { .symbol.asTerm else { // need to create a new 
flag offsetSymbol = newSymbol(claz, offsetById, Synthetic, defn.LongType).enteredAfter(this) - offsetSymbol.nn.addAnnotation(Annotation(defn.ScalaStaticAnnot)) + offsetSymbol.nn.addAnnotation(Annotation(defn.ScalaStaticAnnot, offsetSymbol.nn.span)) val flagName = LazyBitMapName.fresh(id.toString.toTermName) val flagSymbol = newSymbol(claz, flagName, containerFlags, defn.LongType).enteredAfter(this) flag = ValDef(flagSymbol, Literal(Constant(0L))) @@ -636,7 +628,7 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { case None => offsetSymbol = newSymbol(claz, offsetName(0), Synthetic, defn.LongType).enteredAfter(this) - offsetSymbol.nn.addAnnotation(Annotation(defn.ScalaStaticAnnot)) + offsetSymbol.nn.addAnnotation(Annotation(defn.ScalaStaticAnnot, offsetSymbol.nn.span)) val flagName = LazyBitMapName.fresh("0".toTermName) val flagSymbol = newSymbol(claz, flagName, containerFlags, defn.LongType).enteredAfter(this) flag = ValDef(flagSymbol, Literal(Constant(0L))) @@ -682,7 +674,6 @@ object LazyVals { val cas: TermName = N.cas.toTermName val getOffset: TermName = N.getOffset.toTermName val getOffsetStatic: TermName = "getOffsetStatic".toTermName - val getStaticFieldOffset: TermName = "getStaticFieldOffset".toTermName val getDeclaredField: TermName = "getDeclaredField".toTermName } val flag: TermName = "flag".toTermName diff --git a/compiler/src/dotty/tools/dotc/transform/MacroAnnotations.scala b/compiler/src/dotty/tools/dotc/transform/MacroAnnotations.scala new file mode 100644 index 000000000000..cc2e6118d1fa --- /dev/null +++ b/compiler/src/dotty/tools/dotc/transform/MacroAnnotations.scala @@ -0,0 +1,142 @@ +package dotty.tools.dotc +package transform + +import scala.language.unsafeNulls + +import dotty.tools.dotc.ast.tpd +import dotty.tools.dotc.ast.Trees.* +import dotty.tools.dotc.config.Printers.{macroAnnot => debug} +import dotty.tools.dotc.core.Annotations.* +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Decorators.* 
+import dotty.tools.dotc.core.DenotTransformers.DenotTransformer +import dotty.tools.dotc.core.Flags.* +import dotty.tools.dotc.core.MacroClassLoader +import dotty.tools.dotc.core.Symbols.* +import dotty.tools.dotc.core.Types._ +import dotty.tools.dotc.quoted.* +import dotty.tools.dotc.util.SrcPos +import scala.quoted.runtime.impl.{QuotesImpl, SpliceScope} + +import scala.quoted.Quotes +import scala.util.control.NonFatal + +import java.lang.reflect.InvocationTargetException + +class MacroAnnotations: + import tpd.* + import MacroAnnotations.* + + /** Expands every macro annotation that is on this tree. + * Returns a list with transformed definition and any added definitions. + */ + def expandAnnotations(tree: MemberDef)(using Context): List[DefTree] = + if !hasMacroAnnotation(tree.symbol) then + List(tree) + else if tree.symbol.is(Module) && !tree.symbol.isClass then + // only class is transformed + List(tree) + else if tree.symbol.isType && !tree.symbol.isClass then + report.error("macro annotations are not supported on type", tree) + List(tree) + else + debug.println(i"Expanding macro annotations of:\n$tree") + + val macroInterpreter = new Interpreter(tree.srcPos, MacroClassLoader.fromContext) + + val allTrees = List.newBuilder[DefTree] + var insertedAfter: List[List[DefTree]] = Nil + + // Apply all macro annotation to `tree` and collect new definitions in order + val transformedTree: DefTree = tree.symbol.annotations.foldLeft(tree) { (tree, annot) => + if isMacroAnnotation(annot) then + debug.println(i"Expanding macro annotation: ${annot}") + + // Interpret call to `new myAnnot(..).transform(using )()` + val transformedTrees = + try callMacro(macroInterpreter, tree, annot) + catch + // TODO: Replace this case when scala.annaotaion.MacroAnnotation is no longer experimental and reflectiveSelectable is not used + // Replace this case with the nested cases. 
+ case ex0: InvocationTargetException => + ex0.getCause match + case ex: scala.quoted.runtime.StopMacroExpansion => + if !ctx.reporter.hasErrors then + report.error("Macro expansion was aborted by the macro without any errors reported. Macros should issue errors to end-users when aborting a macro expansion with StopMacroExpansion.", annot.tree) + List(tree) + case Interpreter.MissingClassDefinedInCurrentRun(sym) => + Interpreter.suspendOnMissing(sym, annot.tree) + case NonFatal(ex) => + val stack0 = ex.getStackTrace.takeWhile(_.getClassName != "dotty.tools.dotc.transform.MacroAnnotations") + val stack = stack0.take(1 + stack0.lastIndexWhere(_.getMethodName == "transform")) + val msg = + em"""Failed to evaluate macro. + | Caused by ${ex.getClass}: ${if (ex.getMessage == null) "" else ex.getMessage} + | ${stack.mkString("\n ")} + |""" + report.error(msg, annot.tree) + List(tree) + case _ => + throw ex0 + transformedTrees.span(_.symbol != tree.symbol) match + case (prefixed, newTree :: suffixed) => + allTrees ++= prefixed + insertedAfter = suffixed :: insertedAfter + prefixed.foreach(checkMacroDef(_, tree, annot)) + suffixed.foreach(checkMacroDef(_, tree, annot)) + transform.TreeChecker.checkMacroGeneratedTree(tree, newTree) + newTree + case (Nil, Nil) => + report.error(i"Unexpected `Nil` returned by `(${annot.tree}).transform(..)` during macro expansion", annot.tree.srcPos) + tree + case (_, Nil) => + report.error(i"Transformed tree for ${tree} was not return by `(${annot.tree}).transform(..)` during macro expansion", annot.tree.srcPos) + tree + else + tree + } + + allTrees += transformedTree + insertedAfter.foreach(allTrees.++=) + + val result = allTrees.result() + debug.println(result.map(_.show).mkString("expanded to:\n", "\n", "")) + result + + /** Interpret the code `new annot(..).transform(using )()` */ + private def callMacro(interpreter: Interpreter, tree: MemberDef, annot: Annotation)(using Context): List[MemberDef] = + // TODO: Remove when 
scala.annaotaion.MacroAnnotation is no longer experimental + import scala.reflect.Selectable.reflectiveSelectable + type MacroAnnotation = { + def transform(using Quotes)(tree: Object/*Erased type of quotes.refelct.Definition*/): List[MemberDef /*quotes.refelct.Definition known to be MemberDef in QuotesImpl*/] + } + + // Interpret macro annotation instantiation `new myAnnot(..)` + val annotInstance = interpreter.interpret[MacroAnnotation](annot.tree).get + // TODO: Remove when scala.annaotaion.MacroAnnotation is no longer experimental + assert(annotInstance.getClass.getClassLoader.loadClass("scala.annotation.MacroAnnotation").isInstance(annotInstance)) + + val quotes = QuotesImpl()(using SpliceScope.contextWithNewSpliceScope(tree.symbol.sourcePos)(using MacroExpansion.context(tree)).withOwner(tree.symbol.owner)) + annotInstance.transform(using quotes)(tree.asInstanceOf[quotes.reflect.Definition]) + + /** Check that this tree can be added by the macro annotation */ + private def checkMacroDef(newTree: DefTree, annotatedTree: Tree, annot: Annotation)(using Context) = + transform.TreeChecker.checkMacroGeneratedTree(annotatedTree, newTree) + val sym = newTree.symbol + val annotated = annotatedTree.symbol + if sym.isType && !sym.isClass then + report.error(i"macro annotation cannot return a `type`. $annot tried to add $sym", annot.tree) + else if sym.owner != annotated.owner && !(annotated.owner.isPackageObject && (sym.isClass || sym.is(Module)) && sym.owner == annotated.owner.owner) then + report.error(i"macro annotation $annot added $sym with an inconsistent owner. Expected it to be owned by ${annotated.owner} but was owned by ${sym.owner}.", annot.tree) + else if annotated.isClass && annotated.owner.is(Package) /*&& !sym.isClass*/ then + report.error(i"macro annotation can not add top-level ${sym.showKind}. 
$annot tried to add $sym.", annot.tree) + +object MacroAnnotations: + + /** Is this an annotation that implements `scala.annation.MacroAnnotation` */ + def isMacroAnnotation(annot: Annotation)(using Context): Boolean = + annot.tree.symbol.maybeOwner.derivesFrom(defn.MacroAnnotationClass) + + /** Is this symbol annotated with an annotation that implements `scala.annation.MacroAnnotation` */ + def hasMacroAnnotation(sym: Symbol)(using Context): Boolean = + sym.getAnnotation(defn.MacroAnnotationClass).isDefined diff --git a/compiler/src/dotty/tools/dotc/transform/MacroTransform.scala b/compiler/src/dotty/tools/dotc/transform/MacroTransform.scala index 27ccd622bc65..7bb7ed365ebe 100644 --- a/compiler/src/dotty/tools/dotc/transform/MacroTransform.scala +++ b/compiler/src/dotty/tools/dotc/transform/MacroTransform.scala @@ -38,10 +38,10 @@ abstract class MacroTransform extends Phase { tree case _: PackageDef | _: MemberDef => super.transform(tree)(using localCtx(tree)) - case impl @ Template(constr, parents, self, _) => + case impl @ Template(constr, _, self, _) => cpy.Template(tree)( transformSub(constr), - transform(parents)(using ctx.superCallContext), + transform(impl.parents)(using ctx.superCallContext), Nil, transformSelf(self), transformStats(impl.body, tree.symbol)) diff --git a/compiler/src/dotty/tools/dotc/transform/MegaPhase.scala b/compiler/src/dotty/tools/dotc/transform/MegaPhase.scala index 9d241216bdaa..b4e8c3acbc5c 100644 --- a/compiler/src/dotty/tools/dotc/transform/MegaPhase.scala +++ b/compiler/src/dotty/tools/dotc/transform/MegaPhase.scala @@ -5,6 +5,7 @@ package transform import core._ import Contexts._, Phases._, Symbols._, Decorators._ import Flags.PackageVal +import staging.StagingLevel.* /** A MegaPhase combines a number of mini-phases which are all executed in * a single tree traversal. 
@@ -66,6 +67,8 @@ object MegaPhase { def prepareForTry(tree: Try)(using Context): Context = ctx def prepareForSeqLiteral(tree: SeqLiteral)(using Context): Context = ctx def prepareForInlined(tree: Inlined)(using Context): Context = ctx + def prepareForQuote(tree: Quote)(using Context): Context = ctx + def prepareForSplice(tree: Splice)(using Context): Context = ctx def prepareForTypeTree(tree: TypeTree)(using Context): Context = ctx def prepareForBind(tree: Bind)(using Context): Context = ctx def prepareForAlternative(tree: Alternative)(using Context): Context = ctx @@ -100,6 +103,8 @@ object MegaPhase { def transformTry(tree: Try)(using Context): Tree = tree def transformSeqLiteral(tree: SeqLiteral)(using Context): Tree = tree def transformInlined(tree: Inlined)(using Context): Tree = tree + def transformQuote(tree: Quote)(using Context): Tree = tree + def transformSplice(tree: Splice)(using Context): Tree = tree def transformTypeTree(tree: TypeTree)(using Context): Tree = tree def transformBind(tree: Bind)(using Context): Tree = tree def transformAlternative(tree: Alternative)(using Context): Tree = tree @@ -394,6 +399,16 @@ class MegaPhase(val miniPhases: Array[MiniPhase]) extends Phase { val expansion = transformTree(tree.expansion, start)(using inlineContext(tree.call)) goInlined(cpy.Inlined(tree)(tree.call, bindings, expansion), start) } + case tree: Quote => + inContext(prepQuote(tree, start)(using outerCtx)) { + val body = transformTree(tree.body, start)(using quoteContext) + goQuote(cpy.Quote(tree)(body, Nil), start) + } + case tree: Splice => + inContext(prepSplice(tree, start)(using outerCtx)) { + val expr = transformTree(tree.expr, start)(using spliceContext) + goSplice(cpy.Splice(tree)(expr), start) + } case tree: Return => inContext(prepReturn(tree, start)(using outerCtx)) { val expr = transformTree(tree.expr, start) @@ -456,7 +471,7 @@ class MegaPhase(val miniPhases: Array[MiniPhase]) extends Phase { } def transformTrees(trees: List[Tree], start: 
Int)(using Context): List[Tree] = - trees.mapInline(transformTree(_, start)) + trees.flattenedMapConserve(transformTree(_, start)) def transformSpecificTrees[T <: Tree](trees: List[T], start: Int)(using Context): List[T] = transformTrees(trees, start).asInstanceOf[List[T]] @@ -546,6 +561,10 @@ class MegaPhase(val miniPhases: Array[MiniPhase]) extends Phase { private val nxSeqLiteralTransPhase = init("transformSeqLiteral") private val nxInlinedPrepPhase = init("prepareForInlined") private val nxInlinedTransPhase = init("transformInlined") + private val nxQuotePrepPhase = init("prepareForQuote") + private val nxQuoteTransPhase = init("transformQuote") + private val nxSplicePrepPhase = init("prepareForPrep") + private val nxSpliceTransPhase = init("transformSplice") private val nxTypeTreePrepPhase = init("prepareForTypeTree") private val nxTypeTreeTransPhase = init("transformTypeTree") private val nxBindPrepPhase = init("prepareForBind") @@ -893,6 +912,36 @@ class MegaPhase(val miniPhases: Array[MiniPhase]) extends Phase { } } + def prepQuote(tree: Quote, start: Int)(using Context): Context = { + val phase = nxQuotePrepPhase(start) + if (phase == null) ctx + else prepQuote(tree, phase.idxInGroup + 1)(using phase.prepareForQuote(tree)) + } + + def goQuote(tree: Quote, start: Int)(using Context): Tree = { + val phase = nxQuoteTransPhase(start) + if (phase == null) tree + else phase.transformQuote(tree) match { + case tree1: Quote => goQuote(tree1, phase.idxInGroup + 1) + case tree1 => transformNode(tree1, phase.idxInGroup + 1) + } + } + + def prepSplice(tree: Splice, start: Int)(using Context): Context = { + val phase = nxSplicePrepPhase(start) + if (phase == null) ctx + else prepSplice(tree, phase.idxInGroup + 1)(using phase.prepareForSplice(tree)) + } + + def goSplice(tree: Splice, start: Int)(using Context): Tree = { + val phase = nxSpliceTransPhase(start) + if (phase == null) tree + else phase.transformSplice(tree) match { + case tree1: Splice => goSplice(tree1, 
phase.idxInGroup + 1) + case tree1 => transformNode(tree1, phase.idxInGroup + 1) + } + } + def prepTypeTree(tree: TypeTree, start: Int)(using Context): Context = { val phase = nxTypeTreePrepPhase(start) if (phase == null) ctx diff --git a/compiler/src/dotty/tools/dotc/transform/Memoize.scala b/compiler/src/dotty/tools/dotc/transform/Memoize.scala index 6456066bfdb0..03ac15b39ffe 100644 --- a/compiler/src/dotty/tools/dotc/transform/Memoize.scala +++ b/compiler/src/dotty/tools/dotc/transform/Memoize.scala @@ -4,7 +4,7 @@ package transform import core._ import DenotTransformers._ import Contexts._ -import Phases.phaseOf +import Phases.* import SymDenotations.SymDenotation import Denotations._ import Symbols._ @@ -20,8 +20,6 @@ import sjs.JSSymUtils._ import util.Store -import dotty.tools.backend.sjs.JSDefinitions.jsdefn - object Memoize { val name: String = "memoize" val description: String = "add private fields to getters and setters" @@ -114,26 +112,10 @@ class Memoize extends MiniPhase with IdentityDenotTransformer { thisPhase => flags = Private | (if (sym.is(StableRealizable)) EmptyFlags else Mutable), info = fieldType, coord = tree.span - ).withAnnotationsCarrying(sym, defn.FieldMetaAnnot) + ).withAnnotationsCarrying(sym, defn.FieldMetaAnnot, orNoneOf = defn.MetaAnnots) .enteredAfter(thisPhase) } - def addAnnotations(denot: Denotation): Unit = - denot match { - case fieldDenot: SymDenotation if sym.annotations.nonEmpty => - val cpy = fieldDenot.copySymDenotation() - cpy.annotations = sym.annotations - cpy.installAfter(thisPhase) - case _ => () - } - - def removeUnwantedAnnotations(denot: SymDenotation, metaAnnotSym: ClassSymbol): Unit = - if (sym.annotations.nonEmpty) { - val cpy = sym.copySymDenotation() - cpy.filterAnnotations(_.symbol.hasAnnotation(metaAnnotSym)) - cpy.installAfter(thisPhase) - } - val NoFieldNeeded = Lazy | Deferred | JavaDefined | Inline def erasedBottomTree(sym: Symbol) = @@ -146,32 +128,17 @@ class Memoize extends MiniPhase with 
IdentityDenotTransformer { thisPhase => } if sym.is(Accessor, butNot = NoFieldNeeded) then - /* Tests whether the semantics of Scala.js require a field for this symbol, irrespective of any - * optimization we think we can do. This is the case if one of the following is true: - * - it is a member of a JS type, since it needs to be visible as a JavaScript field - * - is is exported as static member of the companion class, since it needs to be visible as a JavaScript static field - * - it is exported to the top-level, since that can only be done as a true top-level variable, i.e., a field - */ - def sjsNeedsField: Boolean = - ctx.settings.scalajs.value && ( - sym.owner.isJSType - || sym.hasAnnotation(jsdefn.JSExportTopLevelAnnot) - || sym.hasAnnotation(jsdefn.JSExportStaticAnnot) - ) - def adaptToField(field: Symbol, tree: Tree): Tree = if (tree.isEmpty) tree else tree.ensureConforms(field.info.widen) def isErasableBottomField(field: Symbol, cls: Symbol): Boolean = !field.isVolatile && ((cls eq defn.NothingClass) || (cls eq defn.NullClass) || (cls eq defn.BoxedUnitClass)) - && !sjsNeedsField + && !sym.sjsNeedsField if sym.isGetter then - val constantFinalVal = - sym.isAllOf(Accessor | Final, butNot = Mutable) && tree.rhs.isInstanceOf[Literal] && !sjsNeedsField - if constantFinalVal then - // constant final vals do not need to be transformed at all, and do not need a field + if sym.isConstExprFinalVal then + // const-expr final vals do not need to be transformed at all, and do not need a field tree else val field = newField.asTerm @@ -183,8 +150,7 @@ class Memoize extends MiniPhase with IdentityDenotTransformer { thisPhase => if isErasableBottomField(field, rhsClass) then erasedBottomTree(rhsClass) else transformFollowingDeep(ref(field))(using ctx.withOwner(sym)) val getterDef = cpy.DefDef(tree)(rhs = getterRhs) - addAnnotations(fieldDef.denot) - removeUnwantedAnnotations(sym, defn.GetterMetaAnnot) + sym.keepAnnotationsCarrying(thisPhase, Set(defn.GetterMetaAnnot)) 
Thicket(fieldDef, getterDef) else if sym.isSetter then if (!sym.is(ParamAccessor)) { val Literal(Constant(())) = tree.rhs: @unchecked } // This is intended as an assertion @@ -210,7 +176,7 @@ class Memoize extends MiniPhase with IdentityDenotTransformer { thisPhase => then Literal(Constant(())) else Assign(ref(field), adaptToField(field, ref(tree.termParamss.head.head.symbol))) val setterDef = cpy.DefDef(tree)(rhs = transformFollowingDeep(initializer)(using ctx.withOwner(sym))) - removeUnwantedAnnotations(sym, defn.SetterMetaAnnot) + sym.keepAnnotationsCarrying(thisPhase, Set(defn.SetterMetaAnnot)) setterDef else // Curiously, some accessors from Scala2 have ' ' suffixes. diff --git a/compiler/src/dotty/tools/dotc/transform/MoveStatics.scala b/compiler/src/dotty/tools/dotc/transform/MoveStatics.scala index 99702686edf8..db96aeefe231 100644 --- a/compiler/src/dotty/tools/dotc/transform/MoveStatics.scala +++ b/compiler/src/dotty/tools/dotc/transform/MoveStatics.scala @@ -46,7 +46,7 @@ class MoveStatics extends MiniPhase with SymTransformer { if (staticFields.nonEmpty) { /* do NOT put Flags.JavaStatic here. 
It breaks .enclosingClass */ val staticCostructor = newSymbol(orig.symbol, nme.STATIC_CONSTRUCTOR, Flags.Synthetic | Flags.Method | Flags.Private, MethodType(Nil, defn.UnitType)) - staticCostructor.addAnnotation(Annotation(defn.ScalaStaticAnnot)) + staticCostructor.addAnnotation(Annotation(defn.ScalaStaticAnnot, staticCostructor.span)) staticCostructor.entered val staticAssigns = staticFields.map(x => Assign(ref(x.symbol), x.rhs.changeOwner(x.symbol, staticCostructor))) diff --git a/compiler/src/dotty/tools/dotc/transform/NonLocalReturns.scala b/compiler/src/dotty/tools/dotc/transform/NonLocalReturns.scala index 7e1ae9e661f6..a75d6da9dd6a 100644 --- a/compiler/src/dotty/tools/dotc/transform/NonLocalReturns.scala +++ b/compiler/src/dotty/tools/dotc/transform/NonLocalReturns.scala @@ -6,6 +6,7 @@ import Contexts._, Symbols._, Types._, Flags._, StdNames._ import MegaPhase._ import NameKinds.NonLocalReturnKeyName import config.SourceVersion.* +import Decorators.em object NonLocalReturns { import ast.tpd._ @@ -96,7 +97,7 @@ class NonLocalReturns extends MiniPhase { override def transformReturn(tree: Return)(using Context): Tree = if isNonLocalReturn(tree) then report.gradualErrorOrMigrationWarning( - "Non local returns are no longer supported; use scala.util.control.NonLocalReturns instead", + em"Non local returns are no longer supported; use `boundary` and `boundary.break` in `scala.util` instead", tree.srcPos, warnFrom = `3.2`, errorFrom = future) diff --git a/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala b/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala index b27a75436d86..48dc7c818360 100644 --- a/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala +++ b/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala @@ -200,10 +200,13 @@ object OverridingPairs: /** Let `member` and `other` be members of some common class C with types * `memberTp` and `otherTp` in C. Are the two symbols considered an overriding * pair in C? 
We assume that names already match so we test only the types here. - * @param fallBack A function called if the initial test is false and - * `member` and `other` are term symbols. + * @param fallBack A function called if the initial test is false and + * `member` and `other` are term symbols. + * @param isSubType A function to be used for checking subtype relationships + * between term fields. */ - def isOverridingPair(member: Symbol, memberTp: Type, other: Symbol, otherTp: Type, fallBack: => Boolean = false)(using Context): Boolean = + def isOverridingPair(member: Symbol, memberTp: Type, other: Symbol, otherTp: Type, fallBack: => Boolean = false, + isSubType: (Type, Type) => Context ?=> Boolean = (tp1, tp2) => tp1 frozen_<:< tp2)(using Context): Boolean = if member.isType then // intersection of bounds to refined types must be nonempty memberTp.bounds.hi.hasSameKindAs(otherTp.bounds.hi) && ( @@ -222,6 +225,6 @@ object OverridingPairs: val relaxedOverriding = ctx.explicitNulls && (member.is(JavaDefined) || other.is(JavaDefined)) member.name.is(DefaultGetterName) // default getters are not checked for compatibility || memberTp.overrides(otherTp, relaxedOverriding, - member.matchNullaryLoosely || other.matchNullaryLoosely || fallBack) + member.matchNullaryLoosely || other.matchNullaryLoosely || fallBack, isSubType = isSubType) end OverridingPairs diff --git a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala index 70fa0e5cc513..ac1e1868f26e 100644 --- a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala +++ b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala @@ -2,23 +2,26 @@ package dotty.tools package dotc package transform -import scala.annotation.tailrec import core._ import MegaPhase._ -import collection.mutable import Symbols._, Contexts._, Types._, StdNames._, NameOps._ +import patmat.SpaceEngine import util.Spans._ import typer.Applications.* import SymUtils._ 
import TypeUtils.* +import Annotations.* import Flags._, Constants._ import Decorators._ import NameKinds.{PatMatStdBinderName, PatMatAltsName, PatMatResultName} import config.Printers.patmatch import reporting._ -import dotty.tools.dotc.ast._ +import ast._ import util.Property._ +import scala.annotation.tailrec +import scala.collection.mutable + /** The pattern matching transform. * After this phase, the only Match nodes remaining in the code are simple switches * where every pattern is an integer or string constant @@ -45,9 +48,8 @@ class PatternMatcher extends MiniPhase { val translated = new Translator(matchType, this).translateMatch(tree) // check exhaustivity and unreachability - val engine = new patmat.SpaceEngine - engine.checkExhaustivity(tree) - engine.checkRedundancy(tree) + SpaceEngine.checkExhaustivity(tree) + SpaceEngine.checkRedundancy(tree) translated.ensureConforms(matchType) } @@ -664,12 +666,12 @@ object PatternMatcher { val refCount = varRefCount(plan) val LetPlan(topSym, _) = plan: @unchecked - def toDrop(sym: Symbol) = initializer.get(sym) match { - case Some(rhs) => + def toDrop(sym: Symbol) = + val rhs = initializer.lookup(sym) + if rhs != null then isPatmatGenerated(sym) && refCount(sym) <= 1 && sym != topSym && isPureExpr(rhs) - case none => + else false - } object Inliner extends PlanTransform { override val treeMap = new TreeMap { @@ -707,9 +709,9 @@ object PatternMatcher { // ----- Generating trees from plans --------------- /** The condition a test plan rewrites to */ - private def emitCondition(plan: TestPlan): Tree = { + private def emitCondition(plan: TestPlan): Tree = val scrutinee = plan.scrutinee - (plan.test: @unchecked) match { + (plan.test: @unchecked) match case NonEmptyTest => constToLiteral( scrutinee @@ -737,41 +739,49 @@ object PatternMatcher { case TypeTest(tpt, trusted) => val expectedTp = tpt.tpe - // An outer test is needed in a situation like `case x: y.Inner => ...` - def outerTestNeeded: Boolean = { - def 
go(expected: Type): Boolean = expected match { - case tref @ TypeRef(pre: SingletonType, _) => - tref.symbol.isClass && - ExplicitOuter.needsOuterIfReferenced(tref.symbol.asClass) - case AppliedType(tpe, _) => go(tpe) - case _ => - false - } - // See the test for SI-7214 for motivation for dealias. Later `treeCondStrategy#outerTest` - // generates an outer test based on `patType.prefix` with automatically dealises. - go(expectedTp.dealias) - } + def typeTest(scrut: Tree, expected: Type): Tree = + val ttest = scrut.select(defn.Any_typeTest).appliedToType(expected) + if trusted then ttest.pushAttachment(TrustedTypeTestKey, ()) + ttest - def outerTest: Tree = thisPhase.transformFollowingDeep { - val expectedOuter = singleton(expectedTp.normalizedPrefix) - val expectedClass = expectedTp.dealias.classSymbol.asClass - ExplicitOuter.ensureOuterAccessors(expectedClass) - scrutinee.ensureConforms(expectedTp) - .outerSelect(1, expectedClass.owner.typeRef) - .select(defn.Object_eq) - .appliedTo(expectedOuter) - } + /** An outer test is needed in a situation like `case x: y.Inner => ... + * or like case x: O#Inner if the owner of Inner is not a subclass of O. + * Outer tests are added here instead of in TypeTestsCasts since they + * might cause outer accessors to be added to inner classes (via ensureOuterAccessors) + * and therefore have to run before ExplicitOuter. 
+ */ + def addOuterTest(tree: Tree, expected: Type): Tree = expected.dealias match + case tref @ TypeRef(pre, _) => + tref.symbol match + case expectedCls: ClassSymbol if ExplicitOuter.needsOuterIfReferenced(expectedCls) => + def selectOuter = + ExplicitOuter.ensureOuterAccessors(expectedCls) + scrutinee.ensureConforms(expected).outerSelect(1, expectedCls.owner.typeRef) + if pre.isSingleton then + val expectedOuter = singleton(pre) + tree.and(selectOuter.select(defn.Object_eq).appliedTo(expectedOuter)) + else if !expectedCls.isStatic + && expectedCls.owner.isType + && !expectedCls.owner.derivesFrom(pre.classSymbol) + then + val testPre = + if expected.hasAnnotation(defn.UncheckedAnnot) then + AnnotatedType(pre, Annotation(defn.UncheckedAnnot, tree.span)) + else pre + tree.and(typeTest(selectOuter, testPre)) + else tree + case _ => tree + case AppliedType(tycon, _) => + addOuterTest(tree, tycon) + case _ => + tree - expectedTp.dealias match { + expectedTp.dealias match case expectedTp: SingletonType => scrutinee.isInstance(expectedTp) // will be translated to an equality test case _ => - val typeTest = scrutinee.select(defn.Any_typeTest).appliedToType(expectedTp) - if (trusted) typeTest.pushAttachment(TrustedTypeTestKey, ()) - if (outerTestNeeded) typeTest.and(outerTest) else typeTest - } - } - } + addOuterTest(typeTest(scrutinee, expectedTp), expectedTp) + end emitCondition @tailrec private def canFallThrough(plan: Plan): Boolean = plan match { diff --git a/compiler/src/dotty/tools/dotc/transform/PickleQuotes.scala b/compiler/src/dotty/tools/dotc/transform/PickleQuotes.scala index f3ae6a377aab..15a1a823589c 100644 --- a/compiler/src/dotty/tools/dotc/transform/PickleQuotes.scala +++ b/compiler/src/dotty/tools/dotc/transform/PickleQuotes.scala @@ -9,17 +9,18 @@ import Contexts._ import Symbols._ import Constants._ import ast.Trees._ +import ast.untpd import ast.TreeTypeMap import SymUtils._ import NameKinds._ import dotty.tools.dotc.ast.tpd +import 
dotty.tools.dotc.ast.untpd import dotty.tools.dotc.config.ScalaRelease.* import scala.collection.mutable import dotty.tools.dotc.core.Annotations._ import dotty.tools.dotc.core.StdNames._ import dotty.tools.dotc.quoted._ -import dotty.tools.dotc.transform.TreeMapWithStages._ import dotty.tools.dotc.inlines.Inlines import scala.annotation.constructorOnly @@ -28,17 +29,15 @@ import scala.annotation.constructorOnly * * Transforms top level quote * ``` - * '{ ... - * @TypeSplice type X0 = {{ 0 | .. | contentsTpe0 | .. }} - * @TypeSplice type X2 = {{ 1 | .. | contentsTpe1 | .. }} + * '{ ... * val x1: U1 = ??? * val x2: U2 = ??? * ... - * {{{ 3 | x1 | contents0 | T0 }}} // hole + * {{{ 3 | x1 | holeContents0 | T0 }}} // hole * ... - * {{{ 4 | x2 | contents1 | T1 }}} // hole + * {{{ 4 | x2 | holeContents1 | T1 }}} // hole * ... - * {{{ 5 | x1, x2 | contents2 | T2 }}} // hole + * {{{ 5 | x1, x2 | holeContents2 | T2 }}} // hole * ... * } * ``` @@ -46,26 +45,23 @@ import scala.annotation.constructorOnly * ``` * unpickleExprV2( * pickled = [[ // PICKLED TASTY - * @TypeSplice type X0 // with bounds that do not contain captured types - * @TypeSplice type X1 // with bounds that do not contain captured types + * @TypeSplice type A // with bounds that do not contain captured types + * @TypeSplice type B // with bounds that do not contain captured types * val x1 = ??? * val x2 = ??? * ... - * {{{ 0 | x1 | | T0 }}} // hole - * ... - * {{{ 1 | x2 | | T1 }}} // hole - * ... - * {{{ 2 | x1, x2 | | T2 }}} // hole + * {{{ 0 | x1 | | T0 }}} // hole + * ... + * {{{ 1 | x2 | | T1 }}} // hole + * ... + * {{{ 2 | x1, x2 | | T2 }}} // hole * ... 
* ]], - * typeHole = (idx: Int, args: List[Any]) => idx match { - * case 0 => contentsTpe0.apply(args(0).asInstanceOf[Type[?]]) // beta reduced - * case 1 => contentsTpe1.apply(args(0).asInstanceOf[Type[?]]) // beta reduced - * }, + * typeHole = Seq(a, b), * termHole = (idx: Int, args: List[Any], quotes: Quotes) => idx match { - * case 3 => content0.apply(args(0).asInstanceOf[Expr[U1]]).apply(quotes) // beta reduced - * case 4 => content1.apply(args(0).asInstanceOf[Expr[U2]]).apply(quotes) // beta reduced - * case 5 => content2.apply(args(0).asInstanceOf[Expr[U1]], args(1).asInstanceOf[Expr[U2]]).apply(quotes) // beta reduced + * case 3 => holeContents0.apply(args(0).asInstanceOf[Expr[U1]]).apply(quotes) // beta reduced + * case 4 => holeContents1.apply(args(0).asInstanceOf[Expr[U2]]).apply(quotes) // beta reduced + * case 5 => holeContents2.apply(args(0).asInstanceOf[Expr[U1]], args(1).asInstanceOf[Expr[U2]]).apply(quotes) // beta reduced * }, * ) * ``` @@ -84,58 +80,41 @@ class PickleQuotes extends MacroTransform { override def checkPostCondition(tree: Tree)(using Context): Unit = tree match - case tree: RefTree if !Inlines.inInlineMethod => - assert(!tree.symbol.isQuote) - assert(!tree.symbol.isExprSplice) - case _ : TypeDef if !Inlines.inInlineMethod => - assert(!tree.symbol.hasAnnotation(defn.QuotedRuntime_SplicedTypeAnnot), - s"${tree.symbol} should have been removed by PickledQuotes because it has a @quoteTypeTag") + case tree: Quote => + assert(Inlines.inInlineMethod) + case tree: Splice => + assert(Inlines.inInlineMethod) case _ => override def run(using Context): Unit = - if (ctx.compilationUnit.needsStaging) super.run(using freshStagingContext) + if (ctx.compilationUnit.needsStaging) super.run protected def newTransformer(using Context): Transformer = new Transformer { override def transform(tree: tpd.Tree)(using Context): tpd.Tree = tree match - case Apply(Select(Apply(TypeApply(fn, List(tpt)), List(code)),nme.apply), List(quotes)) - if fn.symbol == 
defn.QuotedRuntime_exprQuote => - val (contents, codeWithHoles) = makeHoles(code) - val sourceRef = Inlines.inlineCallTrace(ctx.owner, tree.sourcePos) - val codeWithHoles2 = Inlined(sourceRef, Nil, codeWithHoles) - val pickled = PickleQuotes(quotes, codeWithHoles2, contents, tpt.tpe, false) - transform(pickled) // pickle quotes that are in the contents - case Apply(TypeApply(_, List(tpt)), List(quotes)) if tree.symbol == defn.QuotedTypeModule_of => - tpt match - case Select(t, _) if tpt.symbol == defn.QuotedType_splice => - // `Type.of[t.Underlying](quotes)` --> `t` - ref(t.symbol)(using ctx.withSource(tpt.source)).withSpan(tpt.span) - case _ => - val (contents, tptWithHoles) = makeHoles(tpt) - PickleQuotes(quotes, tptWithHoles, contents, tpt.tpe, true) - case tree: DefDef if tree.symbol.is(Macro) => - // Shrink size of the tree. The methods have already been inlined. - // TODO move to FirstTransform to trigger even without quotes - cpy.DefDef(tree)(rhs = defaultValue(tree.rhs.tpe)) - case _: DefDef if tree.symbol.isInlineMethod => + case Apply(Select(quote: Quote, nme.apply), List(quotes)) => + val (holeContents, quote1) = extractHolesContents(quote) + val quote2 = encodeTypeArgs(quote1) + val holeContents1 = holeContents.map(transform(_)) + PickleQuotes.pickle(quote2, quotes, holeContents1) + case tree: DefDef if !tree.rhs.isEmpty && tree.symbol.isInlineMethod => tree case _ => super.transform(tree) } - private def makeHoles(tree: tpd.Tree)(using Context): (List[Tree], tpd.Tree) = - + private def extractHolesContents(quote: tpd.Quote)(using Context): (List[Tree], tpd.Quote) = class HoleContentExtractor extends Transformer: - private val contents = List.newBuilder[Tree] + private val holeContents = List.newBuilder[Tree] override def transform(tree: tpd.Tree)(using Context): tpd.Tree = tree match - case tree @ Hole(isTerm, _, _, content, _) => - if !content.isEmpty then - contents += content - val holeType = - if isTerm then getTermHoleType(tree.tpe) else 
getTypeHoleType(tree.tpe) - val hole = cpy.Hole(tree)(content = EmptyTree, TypeTree(holeType)) - if isTerm then Inlined(EmptyTree, Nil, hole).withSpan(tree.span) else hole + case tree @ Hole(isTerm, _, _, content) => + assert(isTerm) + assert(!content.isEmpty) + holeContents += content + val holeType = getTermHoleType(tree.tpe) + val hole = untpd.cpy.Hole(tree)(content = EmptyTree).withType(holeType) + cpy.Inlined(tree)(EmptyTree, Nil, hole) case tree: DefTree => val newAnnotations = tree.symbol.annotations.mapconserve { annot => annot.derivedAnnotation(transform(annot.tree)(using ctx.withOwner(tree.symbol))) @@ -155,20 +134,6 @@ class PickleQuotes extends MacroTransform { } } - /** Remove references to local types that will not be defined in this quote */ - private def getTypeHoleType(using Context) = new TypeMap() { - override def apply(tp: Type): Type = tp match - case tp: TypeRef if tp.typeSymbol.isTypeSplice => - apply(tp.dealias) - case tp @ TypeRef(pre, _) if pre == NoPrefix || pre.termSymbol.isLocal => - val hiBound = tp.typeSymbol.info match - case info: ClassInfo => info.parents.reduce(_ & _) - case info => info.hiBound - apply(hiBound) - case tp => - mapOver(tp) - } - /** Remove references to local types that will not be defined in this quote */ private def getTermHoleType(using Context) = new TypeMap() { override def apply(tp: Type): Type = tp match @@ -182,19 +147,86 @@ class PickleQuotes extends MacroTransform { mapOver(tp) } - /** Get the contents of the transformed tree */ + /** Get the holeContents of the transformed tree */ def getContents() = - val res = contents.result - contents.clear() + val res = holeContents.result + holeContents.clear() res end HoleContentExtractor val holeMaker = new HoleContentExtractor - val newTree = holeMaker.transform(tree) - (holeMaker.getContents(), newTree) - + val body1 = holeMaker.transform(quote.body) + val quote1 = cpy.Quote(quote)(body1, quote.tags) + + (holeMaker.getContents(), quote1) + end 
extractHolesContents + + /** Encode quote tags as holes in the quote body. + * + * ```scala + * '{ ... t.Underlying ... u.Underlying ... } + * ``` + * becomes + * ```scala + * '{ + * type T = {{ 0 | .. | .. | .. }} + * type U = {{ 1 | .. | .. | .. }} + * ... T ... U ... + * } + * ``` + */ + private def encodeTypeArgs(quote: tpd.Quote)(using Context): tpd.Quote = + if quote.tags.isEmpty then quote + else + val tdefs = quote.tags.zipWithIndex.map(mkTagSymbolAndAssignType) + val typeMapping = quote.tags.map(_.tpe).zip(tdefs.map(_.symbol.typeRef)).toMap + val typeMap = new TypeMap { + override def apply(tp: Type): Type = tp match + case TypeRef(tag: TermRef, _) if tp.typeSymbol == defn.QuotedType_splice => + typeMapping.getOrElse(tag, tp) + case _ => mapOver(tp) + } + def treeMap(tree: Tree): Tree = tree match + case Select(qual, _) if tree.symbol == defn.QuotedType_splice => + typeMapping.get(qual.tpe) match + case Some(tag) => TypeTree(tag).withSpan(tree.span) + case None => tree + case _ => tree + val body1 = new TreeTypeMap(typeMap, treeMap).transform(quote.body) + cpy.Quote(quote)(Block(tdefs, body1), quote.tags) + + private def mkTagSymbolAndAssignType(typeArg: Tree, idx: Int)(using Context): TypeDef = { + val holeType = getTypeHoleType(typeArg.tpe.select(tpnme.Underlying)) + val hole = untpd.cpy.Hole(typeArg)(isTerm = false, idx, Nil, EmptyTree).withType(holeType) + val local = newSymbol( + owner = ctx.owner, + name = UniqueName.fresh(hole.tpe.dealias.typeSymbol.name.toTypeName), + flags = Synthetic, + info = TypeAlias(typeArg.tpe.select(tpnme.Underlying)), + coord = typeArg.span + ).asType + local.addAnnotation(Annotation(defn.QuotedRuntime_SplicedTypeAnnot, typeArg.span)) + ctx.typeAssigner.assignType(untpd.TypeDef(local.name, hole), local).withSpan(typeArg.span) + } - end makeHoles + /** Remove references to local types that will not be defined in this quote */ + private def getTypeHoleType(using Context) = new TypeMap() { + override def apply(tp: Type): Type 
= tp match + case tp: TypeRef if tp.typeSymbol.isTypeSplice => + apply(tp.dealias) + case tp @ TypeRef(pre, _) if isLocalPath(pre) => + val hiBound = tp.typeSymbol.info match + case info: ClassInfo => info.parents.reduce(_ & _) + case info => info.hiBound + apply(hiBound) + case tp => + mapOver(tp) + + private def isLocalPath(tp: Type): Boolean = tp match + case NoPrefix => true + case tp: TermRef if !tp.symbol.is(Package) => isLocalPath(tp.prefix) + case tp => false + } } @@ -204,7 +236,10 @@ object PickleQuotes { val name: String = "pickleQuotes" val description: String = "turn quoted trees into explicit run-time data structures" - def apply(quotes: Tree, body: Tree, contents: List[Tree], originalTp: Type, isType: Boolean)(using Context) = { + def pickle(quote: Quote, quotes: Tree, holeContents: List[Tree])(using Context) = { + val body = quote.body + val bodyType = quote.bodyType + /** Helper methods to construct trees calling methods in `Quotes.reflect` based on the current `quotes` tree */ object reflect extends ReifiedReflect { val quotesTree = quotes @@ -258,7 +293,7 @@ object PickleQuotes { */ def liftedValue(lit: Literal, lifter: Symbol) = val exprType = defn.QuotedExprClass.typeRef.appliedTo(body.tpe) - ref(lifter).appliedToType(originalTp).select(nme.apply).appliedTo(lit).appliedTo(quotes) + ref(lifter).appliedToType(bodyType).select(nme.apply).appliedTo(lit).appliedTo(quotes) def pickleAsValue(lit: Literal) = { // TODO should all constants be pickled as Literals? @@ -291,24 +326,22 @@ object PickleQuotes { * this closure is always applied directly to the actual context and the BetaReduce phase removes it. 
*/ def pickleAsTasty() = { - val pickleQuote = PickledQuotes.pickleQuote(body) + val body1 = + if body.isType then body + else Inlined(Inlines.inlineCallTrace(ctx.owner, quote.sourcePos), Nil, body) + val pickleQuote = PickledQuotes.pickleQuote(body1) val pickledQuoteStrings = pickleQuote match case x :: Nil => Literal(Constant(x)) case xs => tpd.mkList(xs.map(x => Literal(Constant(x))), TypeTree(defn.StringType)) - // TODO split holes earlier into types and terms. This all holes in each category can have consecutive indices - val (typeSplices, termSplices) = contents.zipWithIndex.partition { - _._1.tpe.derivesFrom(defn.QuotedTypeClass) - } - // This and all closures in typeSplices are removed by the BetaReduce phase val types = - if typeSplices.isEmpty then Literal(Constant(null)) // keep pickled quote without contents as small as possible - else SeqLiteral(typeSplices.map(_._1), TypeTree(defn.QuotedTypeClass.typeRef.appliedTo(WildcardType))) + if quote.tags.isEmpty then Literal(Constant(null)) // keep pickled quote without holeContents as small as possible + else SeqLiteral(quote.tags, TypeTree(defn.QuotedTypeClass.typeRef.appliedTo(TypeBounds.emptyPolyKind))) // This and all closures in termSplices are removed by the BetaReduce phase val termHoles = - if termSplices.isEmpty then Literal(Constant(null)) // keep pickled quote without contents as small as possible + if holeContents.isEmpty then Literal(Constant(null)) // keep pickled quote without holeContents as small as possible else Lambda( MethodType( @@ -316,15 +349,18 @@ object PickleQuotes { List(defn.IntType, defn.SeqType.appliedTo(defn.AnyType), defn.QuotesClass.typeRef), defn.QuotedExprClass.typeRef.appliedTo(defn.AnyType)), args => - val cases = termSplices.map { case (splice, idx) => - val defn.FunctionOf(argTypes, defn.FunctionOf(quotesType :: _, _, _, _), _, _) = splice.tpe: @unchecked + val cases = holeContents.zipWithIndex.map { case (splice, idx) => + val defn.FunctionOf(argTypes, 
defn.FunctionOf(quotesType :: _, _, _), _) = splice.tpe: @unchecked val rhs = { val spliceArgs = argTypes.zipWithIndex.map { (argType, i) => args(1).select(nme.apply).appliedTo(Literal(Constant(i))).asInstance(argType) } val Block(List(ddef: DefDef), _) = splice: @unchecked // TODO: beta reduce inner closure? Or wait until BetaReduce phase? - BetaReduce(ddef, spliceArgs).select(nme.apply).appliedTo(args(2).asInstance(quotesType)) + BetaReduce( + splice + .select(nme.apply).appliedToArgs(spliceArgs)) + .select(nme.apply).appliedTo(args(2).asInstance(quotesType)) } CaseDef(Literal(Constant(idx)), EmptyTree, rhs) } @@ -333,18 +369,18 @@ object PickleQuotes { case _ => Match(args(0).annotated(New(ref(defn.UncheckedAnnot.typeRef))), cases) ) - val quoteClass = if isType then defn.QuotedTypeClass else defn.QuotedExprClass - val quotedType = quoteClass.typeRef.appliedTo(originalTp) + val quoteClass = if quote.isTypeQuote then defn.QuotedTypeClass else defn.QuotedExprClass + val quotedType = quoteClass.typeRef.appliedTo(bodyType) val lambdaTpe = MethodType(defn.QuotesClass.typeRef :: Nil, quotedType) val unpickleMeth = - if isType then defn.QuoteUnpickler_unpickleTypeV2 + if quote.isTypeQuote then defn.QuoteUnpickler_unpickleTypeV2 else defn.QuoteUnpickler_unpickleExprV2 val unpickleArgs = - if isType then List(pickledQuoteStrings, types) + if quote.isTypeQuote then List(pickledQuoteStrings, types) else List(pickledQuoteStrings, types, termHoles) quotes .asInstance(defn.QuoteUnpicklerClass.typeRef) - .select(unpickleMeth).appliedToType(originalTp) + .select(unpickleMeth).appliedToType(bodyType) .appliedToArgs(unpickleArgs).withSpan(body.span) } @@ -371,8 +407,8 @@ object PickleQuotes { case Inlined(_, Nil, e) => getLiteral(e) case _ => None - if (isType) then - if contents.isEmpty && body.symbol.isPrimitiveValueClass then taggedType() + if body.isType then + if holeContents.isEmpty && body.symbol.isPrimitiveValueClass then taggedType() else pickleAsTasty() else 
getLiteral(body) match diff --git a/compiler/src/dotty/tools/dotc/transform/Pickler.scala b/compiler/src/dotty/tools/dotc/transform/Pickler.scala index 4d9b42a36fe7..f5fe34bafc2f 100644 --- a/compiler/src/dotty/tools/dotc/transform/Pickler.scala +++ b/compiler/src/dotty/tools/dotc/transform/Pickler.scala @@ -1,4 +1,5 @@ -package dotty.tools.dotc +package dotty.tools +package dotc package transform import core._ @@ -11,10 +12,10 @@ import Periods._ import Phases._ import Symbols._ import Flags.Module -import reporting.{ThrowingReporter, Profile} +import reporting.{ThrowingReporter, Profile, Message} import collection.mutable -import scala.concurrent.{Future, Await, ExecutionContext} -import scala.concurrent.duration.Duration +import util.concurrent.{Executor, Future} +import compiletime.uninitialized object Pickler { val name: String = "pickler" @@ -47,7 +48,7 @@ class Pickler extends Phase { // Maps that keep a record if -Ytest-pickler is set. private val beforePickling = new mutable.HashMap[ClassSymbol, String] - private val picklers = new mutable.HashMap[ClassSymbol, TastyPickler] + private val pickledBytes = new mutable.HashMap[ClassSymbol, Array[Byte]] /** Drop any elements of this list that are linked module classes of other elements in the list */ private def dropCompanionModuleClasses(clss: List[ClassSymbol])(using Context): List[ClassSymbol] = { @@ -56,6 +57,24 @@ class Pickler extends Phase { clss.filterNot(companionModuleClasses.contains) } + /** Runs given functions with a scratch data block in a serialized fashion (i.e. + * inside a synchronized block). Scratch data is re-used between calls. + * Used to conserve on memory usage by avoiding to create scratch data for each + * pickled unit. 
+ */ + object serialized: + val scratch = new ScratchData + def run(body: ScratchData => Array[Byte]): Array[Byte] = + synchronized { + scratch.reset() + body(scratch) + } + + private val executor = Executor[Array[Byte]]() + + private def useExecutor(using Context) = + Pickler.ParallelPickling && !ctx.settings.YtestPickler.value + override def run(using Context): Unit = { val unit = ctx.compilationUnit pickling.println(i"unpickling in run ${ctx.runId}") @@ -64,25 +83,30 @@ class Pickler extends Phase { cls <- dropCompanionModuleClasses(topLevelClasses(unit.tpdTree)) tree <- sliceTopLevel(unit.tpdTree, cls) do + if ctx.settings.YtestPickler.value then beforePickling(cls) = tree.show + val pickler = new TastyPickler(cls) - if ctx.settings.YtestPickler.value then - beforePickling(cls) = tree.show - picklers(cls) = pickler val treePkl = new TreePickler(pickler) treePkl.pickle(tree :: Nil) Profile.current.recordTasty(treePkl.buf.length) - val positionWarnings = new mutable.ListBuffer[String]() - val pickledF = inContext(ctx.fresh) { - Future { - treePkl.compactify() + + val positionWarnings = new mutable.ListBuffer[Message]() + def reportPositionWarnings() = positionWarnings.foreach(report.warning(_)) + + def computePickled(): Array[Byte] = inContext(ctx.fresh) { + serialized.run { scratch => + treePkl.compactify(scratch) if tree.span.exists then val reference = ctx.settings.sourceroot.value - new PositionPickler(pickler, treePkl.buf.addrOfTree, treePkl.treeAnnots, reference) - .picklePositions(unit.source, tree :: Nil, positionWarnings) + PositionPickler.picklePositions( + pickler, treePkl.buf.addrOfTree, treePkl.treeAnnots, reference, + unit.source, tree :: Nil, positionWarnings, + scratch.positionBuffer, scratch.pickledIndices) if !ctx.settings.YdropComments.value then - new CommentPickler(pickler, treePkl.buf.addrOfTree, treePkl.docString) - .pickleComment(tree) + CommentPickler.pickleComments( + pickler, treePkl.buf.addrOfTree, treePkl.docString, tree, + 
scratch.commentBuffer) val pickled = pickler.assembleParts() @@ -93,26 +117,40 @@ class Pickler extends Phase { // println(i"rawBytes = \n$rawBytes%\n%") // DEBUG if pickling ne noPrinter then - pickling.synchronized { - println(i"**** pickled info of $cls") - println(TastyPrinter.showContents(pickled, ctx.settings.color.value == "never")) - } + println(i"**** pickled info of $cls") + println(TastyPrinter.showContents(pickled, ctx.settings.color.value == "never")) pickled - }(using ExecutionContext.global) + } } - def force(): Array[Byte] = - val result = Await.result(pickledF, Duration.Inf) - positionWarnings.foreach(report.warning(_)) - result - - if !Pickler.ParallelPickling || ctx.settings.YtestPickler.value then force() - unit.pickled += (cls -> force) + /** A function that returns the pickled bytes. Depending on `Pickler.ParallelPickling` + * either computes the pickled data in a future or eagerly before constructing the + * function value. + */ + val demandPickled: () => Array[Byte] = + if useExecutor then + val futurePickled = executor.schedule(computePickled) + () => + try futurePickled.force.get + finally reportPositionWarnings() + else + val pickled = computePickled() + reportPositionWarnings() + if ctx.settings.YtestPickler.value then pickledBytes(cls) = pickled + () => pickled + + unit.pickled += (cls -> demandPickled) end for } override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] = { - val result = super.runOn(units) + val result = + if useExecutor then + executor.start() + try super.runOn(units) + finally executor.close() + else + super.runOn(units) if ctx.settings.YtestPickler.value then val ctx2 = ctx.fresh.setSetting(ctx.settings.YreadComments, true) testUnpickler( @@ -128,8 +166,8 @@ class Pickler extends Phase { pickling.println(i"testing unpickler at run ${ctx.runId}") ctx.initialize() val unpicklers = - for ((cls, pickler) <- picklers) yield { - val unpickler = new DottyUnpickler(pickler.assembleParts()) + for 
((cls, bytes) <- pickledBytes) yield { + val unpickler = new DottyUnpickler(bytes) unpickler.enter(roots = Set.empty) cls -> unpickler } @@ -147,8 +185,9 @@ class Pickler extends Phase { if unequal then output("before-pickling.txt", previous) output("after-pickling.txt", unpickled) - report.error(s"""pickling difference for $cls in ${cls.source}, for details: - | - | diff before-pickling.txt after-pickling.txt""".stripMargin) + //sys.process.Process("diff -u before-pickling.txt after-pickling.txt").! + report.error(em"""pickling difference for $cls in ${cls.source}, for details: + | + | diff before-pickling.txt after-pickling.txt""") end testSame } diff --git a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala index 0424b48751bc..ac3dc15092a0 100644 --- a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala +++ b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala @@ -1,4 +1,5 @@ -package dotty.tools.dotc +package dotty.tools +package dotc package transform import dotty.tools.dotc.ast.{Trees, tpd, untpd, desugar} @@ -156,12 +157,20 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase checkInferredWellFormed(tree.tpt) if sym.is(Method) then if sym.isSetter then - removeUnwantedAnnotations(sym, defn.SetterMetaAnnot, NoSymbol, keepIfNoRelevantAnnot = false) + sym.keepAnnotationsCarrying(thisPhase, Set(defn.SetterMetaAnnot)) + if sym.isOneOf(GivenOrImplicit) then + val cls = sym.info.finalResultType.classSymbol + if cls.isOneOf(GivenOrImplicit) then + sym.updateAnnotationsAfter(thisPhase, + atPhase(thisPhase)(cls.annotationsCarrying(Set(defn.CompanionMethodMetaAnnot))) + ++ sym.annotations) else if sym.is(Param) then - removeUnwantedAnnotations(sym, defn.ParamMetaAnnot, NoSymbol, keepIfNoRelevantAnnot = true) + sym.keepAnnotationsCarrying(thisPhase, Set(defn.ParamMetaAnnot), orNoneOf = defn.NonBeanMetaAnnots) + else if sym.is(ParamAccessor) then + 
sym.keepAnnotationsCarrying(thisPhase, Set(defn.GetterMetaAnnot, defn.FieldMetaAnnot)) else - removeUnwantedAnnotations(sym, defn.GetterMetaAnnot, defn.FieldMetaAnnot, keepIfNoRelevantAnnot = !sym.is(ParamAccessor)) + sym.keepAnnotationsCarrying(thisPhase, Set(defn.GetterMetaAnnot, defn.FieldMetaAnnot), orNoneOf = defn.NonBeanMetaAnnots) if sym.isScala2Macro && !ctx.settings.XignoreScala2Macros.value then if !sym.owner.unforcedDecls.exists(p => !p.isScala2Macro && p.name == sym.name && p.signature == sym.signature) // Allow scala.reflect.materializeClassTag to be able to compile scala/reflect/package.scala @@ -183,17 +192,6 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase => Checking.checkAppliedTypesIn(tree) case _ => - private def removeUnwantedAnnotations(sym: Symbol, metaAnnotSym: Symbol, - metaAnnotSymBackup: Symbol, keepIfNoRelevantAnnot: Boolean)(using Context): Unit = - def shouldKeep(annot: Annotation): Boolean = - val annotSym = annot.symbol - annotSym.hasAnnotation(metaAnnotSym) - || annotSym.hasAnnotation(metaAnnotSymBackup) - || (keepIfNoRelevantAnnot && { - !annotSym.annotations.exists(metaAnnot => defn.FieldAccessorMetaAnnots.contains(metaAnnot.symbol)) - }) - if sym.annotations.nonEmpty then - sym.filterAnnotations(shouldKeep(_)) private def transformSelect(tree: Select, targs: List[Tree])(using Context): Tree = { val qual = tree.qualifier @@ -269,7 +267,7 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase def checkNotPackage(tree: Tree)(using Context): Tree = if !tree.symbol.is(Package) then tree - else errorTree(tree, i"${tree.symbol} cannot be used as a type") + else errorTree(tree, em"${tree.symbol} cannot be used as a type") override def transform(tree: Tree)(using Context): Tree = try tree match { @@ -277,7 +275,7 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase case CaseDef(pat, _, _) => val gadtCtx = 
pat.removeAttachment(typer.Typer.InferredGadtConstraints) match - case Some(gadt) => ctx.fresh.setGadt(gadt) + case Some(gadt) => ctx.fresh.setGadtState(GadtState(gadt)) case None => ctx super.transform(tree)(using gadtCtx) @@ -285,16 +283,14 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase if tree.isType then checkNotPackage(tree) else - if tree.symbol.is(Inline) && !Inlines.inInlineMethod then - ctx.compilationUnit.needsInlining = true checkNoConstructorProxy(tree) + registerNeedsInlining(tree) tree.tpe match { case tpe: ThisType => This(tpe.cls).withSpan(tree.span) case _ => tree } case tree @ Select(qual, name) => - if tree.symbol.is(Inline) then - ctx.compilationUnit.needsInlining = true + registerNeedsInlining(tree) if name.isTypeName then Checking.checkRealizable(qual.tpe, qual.srcPos) withMode(Mode.Type)(super.transform(checkNotPackage(tree))) @@ -302,19 +298,23 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase checkNoConstructorProxy(tree) transformSelect(tree, Nil) case tree: Apply => - val methType = tree.fun.tpe.widen + val methType = tree.fun.tpe.widen.asInstanceOf[MethodType] val app = - if (methType.isErasedMethod) + if (methType.hasErasedParams) tpd.cpy.Apply(tree)( tree.fun, - tree.args.mapConserve(arg => - if (methType.isImplicitMethod && arg.span.isSynthetic) - arg match - case _: RefTree | _: Apply | _: TypeApply if arg.symbol.is(Erased) => - dropInlines.transform(arg) - case _ => - PruneErasedDefs.trivialErasedTree(arg) - else dropInlines.transform(arg))) + tree.args.zip(methType.erasedParams).map((arg, isErased) => + if !isErased then arg + else + if methType.isResultDependent then + Checking.checkRealizable(arg.tpe, arg.srcPos, "erased argument") + if (methType.isImplicitMethod && arg.span.isSynthetic) + arg match + case _: RefTree | _: Apply | _: TypeApply if arg.symbol.is(Erased) => + dropInlines.transform(arg) + case _ => + PruneErasedDefs.trivialErasedTree(arg) + else 
dropInlines.transform(arg))) else tree def app1 = @@ -331,7 +331,7 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase // Check the constructor type as well; it could be an illegal singleton type // which would not be reflected as `tree.tpe` ctx.typer.checkClassType(nu.tpe, tree.srcPos, traitReq = false, stablePrefixReq = false) - Checking.checkInstantiable(tree.tpe, nu.srcPos) + Checking.checkInstantiable(tree.tpe, nu.tpe, nu.srcPos) withNoCheckNews(nu :: Nil)(app1) case _ => app1 @@ -340,10 +340,9 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase val patterns1 = transform(patterns) cpy.UnApply(tree)(transform(fun), transform(implicits), patterns1) case tree: TypeApply => - if tree.symbol.isQuote then + if tree.symbol == defn.QuotedTypeModule_of then ctx.compilationUnit.needsStaging = true - if tree.symbol.is(Inline) then - ctx.compilationUnit.needsInlining = true + registerNeedsInlining(tree) val tree1 @ TypeApply(fn, args) = normalizeTypeArgs(tree) for arg <- args do checkInferredWellFormed(arg) @@ -360,6 +359,8 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase } case Inlined(call, bindings, expansion) if !call.isEmpty => val pos = call.sourcePos + CrossVersionChecks.checkExperimentalRef(call.symbol, pos) + withMode(Mode.InlinedCall)(transform(call)) val callTrace = Inlines.inlineCallTrace(call.symbol, pos)(using ctx.withSource(pos.source)) cpy.Inlined(tree)(callTrace, transformSub(bindings), transform(expansion)(using inlineContext(call))) case templ: Template => @@ -372,33 +373,47 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase ) } case tree: ValDef => + registerIfHasMacroAnnotations(tree) checkErasedDef(tree) val tree1 = cpy.ValDef(tree)(rhs = normalizeErasedRhs(tree.rhs, tree.symbol)) if tree1.removeAttachment(desugar.UntupledParam).isDefined then checkStableSelection(tree.rhs) processValOrDefDef(super.transform(tree1)) case 
tree: DefDef => + registerIfHasMacroAnnotations(tree) checkErasedDef(tree) annotateContextResults(tree) val tree1 = cpy.DefDef(tree)(rhs = normalizeErasedRhs(tree.rhs, tree.symbol)) processValOrDefDef(superAcc.wrapDefDef(tree1)(super.transform(tree1).asInstanceOf[DefDef])) case tree: TypeDef => + registerIfHasMacroAnnotations(tree) val sym = tree.symbol if (sym.isClass) VarianceChecker.check(tree) annotateExperimental(sym) + checkMacroAnnotation(sym) + if sym.isOneOf(GivenOrImplicit) then + sym.keepAnnotationsCarrying(thisPhase, Set(defn.CompanionClassMetaAnnot), orNoneOf = defn.MetaAnnots) tree.rhs match case impl: Template => for parent <- impl.parents do Checking.checkTraitInheritance(parent.tpe.classSymbol, sym.asClass, parent.srcPos) + // Constructor parameters are in scope when typing a parent. + // While they can safely appear in a parent tree, to preserve + // soundness we need to ensure they don't appear in a parent + // type (#16270). + val illegalRefs = parent.tpe.namedPartsWith(p => p.symbol.is(ParamAccessor) && (p.symbol.owner eq sym)) + if illegalRefs.nonEmpty then + report.error( + em"The type of a class parent cannot refer to constructor parameters, but ${parent.tpe} refers to ${illegalRefs.map(_.name.show).mkString(",")}", parent.srcPos) // Add SourceFile annotation to top-level classes if sym.owner.is(Package) then if ctx.compilationUnit.source.exists && sym != defn.SourceFileAnnot then val reference = ctx.settings.sourceroot.value val relativePath = util.SourceFile.relativePath(ctx.compilationUnit.source, reference) - sym.addAnnotation(Annotation.makeSourceFile(relativePath)) + sym.addAnnotation(Annotation.makeSourceFile(relativePath, tree.span)) if Feature.pureFunsEnabled && sym != defn.WithPureFunsAnnot then - sym.addAnnotation(Annotation(defn.WithPureFunsAnnot)) + sym.addAnnotation(Annotation(defn.WithPureFunsAnnot, tree.span)) else if !sym.is(Param) && !sym.owner.isOneOf(AbstractOrTrait) then Checking.checkGoodBounds(tree.symbol) @@ -414,7 
+429,7 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase Checking.checkGoodBounds(tree.symbol) super.transform(tree) case tree: New if isCheckable(tree) => - Checking.checkInstantiable(tree.tpe, tree.srcPos) + Checking.checkInstantiable(tree.tpe, tree.tpe, tree.srcPos) super.transform(tree) case tree: Closure if !tree.tpt.isEmpty => Checking.checkRealizable(tree.tpt.tpe, tree.srcPos, "SAM type") @@ -434,6 +449,13 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase case SingletonTypeTree(ref) => Checking.checkRealizable(ref.tpe, ref.srcPos) super.transform(tree) + case tree: TypeBoundsTree => + val TypeBoundsTree(lo, hi, alias) = tree + if !alias.isEmpty then + val bounds = TypeBounds(lo.tpe, hi.tpe) + if !bounds.contains(alias.tpe) then + report.error(em"type ${alias.tpe} outside bounds $bounds", tree.srcPos) + super.transform(tree) case tree: TypeTree => tree.withType( tree.tpe match { @@ -461,6 +483,9 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase ) case Block(_, Closure(_, _, tpt)) if ExpandSAMs.needsWrapperClass(tpt.tpe) => superAcc.withInvalidCurrentClass(super.transform(tree)) + case _: Quote => + ctx.compilationUnit.needsStaging = true + super.transform(tree) case tree => super.transform(tree) } @@ -480,6 +505,20 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase private def normalizeErasedRhs(rhs: Tree, sym: Symbol)(using Context) = if (sym.isEffectivelyErased) dropInlines.transform(rhs) else rhs + private def registerNeedsInlining(tree: Tree)(using Context): Unit = + if tree.symbol.is(Inline) && !Inlines.inInlineMethod && !ctx.mode.is(Mode.InlinedCall) then + ctx.compilationUnit.needsInlining = true + + /** Check if the definition has macro annotation and sets `compilationUnit.hasMacroAnnotations` if needed. 
*/ + private def registerIfHasMacroAnnotations(tree: DefTree)(using Context) = + if !Inlines.inInlineMethod && MacroAnnotations.hasMacroAnnotation(tree.symbol) then + ctx.compilationUnit.hasMacroAnnotations = true + + /** Check macro annotations implementations */ + private def checkMacroAnnotation(sym: Symbol)(using Context) = + if sym.derivesFrom(defn.MacroAnnotationClass) && !sym.isStatic then + report.error("classes that extend MacroAnnotation must not be inner/local classes", sym.srcPos) + private def checkErasedDef(tree: ValOrDefDef)(using Context): Unit = if tree.symbol.is(Erased, butNot = Macro) then val tpe = tree.rhs.tpe @@ -490,8 +529,8 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase private def annotateExperimental(sym: Symbol)(using Context): Unit = if sym.is(Module) && sym.companionClass.hasAnnotation(defn.ExperimentalAnnot) then - sym.addAnnotation(defn.ExperimentalAnnot) - sym.companionModule.addAnnotation(defn.ExperimentalAnnot) + sym.addAnnotation(Annotation(defn.ExperimentalAnnot, sym.span)) + sym.companionModule.addAnnotation(Annotation(defn.ExperimentalAnnot, sym.span)) } } diff --git a/compiler/src/dotty/tools/dotc/transform/ProtectedAccessors.scala b/compiler/src/dotty/tools/dotc/transform/ProtectedAccessors.scala index 98e835293303..6d8f7bdb32cb 100644 --- a/compiler/src/dotty/tools/dotc/transform/ProtectedAccessors.scala +++ b/compiler/src/dotty/tools/dotc/transform/ProtectedAccessors.scala @@ -70,7 +70,7 @@ class ProtectedAccessors extends MiniPhase { override def ifNoHost(reference: RefTree)(using Context): Tree = { val curCls = ctx.owner.enclosingClass transforms.println(i"${curCls.ownersIterator.toList}%, %") - report.error(i"illegal access to protected ${reference.symbol.showLocated} from $curCls", + report.error(em"illegal access to protected ${reference.symbol.showLocated} from $curCls", reference.srcPos) reference } diff --git a/compiler/src/dotty/tools/dotc/transform/PruneErasedDefs.scala 
b/compiler/src/dotty/tools/dotc/transform/PruneErasedDefs.scala index 568512207fde..17f2d11ccfec 100644 --- a/compiler/src/dotty/tools/dotc/transform/PruneErasedDefs.scala +++ b/compiler/src/dotty/tools/dotc/transform/PruneErasedDefs.scala @@ -13,6 +13,7 @@ import ast.tpd import SymUtils._ import config.Feature import Decorators.* +import dotty.tools.dotc.core.Types.MethodType /** This phase makes all erased term members of classes private so that they cannot * conflict with non-erased members. This is needed so that subsequent phases like @@ -38,8 +39,11 @@ class PruneErasedDefs extends MiniPhase with SymTransformer { thisTransform => else sym.copySymDenotation(initFlags = sym.flags | Private) override def transformApply(tree: Apply)(using Context): Tree = - if !tree.fun.tpe.widen.isErasedMethod then tree - else cpy.Apply(tree)(tree.fun, tree.args.map(trivialErasedTree)) + tree.fun.tpe.widen match + case mt: MethodType if mt.hasErasedParams => + cpy.Apply(tree)(tree.fun, tree.args.zip(mt.erasedParams).map((a, e) => if e then trivialErasedTree(a) else a)) + case _ => + tree override def transformValDef(tree: ValDef)(using Context): Tree = checkErasedInExperimental(tree.symbol) diff --git a/compiler/src/dotty/tools/dotc/transform/Recheck.scala b/compiler/src/dotty/tools/dotc/transform/Recheck.scala index 6d783854ae35..527c73d02250 100644 --- a/compiler/src/dotty/tools/dotc/transform/Recheck.scala +++ b/compiler/src/dotty/tools/dotc/transform/Recheck.scala @@ -4,7 +4,7 @@ package transform import core.* import Symbols.*, Contexts.*, Types.*, ContextOps.*, Decorators.*, SymDenotations.* -import Flags.*, SymUtils.*, NameKinds.*, Denotations.Denotation +import Flags.*, SymUtils.*, NameKinds.*, Denotations.{Denotation, SingleDenotation} import ast.* import Names.Name import Phases.Phase @@ -22,6 +22,7 @@ import StdNames.nme import reporting.trace import annotation.constructorOnly import cc.CaptureSet.IdempotentCaptRefMap +import annotation.tailrec object Recheck: import 
tpd.* @@ -71,7 +72,7 @@ object Recheck: val symd = sym.denot symd.validFor.firstPhaseId == phase.id + 1 && (sym.originDenotation ne symd) - extension (tree: Tree) + extension [T <: Tree](tree: T) /** Remember `tpe` as the type of `tree`, which might be different from the * type stored in the tree itself, unless a type was already remembered for `tree`. @@ -86,11 +87,27 @@ object Recheck: if tpe ne tree.tpe then tree.putAttachment(RecheckedType, tpe) /** The remembered type of the tree, or if none was installed, the original type */ - def knownType = + def knownType: Type = tree.attachmentOrElse(RecheckedType, tree.tpe) def hasRememberedType: Boolean = tree.hasAttachment(RecheckedType) + def withKnownType(using Context): T = tree.getAttachment(RecheckedType) match + case Some(tpe) => tree.withType(tpe).asInstanceOf[T] + case None => tree + + extension (tpe: Type) + + /** Map ExprType => T to () ?=> T (and analogously for pure versions). + * Even though this phase runs after ElimByName, ExprTypes can still occur + * as by-name arguments of applied types. See note in doc comment for + * ElimByName phase. Test case is bynamefun.scala. + */ + def mapExprType(using Context): Type = tpe match + case ExprType(rt) => defn.ByNameFunction(rt) + case _ => tpe + + /** A base class that runs a simplified typer pass over an already re-typed program. The pass * does not transform trees but returns instead the re-typed type of each tree as it is * traversed. The Recheck phase must be directly preceded by a phase of type PreRecheck. 
@@ -116,7 +133,9 @@ abstract class Recheck extends Phase, SymTransformer: else sym def run(using Context): Unit = - newRechecker().checkUnit(ctx.compilationUnit) + val rechecker = newRechecker() + rechecker.checkUnit(ctx.compilationUnit) + rechecker.reset() def newRechecker()(using Context): Rechecker @@ -136,6 +155,12 @@ abstract class Recheck extends Phase, SymTransformer: */ def keepType(tree: Tree): Boolean = keepAllTypes + private val prevSelDenots = util.HashMap[NamedType, Denotation]() + + def reset()(using Context): Unit = + for (ref, mbr) <- prevSelDenots.iterator do + ref.withDenot(mbr) + /** Constant-folded rechecked type `tp` of tree `tree` */ protected def constFold(tree: Tree, tp: Type)(using Context): Type = val tree1 = tree.withType(tp) @@ -147,18 +172,42 @@ abstract class Recheck extends Phase, SymTransformer: def recheckSelect(tree: Select, pt: Type)(using Context): Type = val Select(qual, name) = tree - recheckSelection(tree, recheck(qual, AnySelectionProto).widenIfUnstable, name, pt) + val proto = + if tree.symbol == defn.Any_asInstanceOf then WildcardType + else AnySelectionProto + recheckSelection(tree, recheck(qual, proto).widenIfUnstable, name, pt) + + /** When we select the `apply` of a function with type such as `(=> A) => B`, + * we need to convert the parameter type `=> A` to `() ?=> A`. See doc comment + * of `mapExprType`. 
+ */ + def normalizeByName(mbr: SingleDenotation)(using Context): SingleDenotation = mbr.info match + case mt: MethodType if mt.paramInfos.exists(_.isInstanceOf[ExprType]) => + mbr.derivedSingleDenotation(mbr.symbol, + mt.derivedLambdaType(paramInfos = mt.paramInfos.map(_.mapExprType))) + case _ => + mbr def recheckSelection(tree: Select, qualType: Type, name: Name, sharpen: Denotation => Denotation)(using Context): Type = if name.is(OuterSelectName) then tree.tpe else //val pre = ta.maybeSkolemizePrefix(qualType, name) - val mbr = sharpen( + val mbr = normalizeByName( + sharpen( qualType.findMember(name, qualType, excluded = if tree.symbol.is(Private) then EmptyFlags else Private - )).suchThat(tree.symbol == _) - constFold(tree, qualType.select(name, mbr)) + )).suchThat(tree.symbol == _)) + val newType = tree.tpe match + case prevType: NamedType => + val prevDenot = prevType.denot + val newType = qualType.select(name, mbr) + if (newType eq prevType) && (mbr.info ne prevDenot.info) && !prevSelDenots.contains(prevType) then + prevSelDenots(prevType) = prevDenot + newType + case _ => + qualType.select(name, mbr) + constFold(tree, newType) //.showing(i"recheck select $qualType . 
$name : ${mbr.info} = $result") @@ -212,7 +261,10 @@ abstract class Recheck extends Phase, SymTransformer: mt.instantiate(argTypes) def recheckApply(tree: Apply, pt: Type)(using Context): Type = - recheck(tree.fun).widen match + val funTp = recheck(tree.fun) + // reuse the tree's type on signature polymorphic methods, instead of using the (wrong) rechecked one + val funtpe = if tree.fun.symbol.originalSignaturePolymorphic.exists then tree.fun.tpe else funTp + funtpe.widen match case fntpe: MethodType => assert(fntpe.paramInfos.hasSameLengthAs(tree.args)) val formals = @@ -220,7 +272,7 @@ abstract class Recheck extends Phase, SymTransformer: else fntpe.paramInfos def recheckArgs(args: List[Tree], formals: List[Type], prefs: List[ParamRef]): List[Type] = args match case arg :: args1 => - val argType = recheck(arg, formals.head) + val argType = recheck(arg, formals.head.mapExprType) val formals1 = if fntpe.isParamDependent then formals.tail.map(_.substParam(prefs.head, argType)) @@ -232,6 +284,8 @@ abstract class Recheck extends Phase, SymTransformer: val argTypes = recheckArgs(tree.args, formals, fntpe.paramRefs) constFold(tree, instantiate(fntpe, argTypes, tree.fun.symbol)) //.showing(i"typed app $tree : $fntpe with ${tree.args}%, % : $argTypes%, % = $result") + case tp => + assert(false, i"unexpected type of ${tree.fun}: $funtpe") def recheckTypeApply(tree: TypeApply, pt: Type)(using Context): Type = recheck(tree.fun).widen match @@ -262,7 +316,7 @@ abstract class Recheck extends Phase, SymTransformer: recheckBlock(tree.stats, tree.expr, pt) def recheckInlined(tree: Inlined, pt: Type)(using Context): Type = - recheckBlock(tree.bindings, tree.expansion, pt) + recheckBlock(tree.bindings, tree.expansion, pt)(using inlineContext(tree.call)) def recheckIf(tree: If, pt: Type)(using Context): Type = recheck(tree.cond, defn.BooleanType) @@ -297,7 +351,20 @@ abstract class Recheck extends Phase, SymTransformer: val rawType = recheck(tree.expr) val ownType = 
avoidMap(rawType) - checkConforms(ownType, tree.from.symbol.returnProto, tree) + + // The pattern matching translation, which runs before this phase + // sometimes instantiates return types with singleton type alternatives + // but the returned expression is widened. We compensate by widening the expected + // type as well. See also `widenSkolems` in `checkConformsExpr` which fixes + // a more general problem. It turns out that pattern matching returns + // are not checked by Ycheck, that's why these problems were allowed to slip + // through. + def widened(tp: Type): Type = tp match + case tp: SingletonType => tp.widen + case tp: AndOrType => tp.derivedAndOrType(widened(tp.tp1), widened(tp.tp2)) + case tp @ AnnotatedType(tp1, ann) => tp.derivedAnnotatedType(widened(tp1), ann) + case _ => tp + checkConforms(ownType, widened(tree.from.symbol.returnProto), tree) defn.NothingType end recheckReturn @@ -339,7 +406,14 @@ abstract class Recheck extends Phase, SymTransformer: NoType def recheckStats(stats: List[Tree])(using Context): Unit = - stats.foreach(recheck(_)) + @tailrec def traverse(stats: List[Tree])(using Context): Unit = stats match + case (imp: Import) :: rest => + traverse(rest)(using ctx.importContext(imp, imp.symbol)) + case stat :: rest => + recheck(stat) + traverse(rest) + case _ => + traverse(stats) def recheckDef(tree: ValOrDefDef, sym: Symbol)(using Context): Unit = inContext(ctx.localContext(tree, sym)) { @@ -423,6 +497,27 @@ abstract class Recheck extends Phase, SymTransformer: throw ex } + /** Typing and previous transforms sometiems leaves skolem types in prefixes of + * NamedTypes in `expected` that do not match the `actual` Type. -Ycheck does + * not complain (need to find out why), but a full recheck does. We compensate + * by de-skolemizing everywhere in `expected` except when variance is negative. 
+ * @return If `tp` contains SkolemTypes in covariant or invariant positions, + * the type where these SkolemTypes are mapped to their underlying type. + * Otherwise, `tp` itself + */ + def widenSkolems(tp: Type)(using Context): Type = + object widenSkolems extends TypeMap, IdempotentCaptRefMap: + var didWiden: Boolean = false + def apply(t: Type): Type = t match + case t: SkolemType if variance >= 0 => + didWiden = true + apply(t.underlying) + case t: LazyRef => t + case t @ AnnotatedType(t1, ann) => t.derivedAnnotatedType(apply(t1), ann) + case _ => mapOver(t) + val tp1 = widenSkolems(tp) + if widenSkolems.didWiden then tp1 else tp + /** If true, print info for some successful checkConforms operations (failing ones give * an error message in any case). */ @@ -438,11 +533,16 @@ abstract class Recheck extends Phase, SymTransformer: def checkConformsExpr(actual: Type, expected: Type, tree: Tree)(using Context): Unit = //println(i"check conforms $actual <:< $expected") - val isCompatible = + + def isCompatible(expected: Type): Boolean = actual <:< expected || expected.isRepeatedParam - && actual <:< expected.translateFromRepeated(toArray = tree.tpe.isRef(defn.ArrayClass)) - if !isCompatible then + && isCompatible(expected.translateFromRepeated(toArray = tree.tpe.isRef(defn.ArrayClass))) + || { + val widened = widenSkolems(expected) + (widened ne expected) && isCompatible(widened) + } + if !isCompatible(expected) then recheckr.println(i"conforms failed for ${tree}: $actual vs $expected") err.typeMismatch(tree.withType(actual), expected) else if debugSuccesses then @@ -450,6 +550,7 @@ abstract class Recheck extends Phase, SymTransformer: case _: Ident => println(i"SUCCESS $tree:\n${TypeComparer.explained(_.isSubType(actual, expected))}") case _ => + end checkConformsExpr def checkUnit(unit: CompilationUnit)(using Context): Unit = recheck(unit.tpdTree) diff --git a/compiler/src/dotty/tools/dotc/transform/ReifiedReflect.scala 
b/compiler/src/dotty/tools/dotc/transform/ReifiedReflect.scala index e462f82b1dad..6e73d683fa2c 100644 --- a/compiler/src/dotty/tools/dotc/transform/ReifiedReflect.scala +++ b/compiler/src/dotty/tools/dotc/transform/ReifiedReflect.scala @@ -17,7 +17,6 @@ import dotty.tools.dotc.core.Annotations._ import dotty.tools.dotc.core.Names._ import dotty.tools.dotc.core.StdNames._ import dotty.tools.dotc.quoted._ -import dotty.tools.dotc.transform.TreeMapWithStages._ import scala.annotation.constructorOnly @@ -76,8 +75,8 @@ trait ReifiedReflect: .select(defn.Quotes_reflect_TypeRepr_of) .appliedToType(tpe) .appliedTo( - ref(defn.QuotedTypeModule_of) - .appliedToType(tpe) + tpd.Quote(TypeTree(tpe), Nil) + .select(nme.apply) .appliedTo(quotesTree) ) diff --git a/compiler/src/dotty/tools/dotc/transform/RepeatableAnnotations.scala b/compiler/src/dotty/tools/dotc/transform/RepeatableAnnotations.scala index e8f8a80e1a0d..d6c11fe36748 100644 --- a/compiler/src/dotty/tools/dotc/transform/RepeatableAnnotations.scala +++ b/compiler/src/dotty/tools/dotc/transform/RepeatableAnnotations.scala @@ -10,6 +10,7 @@ import Symbols.defn import Constants._ import Types._ import Decorators._ +import Flags._ import scala.collection.mutable @@ -33,7 +34,7 @@ class RepeatableAnnotations extends MiniPhase: val annsByType = stableGroupBy(annotations, _.symbol) annsByType.flatMap { case (_, a :: Nil) => a :: Nil - case (sym, anns) if sym.derivesFrom(defn.ClassfileAnnotationClass) => + case (sym, anns) if sym.is(JavaDefined) => sym.getAnnotation(defn.JavaRepeatableAnnot).flatMap(_.argumentConstant(0)) match case Some(Constant(containerTpe: Type)) => val clashingAnns = annsByType.getOrElse(containerTpe.classSymbol, Nil) @@ -44,7 +45,7 @@ class RepeatableAnnotations extends MiniPhase: Nil else val aggregated = JavaSeqLiteral(anns.map(_.tree).toList, TypeTree(sym.typeRef)) - Annotation(containerTpe, NamedArg("value".toTermName, aggregated)) :: Nil + Annotation(containerTpe, NamedArg("value".toTermName, 
aggregated), sym.span) :: Nil case _ => val pos = anns.head.tree.srcPos report.error("Not repeatable annotation repeated", pos) diff --git a/compiler/src/dotty/tools/dotc/transform/ResolveSuper.scala b/compiler/src/dotty/tools/dotc/transform/ResolveSuper.scala index dd109ce153eb..99b6be1eea8a 100644 --- a/compiler/src/dotty/tools/dotc/transform/ResolveSuper.scala +++ b/compiler/src/dotty/tools/dotc/transform/ResolveSuper.scala @@ -119,6 +119,9 @@ object ResolveSuper { report.error(IllegalSuperAccessor(base, memberName, targetName, acc, accTp, other.symbol, otherTp), base.srcPos) bcs = bcs.tail } + if sym.is(Accessor) then + report.error( + em"parent ${acc.owner} has a super call which binds to the value ${sym.showFullName}. Super calls can only target methods.", base) sym.orElse { val originalName = acc.name.asTermName.originalOfSuperAccessorName report.error(em"Member method ${originalName.debugString} of mixin ${acc.owner} is missing a concrete super implementation in $base.", base.srcPos) diff --git a/compiler/src/dotty/tools/dotc/transform/SpecializeFunctions.scala b/compiler/src/dotty/tools/dotc/transform/SpecializeFunctions.scala index c1f891d6293a..2248fbc8d570 100644 --- a/compiler/src/dotty/tools/dotc/transform/SpecializeFunctions.scala +++ b/compiler/src/dotty/tools/dotc/transform/SpecializeFunctions.scala @@ -70,7 +70,7 @@ class SpecializeFunctions extends MiniPhase { /** Dispatch to specialized `apply`s in user code when available */ override def transformApply(tree: Apply)(using Context) = tree match { - case Apply(fun: NameTree, args) if fun.name == nme.apply && args.size <= 3 && fun.symbol.owner.isType => + case Apply(fun: NameTree, args) if fun.name == nme.apply && args.size <= 3 && fun.symbol.maybeOwner.isType => val argTypes = fun.tpe.widen.firstParamTypes.map(_.widenSingleton.dealias) val retType = tree.tpe.widenSingleton.dealias val isSpecializable = diff --git a/compiler/src/dotty/tools/dotc/transform/Splicer.scala 
b/compiler/src/dotty/tools/dotc/transform/Splicer.scala index 31c28d7b1854..741c770e2c77 100644 --- a/compiler/src/dotty/tools/dotc/transform/Splicer.scala +++ b/compiler/src/dotty/tools/dotc/transform/Splicer.scala @@ -19,6 +19,8 @@ import dotty.tools.dotc.core.Denotations.staticRef import dotty.tools.dotc.core.TypeErasure import dotty.tools.dotc.core.Constants.Constant +import dotty.tools.dotc.quoted.Interpreter + import scala.util.control.NonFatal import dotty.tools.dotc.util.SrcPos import dotty.tools.repl.AbstractFileClassLoader @@ -32,7 +34,8 @@ import scala.quoted.runtime.impl._ /** Utility class to splice quoted expressions */ object Splicer { - import tpd._ + import tpd.* + import Interpreter.* /** Splice the Tree for a Quoted expression. `${'{xyz}}` becomes `xyz` * and for `$xyz` the tree of `xyz` is interpreted for which the @@ -41,7 +44,7 @@ object Splicer { * See: `Staging` */ def splice(tree: Tree, splicePos: SrcPos, spliceExpansionPos: SrcPos, classLoader: ClassLoader)(using Context): Tree = tree match { - case Quoted(quotedTree) => quotedTree + case Quote(quotedTree, Nil) => quotedTree case _ => val macroOwner = newSymbol(ctx.owner, nme.MACROkw, Macro | Synthetic, defn.AnyType, coord = tree.span) try @@ -50,7 +53,7 @@ object Splicer { val oldContextClassLoader = Thread.currentThread().getContextClassLoader Thread.currentThread().setContextClassLoader(classLoader) try { - val interpreter = new Interpreter(splicePos, classLoader) + val interpreter = new SpliceInterpreter(splicePos, classLoader) // Some parts of the macro are evaluated during the unpickling performed in quotedExprToTree val interpretedExpr = interpreter.interpret[Quotes => scala.quoted.Expr[Any]](tree) @@ -66,7 +69,7 @@ object Splicer { throw ex case ex: scala.quoted.runtime.StopMacroExpansion => if !ctx.reporter.hasErrors then - report.error("Macro expansion was aborted by the macro without any errors reported. 
Macros should issue errors to end-users to facilitate debugging when aborting a macro expansion.", splicePos) + report.error("Macro expansion was aborted by the macro without any errors reported. Macros should issue errors to end-users when aborting a macro expansion with StopMacroExpansion.", splicePos) // errors have been emitted EmptyTree case ex: StopInterpretation => @@ -74,16 +77,16 @@ object Splicer { ref(defn.Predef_undefined).withType(ErrorType(ex.msg)) case NonFatal(ex) => val msg = - s"""Failed to evaluate macro. - | Caused by ${ex.getClass}: ${if (ex.getMessage == null) "" else ex.getMessage} - | ${ex.getStackTrace.takeWhile(_.getClassName != "dotty.tools.dotc.transform.Splicer$").drop(1).mkString("\n ")} - """.stripMargin + em"""Failed to evaluate macro. + | Caused by ${ex.getClass}: ${if (ex.getMessage == null) "" else ex.getMessage} + | ${ex.getStackTrace.takeWhile(_.getClassName != "dotty.tools.dotc.transform.Splicer$").drop(1).mkString("\n ")} + """ report.error(msg, spliceExpansionPos) ref(defn.Predef_undefined).withType(ErrorType(msg)) } } - /** Checks that no symbol that whas generated within the macro expansion has an out of scope reference */ + /** Checks that no symbol that was generated within the macro expansion has an out of scope reference */ def checkEscapedVariables(tree: Tree, expansionOwner: Symbol)(using Context): tree.type = new TreeTraverser { private[this] var locals = Set.empty[Symbol] @@ -116,7 +119,10 @@ object Splicer { sym.exists && !sym.is(Package) && sym.owner.ownersIterator.exists(x => x == expansionOwner || // symbol was generated within this macro expansion - x.is(Macro, butNot = Method) && x.name == nme.MACROkw // symbol was generated within another macro expansion + { // symbol was generated within another macro expansion + isMacroOwner(x) && + !ctx.owner.ownersIterator.contains(x) + } ) && !locals.contains(sym) // symbol is not in current scope }.traverse(tree) @@ -130,7 +136,7 @@ object Splicer { * See: `Staging` */ 
def checkValidMacroBody(tree: Tree)(using Context): Unit = tree match { - case Quoted(_) => // ok + case Quote(_, Nil) => // ok case _ => type Env = Set[Symbol] @@ -149,15 +155,15 @@ object Splicer { case Block(Nil, expr) => checkIfValidArgument(expr) case Typed(expr, _) => checkIfValidArgument(expr) - case Apply(Select(Apply(fn, quoted :: Nil), nme.apply), _) if fn.symbol == defn.QuotedRuntime_exprQuote => + case Apply(Select(Quote(body, _), nme.apply), _) => val noSpliceChecker = new TreeTraverser { def traverse(tree: Tree)(using Context): Unit = tree match - case Spliced(_) => + case Splice(_) => report.error("Quoted argument of macros may not have splices", tree.srcPos) case _ => traverseChildren(tree) } - noSpliceChecker.traverse(quoted) + noSpliceChecker.traverse(body) case Apply(TypeApply(fn, List(quoted)), _)if fn.symbol == defn.QuotedTypeModule_of => // OK @@ -197,7 +203,7 @@ object Splicer { case Typed(expr, _) => checkIfValidStaticCall(expr) - case Apply(Select(Apply(fn, quoted :: Nil), nme.apply), _) if fn.symbol == defn.QuotedRuntime_exprQuote => + case Apply(Select(Quote(quoted, Nil), nme.apply), _) => // OK, canceled and warning emitted case Call(fn, args) @@ -219,350 +225,37 @@ object Splicer { checkIfValidStaticCall(tree)(using Set.empty) } - /** Tree interpreter that evaluates the tree */ - private class Interpreter(pos: SrcPos, classLoader: ClassLoader)(using Context) { - - type Env = Map[Symbol, Object] - - /** Returns the interpreted result of interpreting the code a call to the symbol with default arguments. - * Return Some of the result or None if some error happen during the interpretation. - */ - def interpret[T](tree: Tree)(implicit ct: ClassTag[T]): Option[T] = - interpretTree(tree)(Map.empty) match { - case obj: T => Some(obj) - case obj => - // TODO upgrade to a full type tag check or something similar - report.error(s"Interpreted tree returned a result of an unexpected type. 
Expected ${ct.runtimeClass} but was ${obj.getClass}", pos) - None - } + /** Is this the dummy owner of a macro expansion */ + def isMacroOwner(sym: Symbol)(using Context): Boolean = + sym.is(Macro, butNot = Method) && sym.name == nme.MACROkw - def interpretTree(tree: Tree)(implicit env: Env): Object = tree match { - case Apply(Select(Apply(TypeApply(fn, _), quoted :: Nil), nme.apply), _) if fn.symbol == defn.QuotedRuntime_exprQuote => - val quoted1 = quoted match { - case quoted: Ident if quoted.symbol.isAllOf(InlineByNameProxy) => + /** Is this the dummy owner of a macro expansion */ + def inMacroExpansion(using Context) = + ctx.owner.ownersIterator.exists(isMacroOwner) + + /** Tree interpreter that evaluates the tree. + * Interpreter is assumed to start at quotation level -1. + */ + private class SpliceInterpreter(pos: SrcPos, classLoader: ClassLoader)(using Context) extends Interpreter(pos, classLoader) { + + override protected def interpretTree(tree: Tree)(implicit env: Env): Object = tree match { + // Interpret level -1 quoted code `'{...}` (assumed without level 0 splices) + case Apply(Select(Quote(body, _), nme.apply), _) => + val body1 = body match { + case expr: Ident if expr.symbol.isAllOf(InlineByNameProxy) => // inline proxy for by-name parameter - quoted.symbol.defTree.asInstanceOf[DefDef].rhs - case Inlined(EmptyTree, _, quoted) => quoted - case _ => quoted + expr.symbol.defTree.asInstanceOf[DefDef].rhs + case Inlined(EmptyTree, _, body1) => body1 + case _ => body } - interpretQuote(quoted1) + new ExprImpl(Inlined(EmptyTree, Nil, QuoteUtils.changeOwnerOfTree(body1, ctx.owner)).withSpan(body1.span), SpliceScope.getCurrent) + // Interpret level -1 `Type.of[T]` case Apply(TypeApply(fn, quoted :: Nil), _) if fn.symbol == defn.QuotedTypeModule_of => - interpretTypeQuote(quoted) - - case Literal(Constant(value)) => - interpretLiteral(value) - - case tree: Ident if tree.symbol.is(Inline, butNot = Method) => - tree.tpe.widenTermRefExpr match - case 
ConstantType(c) => c.value.asInstanceOf[Object] - case _ => throw new StopInterpretation(em"${tree.symbol} could not be inlined", tree.srcPos) - - // TODO disallow interpreted method calls as arguments - case Call(fn, args) => - if (fn.symbol.isConstructor && fn.symbol.owner.owner.is(Package)) - interpretNew(fn.symbol, args.flatten.map(interpretTree)) - else if (fn.symbol.is(Module)) - interpretModuleAccess(fn.symbol) - else if (fn.symbol.is(Method) && fn.symbol.isStatic) { - val staticMethodCall = interpretedStaticMethodCall(fn.symbol.owner, fn.symbol) - staticMethodCall(interpretArgs(args, fn.symbol.info)) - } - else if fn.symbol.isStatic then - assert(args.isEmpty) - interpretedStaticFieldAccess(fn.symbol) - else if (fn.qualifier.symbol.is(Module) && fn.qualifier.symbol.isStatic) - if (fn.name == nme.asInstanceOfPM) - interpretModuleAccess(fn.qualifier.symbol) - else { - val staticMethodCall = interpretedStaticMethodCall(fn.qualifier.symbol.moduleClass, fn.symbol) - staticMethodCall(interpretArgs(args, fn.symbol.info)) - } - else if (env.contains(fn.symbol)) - env(fn.symbol) - else if (tree.symbol.is(InlineProxy)) - interpretTree(tree.symbol.defTree.asInstanceOf[ValOrDefDef].rhs) - else - unexpectedTree(tree) - - case closureDef((ddef @ DefDef(_, ValDefs(arg :: Nil) :: Nil, _, _))) => - (obj: AnyRef) => interpretTree(ddef.rhs)(using env.updated(arg.symbol, obj)) - - // Interpret `foo(j = x, i = y)` which it is expanded to - // `val j$1 = x; val i$1 = y; foo(i = i$1, j = j$1)` - case Block(stats, expr) => interpretBlock(stats, expr) - case NamedArg(_, arg) => interpretTree(arg) - - case Inlined(_, bindings, expansion) => interpretBlock(bindings, expansion) - - case Typed(expr, _) => - interpretTree(expr) - - case SeqLiteral(elems, _) => - interpretVarargs(elems.map(e => interpretTree(e))) + new TypeImpl(QuoteUtils.changeOwnerOfTree(quoted, ctx.owner), SpliceScope.getCurrent) case _ => - unexpectedTree(tree) - } - - private def interpretArgs(argss: 
List[List[Tree]], fnType: Type)(using Env): List[Object] = { - def interpretArgsGroup(args: List[Tree], argTypes: List[Type]): List[Object] = - assert(args.size == argTypes.size) - val view = - for (arg, info) <- args.lazyZip(argTypes) yield - info match - case _: ExprType => () => interpretTree(arg) // by-name argument - case _ => interpretTree(arg) // by-value argument - view.toList - - fnType.dealias match - case fnType: MethodType if fnType.isErasedMethod => interpretArgs(argss, fnType.resType) - case fnType: MethodType => - val argTypes = fnType.paramInfos - assert(argss.head.size == argTypes.size) - interpretArgsGroup(argss.head, argTypes) ::: interpretArgs(argss.tail, fnType.resType) - case fnType: AppliedType if defn.isContextFunctionType(fnType) => - val argTypes :+ resType = fnType.args: @unchecked - interpretArgsGroup(argss.head, argTypes) ::: interpretArgs(argss.tail, resType) - case fnType: PolyType => interpretArgs(argss, fnType.resType) - case fnType: ExprType => interpretArgs(argss, fnType.resType) - case _ => - assert(argss.isEmpty) - Nil - } - - private def interpretBlock(stats: List[Tree], expr: Tree)(implicit env: Env) = { - var unexpected: Option[Object] = None - val newEnv = stats.foldLeft(env)((accEnv, stat) => stat match { - case stat: ValDef => - accEnv.updated(stat.symbol, interpretTree(stat.rhs)(accEnv)) - case stat => - if (unexpected.isEmpty) - unexpected = Some(unexpectedTree(stat)) - accEnv - }) - unexpected.getOrElse(interpretTree(expr)(newEnv)) - } - - private def interpretQuote(tree: Tree)(implicit env: Env): Object = - new ExprImpl(Inlined(EmptyTree, Nil, QuoteUtils.changeOwnerOfTree(tree, ctx.owner)).withSpan(tree.span), SpliceScope.getCurrent) - - private def interpretTypeQuote(tree: Tree)(implicit env: Env): Object = - new TypeImpl(QuoteUtils.changeOwnerOfTree(tree, ctx.owner), SpliceScope.getCurrent) - - private def interpretLiteral(value: Any)(implicit env: Env): Object = - value.asInstanceOf[Object] - - private def 
interpretVarargs(args: List[Object])(implicit env: Env): Object = - args.toSeq - - private def interpretedStaticMethodCall(moduleClass: Symbol, fn: Symbol)(implicit env: Env): List[Object] => Object = { - val (inst, clazz) = - try - if (moduleClass.name.startsWith(str.REPL_SESSION_LINE)) - (null, loadReplLineClass(moduleClass)) - else { - val inst = loadModule(moduleClass) - (inst, inst.getClass) - } - catch - case MissingClassDefinedInCurrentRun(sym) if ctx.compilationUnit.isSuspendable => - if (ctx.settings.XprintSuspension.value) - report.echo(i"suspension triggered by a dependency on $sym", pos) - ctx.compilationUnit.suspend() // this throws a SuspendException - - val name = fn.name.asTermName - val method = getMethod(clazz, name, paramsSig(fn)) - (args: List[Object]) => stopIfRuntimeException(method.invoke(inst, args: _*), method) - } - - private def interpretedStaticFieldAccess(sym: Symbol)(implicit env: Env): Object = { - val clazz = loadClass(sym.owner.fullName.toString) - val field = clazz.getField(sym.name.toString) - field.get(null) - } - - private def interpretModuleAccess(fn: Symbol)(implicit env: Env): Object = - loadModule(fn.moduleClass) - - private def interpretNew(fn: Symbol, args: => List[Object])(implicit env: Env): Object = { - val clazz = loadClass(fn.owner.fullName.toString) - val constr = clazz.getConstructor(paramsSig(fn): _*) - constr.newInstance(args: _*).asInstanceOf[Object] - } - - private def unexpectedTree(tree: Tree)(implicit env: Env): Object = - throw new StopInterpretation("Unexpected tree could not be interpreted: " + tree, tree.srcPos) - - private def loadModule(sym: Symbol): Object = - if (sym.owner.is(Package)) { - // is top level object - val moduleClass = loadClass(sym.fullName.toString) - moduleClass.getField(str.MODULE_INSTANCE_FIELD).get(null) - } - else { - // nested object in an object - val className = { - val pack = sym.topLevelClass.owner - if (pack == defn.RootPackage || pack == defn.EmptyPackageClass) 
sym.flatName.toString - else pack.showFullName + "." + sym.flatName - } - val clazz = loadClass(className) - clazz.getConstructor().newInstance().asInstanceOf[Object] - } - - private def loadReplLineClass(moduleClass: Symbol)(implicit env: Env): Class[?] = { - val lineClassloader = new AbstractFileClassLoader(ctx.settings.outputDir.value, classLoader) - lineClassloader.loadClass(moduleClass.name.firstPart.toString) - } - - private def loadClass(name: String): Class[?] = - try classLoader.loadClass(name) - catch { - case _: ClassNotFoundException => - val msg = s"Could not find class $name in classpath" - throw new StopInterpretation(msg, pos) - } - - private def getMethod(clazz: Class[?], name: Name, paramClasses: List[Class[?]]): JLRMethod = - try clazz.getMethod(name.toString, paramClasses: _*) - catch { - case _: NoSuchMethodException => - val msg = em"Could not find method ${clazz.getCanonicalName}.$name with parameters ($paramClasses%, %)" - throw new StopInterpretation(msg, pos) - case MissingClassDefinedInCurrentRun(sym) if ctx.compilationUnit.isSuspendable => - if (ctx.settings.XprintSuspension.value) - report.echo(i"suspension triggered by a dependency on $sym", pos) - ctx.compilationUnit.suspend() // this throws a SuspendException - } - - private def stopIfRuntimeException[T](thunk: => T, method: JLRMethod): T = - try thunk - catch { - case ex: RuntimeException => - val sw = new StringWriter() - sw.write("A runtime exception occurred while executing macro expansion\n") - sw.write(ex.getMessage) - sw.write("\n") - ex.printStackTrace(new PrintWriter(sw)) - sw.write("\n") - throw new StopInterpretation(sw.toString, pos) - case ex: InvocationTargetException => - ex.getTargetException match { - case ex: scala.quoted.runtime.StopMacroExpansion => - throw ex - case MissingClassDefinedInCurrentRun(sym) if ctx.compilationUnit.isSuspendable => - if (ctx.settings.XprintSuspension.value) - report.echo(i"suspension triggered by a dependency on $sym", pos) - 
ctx.compilationUnit.suspend() // this throws a SuspendException - case targetException => - val sw = new StringWriter() - sw.write("Exception occurred while executing macro expansion.\n") - if (!ctx.settings.Ydebug.value) { - val end = targetException.getStackTrace.lastIndexWhere { x => - x.getClassName == method.getDeclaringClass.getCanonicalName && x.getMethodName == method.getName - } - val shortStackTrace = targetException.getStackTrace.take(end + 1) - targetException.setStackTrace(shortStackTrace) - } - targetException.printStackTrace(new PrintWriter(sw)) - sw.write("\n") - throw new StopInterpretation(sw.toString, pos) - } - } - - private object MissingClassDefinedInCurrentRun { - def unapply(targetException: NoClassDefFoundError)(using Context): Option[Symbol] = { - val className = targetException.getMessage - if (className == null) None - else { - val sym = staticRef(className.toTypeName).symbol - if (sym.isDefinedInCurrentRun) Some(sym) else None - } - } - } - - /** List of classes of the parameters of the signature of `sym` */ - private def paramsSig(sym: Symbol): List[Class[?]] = { - def paramClass(param: Type): Class[?] 
= { - def arrayDepth(tpe: Type, depth: Int): (Type, Int) = tpe match { - case JavaArrayType(elemType) => arrayDepth(elemType, depth + 1) - case _ => (tpe, depth) - } - def javaArraySig(tpe: Type): String = { - val (elemType, depth) = arrayDepth(tpe, 0) - val sym = elemType.classSymbol - val suffix = - if (sym == defn.BooleanClass) "Z" - else if (sym == defn.ByteClass) "B" - else if (sym == defn.ShortClass) "S" - else if (sym == defn.IntClass) "I" - else if (sym == defn.LongClass) "J" - else if (sym == defn.FloatClass) "F" - else if (sym == defn.DoubleClass) "D" - else if (sym == defn.CharClass) "C" - else "L" + javaSig(elemType) + ";" - ("[" * depth) + suffix - } - def javaSig(tpe: Type): String = tpe match { - case tpe: JavaArrayType => javaArraySig(tpe) - case _ => - // Take the flatten name of the class and the full package name - val pack = tpe.classSymbol.topLevelClass.owner - val packageName = if (pack == defn.EmptyPackageClass) "" else s"${pack.fullName}." - packageName + tpe.classSymbol.fullNameSeparated(FlatName).toString - } - - val sym = param.classSymbol - if (sym == defn.BooleanClass) classOf[Boolean] - else if (sym == defn.ByteClass) classOf[Byte] - else if (sym == defn.CharClass) classOf[Char] - else if (sym == defn.ShortClass) classOf[Short] - else if (sym == defn.IntClass) classOf[Int] - else if (sym == defn.LongClass) classOf[Long] - else if (sym == defn.FloatClass) classOf[Float] - else if (sym == defn.DoubleClass) classOf[Double] - else java.lang.Class.forName(javaSig(param), false, classLoader) - } - def getExtraParams(tp: Type): List[Type] = tp.widenDealias match { - case tp: AppliedType if defn.isContextFunctionType(tp) => - // Call context function type direct method - tp.args.init.map(arg => TypeErasure.erasure(arg)) ::: getExtraParams(tp.args.last) - case _ => Nil - } - val extraParams = getExtraParams(sym.info.finalResultType) - val allParams = TypeErasure.erasure(sym.info) match { - case meth: MethodType => meth.paramInfos ::: 
extraParams - case _ => extraParams - } - allParams.map(paramClass) - } - } - - - - /** Exception that stops interpretation if some issue is found */ - private class StopInterpretation(val msg: String, val pos: SrcPos) extends Exception - - object Call { - /** Matches an expression that is either a field access or an application - * It retruns a TermRef containing field accessed or a method reference and the arguments passed to it. - */ - def unapply(arg: Tree)(using Context): Option[(RefTree, List[List[Tree]])] = - Call0.unapply(arg).map((fn, args) => (fn, args.reverse)) - - private object Call0 { - def unapply(arg: Tree)(using Context): Option[(RefTree, List[List[Tree]])] = arg match { - case Select(Call0(fn, args), nme.apply) if defn.isContextFunctionType(fn.tpe.widenDealias.finalResultType) => - Some((fn, args)) - case fn: Ident => Some((tpd.desugarIdent(fn).withSpan(fn.span), Nil)) - case fn: Select => Some((fn, Nil)) - case Apply(f @ Call0(fn, args1), args2) => - if (f.tpe.widenDealias.isErasedMethod) Some((fn, args1)) - else Some((fn, args2 :: args1)) - case TypeApply(Call0(fn, args), _) => Some((fn, args)) - case _ => None - } + super.interpretTree(tree) } } } diff --git a/compiler/src/dotty/tools/dotc/transform/Splicing.scala b/compiler/src/dotty/tools/dotc/transform/Splicing.scala index ad3f0322130d..ff5dc5042eaf 100644 --- a/compiler/src/dotty/tools/dotc/transform/Splicing.scala +++ b/compiler/src/dotty/tools/dotc/transform/Splicing.scala @@ -14,15 +14,16 @@ import util.Spans._ import SymUtils._ import NameKinds._ import dotty.tools.dotc.ast.tpd -import StagingContext._ import scala.collection.mutable import dotty.tools.dotc.core.Annotations._ import dotty.tools.dotc.core.Names._ import dotty.tools.dotc.core.StdNames._ import dotty.tools.dotc.quoted._ -import dotty.tools.dotc.transform.TreeMapWithStages._ import dotty.tools.dotc.config.ScalaRelease.* +import dotty.tools.dotc.staging.StagingLevel.* +import dotty.tools.dotc.staging.QuoteTypeTags +import 
dotty.tools.dotc.staging.QuoteTypeTags.* import scala.annotation.constructorOnly @@ -77,7 +78,7 @@ class Splicing extends MacroTransform: override def run(using Context): Unit = if ctx.compilationUnit.needsStaging then - super.run(using freshStagingContext) + super.run protected def newTransformer(using Context): Transformer = Level0QuoteTransformer @@ -86,11 +87,9 @@ class Splicing extends MacroTransform: override def transform(tree: tpd.Tree)(using Context): tpd.Tree = assert(level == 0) tree match - case Apply(Select(Apply(TypeApply(fn,_), List(code)),nme.apply),List(quotes)) - if fn.symbol == defn.QuotedRuntime_exprQuote => - QuoteTransformer().transform(tree) - case TypeApply(_, _) if tree.symbol == defn.QuotedTypeModule_of => - QuoteTransformer().transform(tree) + case tree: Quote => + val body1 = QuoteTransformer().transform(tree.body)(using quoteContext) + cpy.Quote(tree)(body1, tree.tags) case tree: DefDef if tree.symbol.is(Inline) => // Quotes in inlined methods are only pickled after they are inlined. tree @@ -98,7 +97,6 @@ class Splicing extends MacroTransform: super.transform(tree) end Level0QuoteTransformer - /** Transforms all direct splices in the current quote and replace them with holes. */ private class QuoteTransformer() extends Transformer: /** Set of definitions in the current quote */ @@ -107,37 +105,19 @@ class Splicing extends MacroTransform: /** Number of holes created in this quote. Used for indexing holes. */ private var numHoles = 0 - /** Mapping from the term symbol of a `Type[T]` to it's hole. Used to deduplicate type holes. */ - private val typeHoles = mutable.Map.empty[Symbol, Hole] + /** Mapping from the term of a `Type[T]` to it's hole. Used to deduplicate type holes. 
*/ + private val typeHoles = mutable.Map.empty[TermRef, Hole] override def transform(tree: tpd.Tree)(using Context): tpd.Tree = + assert(level > 0) tree match - case Apply(fn, List(splicedCode)) if fn.symbol == defn.QuotedRuntime_exprNestedSplice => - if level > 1 then - val splicedCode1 = super.transform(splicedCode)(using spliceContext) - cpy.Apply(tree)(fn, List(splicedCode1)) - else - val holeIdx = numHoles - numHoles += 1 - val splicer = SpliceTransformer(ctx.owner, quotedDefs.contains) - val newSplicedCode1 = splicer.transformSplice(splicedCode, tree.tpe, holeIdx)(using spliceContext) - val newSplicedCode2 = Level0QuoteTransformer.transform(newSplicedCode1)(using spliceContext) - newSplicedCode2 - case tree: TypeDef if tree.symbol.hasAnnotation(defn.QuotedRuntime_SplicedTypeAnnot) => - val tp @ TypeRef(qual: TermRef, _) = tree.rhs.tpe.hiBound: @unchecked - quotedDefs += tree.symbol - val hole = typeHoles.get(qual.symbol) match - case Some (hole) => cpy.Hole(hole)(content = EmptyTree) - case None => - val holeIdx = numHoles - numHoles += 1 - val hole = tpd.Hole(false, holeIdx, Nil, ref(qual), TypeTree(tp)) - typeHoles.put(qual.symbol, hole) - hole - cpy.TypeDef(tree)(rhs = hole) - case Apply(Select(Apply(TypeApply(fn,_), List(code)),nme.apply),List(quotes)) - if fn.symbol == defn.QuotedRuntime_exprQuote => - super.transform(tree)(using quoteContext) + case tree: Splice if level == 1 => + val holeIdx = numHoles + numHoles += 1 + val splicer = SpliceTransformer(ctx.owner, quotedDefs.contains) + val newSplicedCode1 = splicer.transformSplice(tree.expr, tree.tpe, holeIdx)(using spliceContext) + val newSplicedCode2 = Level0QuoteTransformer.transform(newSplicedCode1)(using spliceContext) + newSplicedCode2 case _: Template => for sym <- tree.symbol.owner.info.decls do quotedDefs += sym @@ -183,14 +163,13 @@ class Splicing extends MacroTransform: * ``` * is transformed into * ```scala - * {{{ | T2 | x, X | (x$1: Expr[T1], X$1: Type[X]) => (using Quotes) ?=> {... 
${x$1} ... X$1.Underlying ...} }}} + * {{{ | T2 | x, X | (x$1: Expr[T1], X$1: Type[X]) => (using Quotes) ?=> '{... ${x$1} ... X$1.Underlying ...} }}} * ``` */ private class SpliceTransformer(spliceOwner: Symbol, isCaptured: Symbol => Boolean) extends Transformer: - private var refBindingMap = mutable.Map.empty[Symbol, (Tree, Symbol)] + private var refBindingMap = mutable.LinkedHashMap.empty[Symbol, (Tree, Symbol)] /** Reference to the `Quotes` instance of the current level 1 splice */ private var quotes: Tree | Null = null // TODO: add to the context - private var healedTypes: PCPCheckAndHeal.QuoteTypeTags | Null = null // TODO: add to the context def transformSplice(tree: tpd.Tree, tpe: Type, holeIdx: Int)(using Context): tpd.Tree = assert(level == 0) @@ -202,10 +181,18 @@ class Splicing extends MacroTransform: val ddef = DefDef(meth, List(bindings), newTree.tpe, newTree.changeOwner(ctx.owner, meth)) val fnType = defn.FunctionType(bindings.size, isContextual = false).appliedTo(bindingsTypes :+ newTree.tpe) val closure = Block(ddef :: Nil, Closure(Nil, ref(meth), TypeTree(fnType))) - tpd.Hole(true, holeIdx, refs, closure, TypeTree(tpe)) + tpd.Hole(true, holeIdx, refs, closure, tpe) override def transform(tree: tpd.Tree)(using Context): tpd.Tree = tree match + case tree: Select if tree.isTerm && isCaptured(tree.symbol) => + tree.symbol.allOverriddenSymbols.find(sym => !isCaptured(sym.owner)) match + case Some(sym) => + // virtualize call on overridden symbol that is not defined in a non static class + transform(tree.qualifier.select(sym)) + case _ => + report.error(em"Can not use reference to staged local ${tree.symbol} defined in an outer quote.\n\nThis can work if ${tree.symbol.owner} would extend a top level interface that defines ${tree.symbol}.", tree) + tree case tree: RefTree => if tree.isTerm then if isCaptured(tree.symbol) then @@ -228,42 +215,25 @@ class Splicing extends MacroTransform: case tree @ Assign(lhs: RefTree, rhs) => if isCaptured(lhs.symbol) 
then transformSplicedAssign(tree) else super.transform(tree) - case Apply(fn, args) if fn.symbol == defn.QuotedRuntime_exprNestedSplice => - val newArgs = args.mapConserve(arg => transform(arg)(using spliceContext)) - cpy.Apply(tree)(fn, newArgs) - case Apply(sel @ Select(app @ Apply(fn, args),nme.apply), quotesArgs) - if fn.symbol == defn.QuotedRuntime_exprQuote => - args match - case List(tree: RefTree) if isCaptured(tree.symbol) => - capturedTerm(tree) - case _ => - val newArgs = withCurrentQuote(quotesArgs.head) { - if level > 1 then args.mapConserve(arg => transform(arg)(using quoteContext)) - else args.mapConserve(arg => transformLevel0QuoteContent(arg)(using quoteContext)) - } - cpy.Apply(tree)(cpy.Select(sel)(cpy.Apply(app)(fn, newArgs), nme.apply), quotesArgs) - case Apply(TypeApply(_, List(tpt)), List(quotes)) - if tree.symbol == defn.QuotedTypeModule_of && containsCapturedType(tpt.tpe) => - ref(capturedType(tpt))(using ctx.withSource(tree.source)).withSpan(tree.span) case CapturedApplication(fn, argss) => transformCapturedApplication(tree, fn, argss) + case Apply(Select(Quote(body, _), nme.apply), quotes :: Nil) if level == 0 && body.isTerm => + body match + case _: RefTree if isCaptured(body.symbol) => capturedTerm(body) + case _ => withCurrentQuote(quotes) { super.transform(tree) } + case tree: Quote if level == 0 => + if tree.body.isTerm then transformLevel0Quote(tree) + else if containsCapturedType(tree.body.tpe) then capturedPartTypes(tree) + else tree case _ => super.transform(tree) - private def transformLevel0QuoteContent(tree: Tree)(using Context): Tree = + private def transformLevel0Quote(quote: Quote)(using Context): Tree = // transform and collect new healed types - val old = healedTypes - healedTypes = new PCPCheckAndHeal.QuoteTypeTags(tree.span) - val tree1 = transform(tree) - val newHealedTypes = healedTypes.nn.getTypeTags - healedTypes = old - // add new healed types to the current, merge with existing healed types if necessary - if 
newHealedTypes.isEmpty then tree1 - else tree1 match - case Block(stats @ (x :: _), expr) if x.symbol.hasAnnotation(defn.QuotedRuntime_SplicedTypeAnnot) => - Block(newHealedTypes ::: stats, expr) - case _ => - Block(newHealedTypes, tree1) + val (tags, body1) = inContextWithQuoteTypeTags { + transform(quote.body)(using quoteContext) + } + cpy.Quote(quote)(body1, quote.tags ::: tags) class ArgsClause(val args: List[Tree]): def isTerm: Boolean = args.isEmpty || args.head.isTerm @@ -335,20 +305,40 @@ class Splicing extends MacroTransform: val bindingSym = refBindingMap.getOrElseUpdate(tree.symbol, (tree, newBinding))._2 ref(bindingSym) - private def capturedType(tree: Tree)(using Context): Symbol = - val tpe = tree.tpe.widenTermRefExpr - def newBinding = newSymbol( + private def newQuotedTypeClassBinding(tpe: Type)(using Context) = + newSymbol( spliceOwner, UniqueName.fresh(nme.Type).toTermName, Param, defn.QuotedTypeClass.typeRef.appliedTo(tpe), ) - val bindingSym = refBindingMap.getOrElseUpdate(tree.symbol, (TypeTree(tree.tpe), newBinding))._2 + + private def capturedType(tree: Tree)(using Context): Symbol = + val tpe = tree.tpe.widenTermRefExpr + val bindingSym = refBindingMap + .getOrElseUpdate(tree.symbol, (TypeTree(tree.tpe), newQuotedTypeClassBinding(tpe)))._2 bindingSym + private def capturedPartTypes(quote: Quote)(using Context): Tree = + val (tags, body1) = inContextWithQuoteTypeTags { + val capturePartTypes = new TypeMap { + def apply(tp: Type) = tp match { + case typeRef: TypeRef if containsCapturedType(typeRef) => + val termRef = refBindingMap + .getOrElseUpdate(typeRef.symbol, (TypeTree(typeRef), newQuotedTypeClassBinding(typeRef)))._2.termRef + val tagRef = getTagRef(termRef) + tagRef + case _ => + mapOver(tp) + } + } + TypeTree(capturePartTypes(quote.body.tpe.widenTermRefExpr)) + } + cpy.Quote(quote)(body1, quote.tags ::: tags) + private def getTagRefFor(tree: Tree)(using Context): Tree = val capturedTypeSym = capturedType(tree) - 
TypeTree(healedTypes.nn.getTagRef(capturedTypeSym.termRef)) + TypeTree(getTagRef(capturedTypeSym.termRef)) private def withCurrentQuote[T](newQuotes: Tree)(body: => T)(using Context): T = if level == 0 then @@ -368,18 +358,10 @@ class Splicing extends MacroTransform: body(using ctx.withOwner(meth)).changeOwner(ctx.owner, meth) } }) - ref(defn.QuotedRuntime_exprNestedSplice) - .appliedToType(tpe) - .appliedTo(Literal(Constant(null))) // Dropped when creating the Hole that contains it - .appliedTo(closure) + Splice(closure, tpe) private def quoted(expr: Tree)(using Context): Tree = - val tpe = expr.tpe.widenTermRefExpr - ref(defn.QuotedRuntime_exprQuote) - .appliedToType(tpe) - .appliedTo(expr) - .select(nme.apply) - .appliedTo(quotes.nn) + tpd.Quote(expr, Nil).select(nme.apply).appliedTo(quotes.nn) /** Helper methods to construct trees calling methods in `Quotes.reflect` based on the current `quotes` tree */ private object reflect extends ReifiedReflect { diff --git a/compiler/src/dotty/tools/dotc/transform/Staging.scala b/compiler/src/dotty/tools/dotc/transform/Staging.scala index 1de050a9a6c1..43cbe80ce8c4 100644 --- a/compiler/src/dotty/tools/dotc/transform/Staging.scala +++ b/compiler/src/dotty/tools/dotc/transform/Staging.scala @@ -6,18 +6,18 @@ import dotty.tools.dotc.core.Contexts._ import dotty.tools.dotc.core.Phases._ import dotty.tools.dotc.core.Decorators._ import dotty.tools.dotc.core.Flags._ -import dotty.tools.dotc.core.StagingContext._ import dotty.tools.dotc.core.Symbols._ import dotty.tools.dotc.core.Types._ +import dotty.tools.dotc.inlines.Inlines import dotty.tools.dotc.util.SrcPos import dotty.tools.dotc.transform.SymUtils._ -import dotty.tools.dotc.transform.TreeMapWithStages._ +import dotty.tools.dotc.staging.StagingLevel.* +import dotty.tools.dotc.staging.CrossStageSafety +import dotty.tools.dotc.staging.HealType - - -/** Checks that the Phase Consistency Principle (PCP) holds and heals types. 
+/** Checks that staging level consistency holds and heals types used in higher levels. * - * Type healing consists in transforming a phase inconsistent type `T` into `${ implicitly[Type[T]] }`. + * See `CrossStageSafety` */ class Staging extends MacroTransform { import tpd._ @@ -31,29 +31,40 @@ class Staging extends MacroTransform { override def allowsImplicitSearch: Boolean = true override def checkPostCondition(tree: Tree)(using Context): Unit = - if (ctx.phase <= splicingPhase) { - // Recheck that PCP holds but do not heal any inconsistent types as they should already have been heald + if (ctx.phase <= stagingPhase) { + // Recheck that staging level consistency holds but do not heal any inconsistent types as they should already have been heald tree match { case PackageDef(pid, _) if tree.symbol.owner == defn.RootClass => - val checker = new PCPCheckAndHeal(freshStagingContext) { - override protected def tryHeal(sym: Symbol, tp: TypeRef, pos: SrcPos)(using Context): TypeRef = { - def symStr = - if (sym.is(ModuleClass)) sym.sourceModule.show - else i"${sym.name}.this" - val errMsg = s"\nin ${ctx.owner.fullName}" - assert( - ctx.owner.hasAnnotation(defn.QuotedRuntime_SplicedTypeAnnot) || - (sym.isType && levelOf(sym) > 0), - em"""access to $symStr from wrong staging level: - | - the definition is at level ${levelOf(sym)}, - | - but the access is at level $level.$errMsg""") + val checker = new CrossStageSafety { + override protected def healType(pos: SrcPos)(tpe: Type)(using Context) = new HealType(pos) { + override protected def tryHeal(tp: TypeRef): TypeRef = { + val sym = tp.symbol + def symStr = + if (sym.is(ModuleClass)) sym.sourceModule.show + else i"${sym.name}.this" + val errMsg = s"\nin ${ctx.owner.fullName}" + assert( + ctx.owner.hasAnnotation(defn.QuotedRuntime_SplicedTypeAnnot) || + (sym.isType && levelOf(sym) > 0), + em"""access to $symStr from wrong staging level: + | - the definition is at level ${levelOf(sym)}, + | - but the access is at level 
$level.$errMsg""") - tp - } + tp + } + }.apply(tpe) } checker.transform(tree) case _ => } + } + if !Inlines.inInlineMethod then + tree match { + case tree: RefTree => + assert(level != 0 || tree.symbol != defn.QuotedTypeModule_of, + "scala.quoted.Type.of at level 0 should have been replaced with Quote AST in staging phase") + case _ => + } tree.tpe match { case tpe @ TypeRef(prefix, _) if tpe.typeSymbol.isTypeSplice => @@ -63,14 +74,14 @@ class Staging extends MacroTransform { case _ => // OK } - } + end checkPostCondition override def run(using Context): Unit = - if (ctx.compilationUnit.needsStaging) super.run(using freshStagingContext) + if (ctx.compilationUnit.needsStaging) super.run protected def newTransformer(using Context): Transformer = new Transformer { override def transform(tree: tpd.Tree)(using Context): tpd.Tree = - new PCPCheckAndHeal(ctx).transform(tree) + (new CrossStageSafety).transform(tree) } } diff --git a/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala b/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala index b0c8605e7dd1..b78c75d58340 100644 --- a/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala +++ b/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala @@ -88,7 +88,7 @@ class SuperAccessors(thisPhase: DenotTransformer) { // Diagnostic for SI-7091 if (!accDefs.contains(clazz)) report.error( - s"Internal error: unable to store accessor definition in ${clazz}. clazz.hasPackageFlag=${clazz.is(Package)}. Accessor required for ${sel} (${sel.show})", + em"Internal error: unable to store accessor definition in ${clazz}. clazz.hasPackageFlag=${clazz.is(Package)}. 
Accessor required for ${sel.toString} ($sel)", sel.srcPos) else accDefs(clazz) += DefDef(acc, EmptyTree).withSpan(accRange) acc @@ -109,16 +109,16 @@ class SuperAccessors(thisPhase: DenotTransformer) { if (sym.isTerm && !sym.is(Method, butNot = Accessor) && !ctx.owner.isAllOf(ParamForwarder)) // ParamForwaders as installed ParamForwarding.scala do use super calls to vals - report.error(s"super may be not be used on ${sym.underlyingSymbol}", sel.srcPos) + report.error(em"super may be not be used on ${sym.underlyingSymbol}", sel.srcPos) else if (isDisallowed(sym)) - report.error(s"super not allowed here: use this.${sel.name} instead", sel.srcPos) + report.error(em"super not allowed here: use this.${sel.name} instead", sel.srcPos) else if (sym.is(Deferred)) { val member = sym.overridingSymbol(clazz.asClass) if (!mix.name.isEmpty || !member.exists || !(member.is(AbsOverride) && member.isIncompleteIn(clazz))) report.error( - i"${sym.showLocated} is accessed from super. It may not be abstract unless it is overridden by a member declared `abstract' and `override'", + em"${sym.showLocated} is accessed from super. 
It may not be abstract unless it is overridden by a member declared `abstract' and `override'", sel.srcPos) else report.log(i"ok super $sel ${sym.showLocated} $member $clazz ${member.isIncompleteIn(clazz)}") } @@ -131,7 +131,7 @@ class SuperAccessors(thisPhase: DenotTransformer) { val overriding = sym.overridingSymbol(intermediateClass) if (overriding.is(Deferred, butNot = AbsOverride) && !overriding.owner.is(Trait)) report.error( - s"${sym.showLocated} cannot be directly accessed from ${clazz} because ${overriding.owner} redeclares it as abstract", + em"${sym.showLocated} cannot be directly accessed from ${clazz} because ${overriding.owner} redeclares it as abstract", sel.srcPos) } else { @@ -174,27 +174,30 @@ class SuperAccessors(thisPhase: DenotTransformer) { val sel @ Select(qual, name) = tree: @unchecked val sym = sel.symbol - /** If an accesses to protected member of a class comes from a trait, - * or would need a protected accessor placed in a trait, we cannot - * perform the access to the protected member directly since jvm access - * restrictions require the call site to be in an actual subclass and - * traits don't count as subclasses in this respect. In this case - * we generate a super accessor instead. See SI-2296. - */ def needsSuperAccessor = ProtectedAccessors.needsAccessorIfNotInSubclass(sym) && AccessProxies.hostForAccessorOf(sym).is(Trait) qual match { case _: This if needsSuperAccessor => - /* - * A trait which extends a class and accesses a protected member - * of that class cannot implement the necessary accessor method - * because jvm access restrictions require the call site to be in - * an actual subclass and traits don't count as subclasses in this - * respect. We generate a super accessor itself, which will be fixed - * by the implementing class. See SI-2296. - */ - superAccessorCall(sel) + /* Given a protected member m defined in class C, + * and a trait T that calls m. 
+ * + * If T extends C, then we can access it by casting + * the qualifier of the select to C. + * + * That's because the protected method is actually public, + * so we can call it. For truly protected methods, like from + * Java, we error instead of emitting the wrong code (i17021.ext-java). + * + * Otherwise, we need to go through an accessor, + * which the implementing class will provide an implementation for. + */ + if ctx.owner.enclosingClass.derivesFrom(sym.owner) then + if sym.is(JavaDefined) then + report.error(em"${ctx.owner} accesses protected $sym inside a concrete trait method: use super.${sel.name} instead", sel.srcPos) + sel + else + superAccessorCall(sel) case Super(_, mix) => transformSuperSelect(sel) case _ => diff --git a/compiler/src/dotty/tools/dotc/transform/SymUtils.scala b/compiler/src/dotty/tools/dotc/transform/SymUtils.scala index 6010fe2a2a44..c02a7d90cb8c 100644 --- a/compiler/src/dotty/tools/dotc/transform/SymUtils.scala +++ b/compiler/src/dotty/tools/dotc/transform/SymUtils.scala @@ -18,6 +18,8 @@ import Annotations.Annotation import Phases._ import ast.tpd.Literal +import dotty.tools.dotc.transform.sjs.JSSymUtils.sjsNeedsField + import scala.annotation.tailrec object SymUtils: @@ -259,9 +261,29 @@ object SymUtils: self.owner.info.decl(fieldName).suchThat(!_.is(Method)).symbol } + /** Is this symbol a constant expression final val? + * + * This is the case if all of the following are true: + * + * - it is a `final val`, + * - its result type is a `ConstantType`, and + * - it does not need an explicit field because of Scala.js semantics (see `JSSymUtils.sjsNeedsField`). + * + * Constant expression final vals do not need an explicit field to store + * their value. See the Memoize-Mixin-Constructors phase trio. 
+ */ def isConstExprFinalVal(using Context): Boolean = atPhaseNoLater(erasurePhase) { - self.is(Final) && self.info.resultType.isInstanceOf[ConstantType] + self.is(Final, butNot = Mutable) && self.info.resultType.isInstanceOf[ConstantType] + } && !self.sjsNeedsField + + /** The `ConstantType` of a val known to be `isConstrExprFinalVal`. + * + * @pre `self.isConstantExprFinalVal` is true. + */ + def constExprFinalValConstantType(using Context): ConstantType = + atPhaseNoLater(erasurePhase) { + self.info.resultType.asInstanceOf[ConstantType] } def isField(using Context): Boolean = @@ -270,11 +292,8 @@ object SymUtils: def isEnumCase(using Context): Boolean = self.isAllOf(EnumCase, butNot = JavaDefined) - def annotationsCarrying(meta: ClassSymbol)(using Context): List[Annotation] = - self.annotations.filter(_.symbol.hasAnnotation(meta)) - - def withAnnotationsCarrying(from: Symbol, meta: ClassSymbol)(using Context): self.type = { - self.addAnnotations(from.annotationsCarrying(meta)) + def withAnnotationsCarrying(from: Symbol, meta: Symbol, orNoneOf: Set[Symbol] = Set.empty)(using Context): self.type = { + self.addAnnotations(from.annotationsCarrying(Set(meta), orNoneOf)) self } @@ -318,14 +337,6 @@ object SymUtils: def reachableRawTypeRef(using Context) = self.reachableTypeRef.appliedTo(self.typeParams.map(_ => TypeBounds.emptyPolyKind)) - /** Is symbol a quote operation? */ - def isQuote(using Context): Boolean = - self == defn.QuotedRuntime_exprQuote || self == defn.QuotedTypeModule_of - - /** Is symbol a term splice operation? */ - def isExprSplice(using Context): Boolean = - self == defn.QuotedRuntime_exprSplice || self == defn.QuotedRuntime_exprNestedSplice - /** Is symbol a type splice operation? 
*/ def isTypeSplice(using Context): Boolean = self == defn.QuotedType_splice @@ -384,7 +395,7 @@ object SymUtils: if original.hasAnnotation(defn.TargetNameAnnot) then self.addAnnotation( Annotation(defn.TargetNameAnnot, - Literal(Constant(nameFn(original.targetName).toString)).withSpan(original.span))) + Literal(Constant(nameFn(original.targetName).toString)).withSpan(original.span), original.span)) /** The return type as seen from the body of this definition. It is * computed from the symbol's type by replacing param refs by param symbols. diff --git a/compiler/src/dotty/tools/dotc/transform/SyntheticMembers.scala b/compiler/src/dotty/tools/dotc/transform/SyntheticMembers.scala index 0a9a7a83948c..48bcbaab3511 100644 --- a/compiler/src/dotty/tools/dotc/transform/SyntheticMembers.scala +++ b/compiler/src/dotty/tools/dotc/transform/SyntheticMembers.scala @@ -13,6 +13,7 @@ import ast.untpd import ValueClasses.isDerivedValueClass import SymUtils._ import util.Property +import util.Spans.Span import config.Printers.derive import NullOpsDecorator._ @@ -155,7 +156,7 @@ class SyntheticMembers(thisPhase: DenotTransformer) { case nme.hashCode_ => chooseHashcode case nme.toString_ => toStringBody(vrefss) case nme.equals_ => equalsBody(vrefss.head.head) - case nme.canEqual_ => canEqualBody(vrefss.head.head) + case nme.canEqual_ => canEqualBody(vrefss.head.head, synthetic.span) case nme.ordinal => ordinalRef case nme.productArity => Literal(Constant(accessors.length)) case nme.productPrefix if isEnumValue => nameRef @@ -260,13 +261,13 @@ class SyntheticMembers(thisPhase: DenotTransformer) { def equalsBody(that: Tree)(using Context): Tree = { val thatAsClazz = newSymbol(ctx.owner, nme.x_0, SyntheticCase, clazzType, coord = ctx.owner.span) // x$0 def wildcardAscription(tp: Type) = Typed(Underscore(tp), TypeTree(tp)) - val pattern = Bind(thatAsClazz, wildcardAscription(AnnotatedType(clazzType, Annotation(defn.UncheckedAnnot)))) // x$0 @ (_: C @unchecked) + val pattern = 
Bind(thatAsClazz, wildcardAscription(AnnotatedType(clazzType, Annotation(defn.UncheckedAnnot, thatAsClazz.span)))) // x$0 @ (_: C @unchecked) // compare primitive fields first, slow equality checks of non-primitive fields can be skipped when primitives differ val sortedAccessors = accessors.sortBy(accessor => if (accessor.info.typeSymbol.isPrimitiveValueClass) 0 else 1) val comparisons = sortedAccessors.map { accessor => This(clazz).withSpan(ctx.owner.span.focus).select(accessor).equal(ref(thatAsClazz).select(accessor)) } var rhs = // this.x == this$0.x && this.y == x$0.y && that.canEqual(this) - if comparisons.isEmpty then Literal(Constant(true)) else comparisons.reduceLeft(_ and _) + if comparisons.isEmpty then Literal(Constant(true)) else comparisons.reduceBalanced(_ and _) val canEqualMeth = existingDef(defn.Product_canEqual, clazz) if !clazz.is(Final) || canEqualMeth.exists && !canEqualMeth.is(Synthetic) then rhs = rhs.and( @@ -390,7 +391,7 @@ class SyntheticMembers(thisPhase: DenotTransformer) { * * `@unchecked` is needed for parametric case classes. 
*/ - def canEqualBody(that: Tree): Tree = that.isInstance(AnnotatedType(clazzType, Annotation(defn.UncheckedAnnot))) + def canEqualBody(that: Tree, span: Span): Tree = that.isInstance(AnnotatedType(clazzType, Annotation(defn.UncheckedAnnot, span))) symbolsToSynthesize.flatMap(syntheticDefIfMissing) } diff --git a/compiler/src/dotty/tools/dotc/transform/TailRec.scala b/compiler/src/dotty/tools/dotc/transform/TailRec.scala index 71b66c3d0da6..741b9d1627fe 100644 --- a/compiler/src/dotty/tools/dotc/transform/TailRec.scala +++ b/compiler/src/dotty/tools/dotc/transform/TailRec.scala @@ -4,7 +4,7 @@ package transform import ast.{TreeTypeMap, tpd} import config.Printers.tailrec import core.* -import Contexts.*, Flags.*, Symbols.* +import Contexts.*, Flags.*, Symbols.*, Decorators.em import Constants.Constant import NameKinds.{TailLabelName, TailLocalName, TailTempName} import StdNames.nme @@ -303,7 +303,7 @@ class TailRec extends MiniPhase { def fail(reason: String) = { if (isMandatory) { failureReported = true - report.error(s"Cannot rewrite recursive call: $reason", tree.srcPos) + report.error(em"Cannot rewrite recursive call: $reason", tree.srcPos) } else tailrec.println("Cannot rewrite recursive call at: " + tree.span + " because: " + reason) diff --git a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala index 82413e2e6733..34b3183a6b15 100644 --- a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala +++ b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala @@ -20,6 +20,8 @@ import ast.{tpd, untpd} import util.Chars._ import collection.mutable import ProtoTypes._ +import staging.StagingLevel +import inlines.Inlines.inInlineMethod import dotty.tools.backend.jvm.DottyBackendInterface.symExtensions @@ -42,10 +44,6 @@ class TreeChecker extends Phase with SymTransformer { private val seenClasses = collection.mutable.HashMap[String, Symbol]() private val seenModuleVals = 
collection.mutable.HashMap[String, Symbol]() - def isValidJVMName(name: Name): Boolean = name.toString.forall(isValidJVMChar) - - def isValidJVMMethodName(name: Name): Boolean = name.toString.forall(isValidJVMMethodChar) - val NoSuperClassFlags: FlagSet = Trait | Package def testDuplicate(sym: Symbol, registry: mutable.Map[String, Symbol], typ: String)(using Context): Unit = { @@ -91,7 +89,7 @@ class TreeChecker extends Phase with SymTransformer { if (ctx.phaseId <= erasurePhase.id) { val initial = symd.initial assert(symd == initial || symd.signature == initial.signature, - i"""Signature of ${sym.showLocated} changed at phase ${ctx.phase.prevMega} + i"""Signature of ${sym} in ${sym.ownersIterator.toList}%, % changed at phase ${ctx.phase.prevMega} |Initial info: ${initial.info} |Initial sig : ${initial.signature} |Current info: ${symd.info} @@ -109,18 +107,6 @@ class TreeChecker extends Phase with SymTransformer { else if (ctx.phase.prev.isCheckable) check(ctx.base.allPhases.toIndexedSeq, ctx) - private def previousPhases(phases: List[Phase])(using Context): List[Phase] = phases match { - case (phase: MegaPhase) :: phases1 => - val subPhases = phase.miniPhases - val previousSubPhases = previousPhases(subPhases.toList) - if (previousSubPhases.length == subPhases.length) previousSubPhases ::: previousPhases(phases1) - else previousSubPhases - case phase :: phases1 if phase ne ctx.phase => - phase :: previousPhases(phases1) - case _ => - Nil - } - def check(phasesToRun: Seq[Phase], ctx: Context): Tree = { val fusedPhase = ctx.phase.prevMega(using ctx) report.echo(s"checking ${ctx.compilationUnit} after phase ${fusedPhase}")(using ctx) @@ -134,7 +120,6 @@ class TreeChecker extends Phase with SymTransformer { val checkingCtx = ctx .fresh - .addMode(Mode.ImplicitsEnabled) .setReporter(new ThrowingReporter(ctx.reporter)) val checker = inContext(ctx) { @@ -150,9 +135,80 @@ class TreeChecker extends Phase with SymTransformer { } } + /** + * Checks that `New` nodes are 
always wrapped inside `Select` nodes. + */ + def assertSelectWrapsNew(tree: Tree)(using Context): Unit = + (new TreeAccumulator[tpd.Tree] { + override def apply(parent: Tree, tree: Tree)(using Context): Tree = { + tree match { + case tree: New if !parent.isInstanceOf[tpd.Select] => + assert(assertion = false, i"`New` node must be wrapped in a `Select` of the constructor:\n parent = ${parent.show}\n child = ${tree.show}") + case _: Annotated => + // Don't check inside annotations, since they're allowed to contain + // somewhat invalid trees. + case _ => + foldOver(tree, tree) // replace the parent when folding over the children + } + parent // return the old parent so that my siblings see it + } + })(tpd.EmptyTree, tree) +} + +object TreeChecker { + /** - Check that TypeParamRefs and MethodParams refer to an enclosing type. + * - Check that all type variables are instantiated. + */ + def checkNoOrphans(tp0: Type, tree: untpd.Tree = untpd.EmptyTree)(using Context): Type = new TypeMap() { + val definedBinders = new java.util.IdentityHashMap[Type, Any] + def apply(tp: Type): Type = { + tp match { + case tp: BindingType => + definedBinders.put(tp, tp) + mapOver(tp) + definedBinders.remove(tp) + case tp: ParamRef => + assert(definedBinders.get(tp.binder) != null, s"orphan param: ${tp.show}, hash of binder = ${System.identityHashCode(tp.binder)}, tree = ${tree.show}, type = $tp0") + case tp: TypeVar => + assert(tp.isInstantiated, s"Uninstantiated type variable: ${tp.show}, tree = ${tree.show}") + apply(tp.underlying) + case _ => + mapOver(tp) + } + tp + } + }.apply(tp0) + + /** Run some additional checks on the nodes of the trees. Specifically: + * + * - TypeTree can only appear in TypeApply args, New, Typed tpt, Closure + * tpt, SeqLiteral elemtpt, ValDef tpt, DefDef tpt, and TypeDef rhs. 
+ */ + object TreeNodeChecker extends untpd.TreeTraverser: + import untpd._ + def traverse(tree: Tree)(using Context) = tree match + case t: TypeTree => assert(assertion = false, i"TypeTree not expected: $t") + case t @ TypeApply(fun, _targs) => traverse(fun) + case t @ New(_tpt) => + case t @ Typed(expr, _tpt) => traverse(expr) + case t @ Closure(env, meth, _tpt) => traverse(env); traverse(meth) + case t @ SeqLiteral(elems, _elemtpt) => traverse(elems) + case t @ ValDef(_, _tpt, _) => traverse(t.rhs) + case t @ DefDef(_, paramss, _tpt, _) => for params <- paramss do traverse(params); traverse(t.rhs) + case t @ TypeDef(_, _rhs) => + case t @ Template(constr, _, self, _) => traverse(constr); traverse(t.parentsOrDerived); traverse(self); traverse(t.body) + case t => traverseChildren(t) + end traverse + + private[TreeChecker] def isValidJVMName(name: Name): Boolean = name.toString.forall(isValidJVMChar) + + private[TreeChecker] def isValidJVMMethodName(name: Name): Boolean = name.toString.forall(isValidJVMMethodChar) + + class Checker(phasesToCheck: Seq[Phase]) extends ReTyper with Checking { + import ast.tpd._ - private val nowDefinedSyms = util.HashSet[Symbol]() + protected val nowDefinedSyms = util.HashSet[Symbol]() private val patBoundSyms = util.HashSet[Symbol]() private val everDefinedSyms = MutableSymbolMap[untpd.Tree]() @@ -248,10 +304,9 @@ class TreeChecker extends Phase with SymTransformer { // case tree: untpd.Ident => // case tree: untpd.Select => // case tree: untpd.Bind => - case vd : ValDef => - assertIdentNotJavaClass(vd.forceIfLazy) - case dd : DefDef => - assertIdentNotJavaClass(dd.forceIfLazy) + case md: ValOrDefDef => + md.forceFields() + assertIdentNotJavaClass(md) // case tree: untpd.TypeDef => case Apply(fun, args) => assertIdentNotJavaClass(fun) @@ -376,7 +431,7 @@ class TreeChecker extends Phase with SymTransformer { override def typedIdent(tree: untpd.Ident, pt: Type)(using Context): Tree = { assert(tree.isTerm || !ctx.isAfterTyper, tree.show 
+ " at " + ctx.phase) - assert(tree.isType || ctx.mode.is(Mode.Pattern) && untpd.isWildcardArg(tree) || !needsSelect(tree.tpe), i"bad type ${tree.tpe} for $tree # ${tree.uniqueId}") + assert(tree.isType || ctx.mode.is(Mode.Pattern) && untpd.isWildcardArg(tree) || !needsSelect(tree.typeOpt), i"bad type ${tree.tpe} for $tree # ${tree.uniqueId}") assertDefined(tree) checkNotRepeated(super.typedIdent(tree, pt)) @@ -392,10 +447,12 @@ class TreeChecker extends Phase with SymTransformer { // Polymorphic apply methods stay structural until Erasure val isPolyFunctionApply = (tree.name eq nme.apply) && tree.qualifier.typeOpt.derivesFrom(defn.PolyFunctionClass) + // Erased functions stay structural until Erasure + val isErasedFunctionApply = (tree.name eq nme.apply) && tree.qualifier.typeOpt.derivesFrom(defn.ErasedFunctionClass) // Outer selects are pickled specially so don't require a symbol val isOuterSelect = tree.name.is(OuterSelectName) val isPrimitiveArrayOp = ctx.erasedTypes && nme.isPrimitiveName(tree.name) - if !(tree.isType || isPolyFunctionApply || isOuterSelect || isPrimitiveArrayOp) then + if !(tree.isType || isPolyFunctionApply || isErasedFunctionApply || isOuterSelect || isPrimitiveArrayOp) then val denot = tree.denot assert(denot.exists, i"Selection $tree with type $tpe does not have a denotation") assert(denot.symbol.exists, i"Denotation $denot of selection $tree with type $tpe does not have a symbol, qualifier type = ${tree.qualifier.typeOpt}") @@ -417,11 +474,11 @@ class TreeChecker extends Phase with SymTransformer { sym == mbr || sym.overriddenSymbol(mbr.owner.asClass) == mbr || mbr.overriddenSymbol(sym.owner.asClass) == sym), - ex"""symbols differ for $tree - |was : $sym - |alternatives by type: $memberSyms%, % of types ${memberSyms.map(_.info)}%, % - |qualifier type : ${qualTpe} - |tree type : ${tree.typeOpt} of class ${tree.typeOpt.getClass}""") + i"""symbols differ for $tree + |was : $sym + |alternatives by type: $memberSyms%, % of types 
${memberSyms.map(_.info)}%, % + |qualifier type : ${qualTpe} + |tree type : ${tree.typeOpt} of class ${tree.typeOpt.getClass}""") } checkNotRepeated(super.typedSelect(tree, pt)) @@ -458,7 +515,7 @@ class TreeChecker extends Phase with SymTransformer { val inliningPhase = ctx.base.inliningPhase inliningPhase.exists && ctx.phase.id > inliningPhase.id if isAfterInlining then - // The staging phase destroys in PCPCheckAndHeal the property that + // The staging phase destroys in CrossStageSafety the property that // tree.expr.tpe <:< pt1. A test case where this arises is run-macros/enum-nat-macro. // We should follow up why this happens. If the problem is fixed, we can // drop the isAfterInlining special case. To reproduce the problem, just @@ -478,11 +535,16 @@ class TreeChecker extends Phase with SymTransformer { i"owner chain = ${tree.symbol.ownersIterator.toList}%, %, ctxOwners = ${ctx.outersIterator.map(_.owner).toList}%, %") } + override def typedTypeDef(tdef: untpd.TypeDef, sym: Symbol)(using Context): Tree = { + assert(sym.info.isInstanceOf[ClassInfo | TypeBounds], i"wrong type, expect a template or type bounds for ${sym.fullName}, but found: ${sym.info}") + super.typedTypeDef(tdef, sym) + } + override def typedClassDef(cdef: untpd.TypeDef, cls: ClassSymbol)(using Context): Tree = { val TypeDef(_, impl @ Template(constr, _, _, _)) = cdef: @unchecked assert(cdef.symbol == cls) assert(impl.symbol.owner == cls) - assert(constr.symbol.owner == cls) + assert(constr.symbol.owner == cls, i"constr ${constr.symbol} in $cdef has wrong owner; should be $cls but is ${constr.symbol.owner}") assert(cls.primaryConstructor == constr.symbol, i"mismatch, primary constructor ${cls.primaryConstructor}, in tree = ${constr.symbol}") checkOwner(impl) checkOwner(impl.constr) @@ -597,12 +659,48 @@ class TreeChecker extends Phase with SymTransformer { else super.typedPackageDef(tree) + override def typedQuote(tree: untpd.Quote, pt: Type)(using Context): Tree = + if ctx.phase <= 
stagingPhase.prev then + assert(tree.tags.isEmpty, i"unexpected tags in Quote before staging phase: ${tree.tags}") + else + assert(!tree.body.isInstanceOf[untpd.Splice] || inInlineMethod, i"missed quote cancellation in $tree") + assert(!tree.body.isInstanceOf[untpd.Hole] || inInlineMethod, i"missed quote cancellation in $tree") + if StagingLevel.level != 0 then + assert(tree.tags.isEmpty, i"unexpected tags in Quote at staging level ${StagingLevel.level}: ${tree.tags}") + + for tag <- tree.tags do + assert(tag.isInstanceOf[RefTree], i"expected RefTree in Quote but was: $tag") + + val tree1 = super.typedQuote(tree, pt) + for tag <- tree.tags do + assert(tag.typeOpt.derivesFrom(defn.QuotedTypeClass), i"expected Quote tag to be of type `Type` but was: ${tag.tpe}") + + tree1 match + case Quote(body, targ :: Nil) if body.isType => + assert(!(body.tpe =:= targ.tpe.select(tpnme.Underlying)), i"missed quote cancellation in $tree1") + case _ => + + tree1 + + override def typedSplice(tree: untpd.Splice, pt: Type)(using Context): Tree = + if stagingPhase <= ctx.phase then + assert(!tree.expr.isInstanceOf[untpd.Quote] || inInlineMethod, i"missed quote cancellation in $tree") + super.typedSplice(tree, pt) + override def typedHole(tree: untpd.Hole, pt: Type)(using Context): Tree = { - val tree1 @ Hole(isTermHole, _, args, content, tpt) = super.typedHole(tree, pt): @unchecked + val tree1 @ Hole(isTerm, idx, args, content) = super.typedHole(tree, pt): @unchecked + + assert(idx >= 0, i"hole should not have negative index: $tree") + assert(isTerm || tree.args.isEmpty, i"type hole should not have arguments: $tree") + + // Check that we only add the captured type `T` instead of a more complex type like `List[T]`. + // If we have `F[T]` with captured `F` and `T`, we should list `F` and `T` separately in the args. 
+ for arg <- args do + assert(arg.isTerm || arg.tpe.isInstanceOf[TypeRef], "Expected TypeRef in Hole type args but got: " + arg.tpe) // Check result type of the hole - if isTermHole then assert(tpt.typeOpt <:< pt) - else assert(tpt.typeOpt =:= pt) + if isTerm then assert(tree1.typeOpt <:< pt) + else assert(tree1.typeOpt =:= pt) // Check that the types of the args conform to the types of the contents of the hole val argQuotedTypes = args.map { arg => @@ -614,16 +712,16 @@ class TreeChecker extends Phase with SymTransformer { defn.AnyType case tpe => tpe defn.QuotedExprClass.typeRef.appliedTo(tpe) - else defn.QuotedTypeClass.typeRef.appliedTo(arg.typeOpt) + else defn.QuotedTypeClass.typeRef.appliedTo(arg.typeOpt.widenTermRefExpr) } val expectedResultType = - if isTermHole then defn.QuotedExprClass.typeRef.appliedTo(tpt.typeOpt) - else defn.QuotedTypeClass.typeRef.appliedTo(tpt.typeOpt) + if isTerm then defn.QuotedExprClass.typeRef.appliedTo(tree1.typeOpt) + else defn.QuotedTypeClass.typeRef.appliedTo(tree1.typeOpt) val contextualResult = defn.FunctionOf(List(defn.QuotesClass.typeRef), expectedResultType, isContextual = true) val expectedContentType = defn.FunctionOf(argQuotedTypes, contextualResult) - assert(content.typeOpt =:= expectedContentType) + assert(content.typeOpt =:= expectedContentType, i"unexpected content of hole\nexpected: ${expectedContentType}\nwas: ${content.typeOpt}") tree1 } @@ -658,68 +756,55 @@ class TreeChecker extends Phase with SymTransformer { override def simplify(tree: Tree, pt: Type, locked: TypeVars)(using Context): tree.type = tree } - /** - * Checks that `New` nodes are always wrapped inside `Select` nodes. 
- */ - def assertSelectWrapsNew(tree: Tree)(using Context): Unit = - (new TreeAccumulator[tpd.Tree] { - override def apply(parent: Tree, tree: Tree)(using Context): Tree = { - tree match { - case tree: New if !parent.isInstanceOf[tpd.Select] => - assert(assertion = false, i"`New` node must be wrapped in a `Select`:\n parent = ${parent.show}\n child = ${tree.show}") - case _: Annotated => - // Don't check inside annotations, since they're allowed to contain - // somewhat invalid trees. - case _ => - foldOver(tree, tree) // replace the parent when folding over the children - } - parent // return the old parent so that my siblings see it - } - })(tpd.EmptyTree, tree) -} + /** Tree checker that can be applied to a local tree. */ + class LocalChecker(phasesToCheck: Seq[Phase]) extends Checker(phasesToCheck: Seq[Phase]): + override def assertDefined(tree: untpd.Tree)(using Context): Unit = + // Only check definitions nested in the local tree + if nowDefinedSyms.contains(tree.symbol.maybeOwner) then + super.assertDefined(tree) -object TreeChecker { - /** - Check that TypeParamRefs and MethodParams refer to an enclosing type. - * - Check that all type variables are instantiated. 
- */ - def checkNoOrphans(tp0: Type, tree: untpd.Tree = untpd.EmptyTree)(using Context): Type = new TypeMap() { - val definedBinders = new java.util.IdentityHashMap[Type, Any] - def apply(tp: Type): Type = { - tp match { - case tp: BindingType => - definedBinders.put(tp, tp) - mapOver(tp) - definedBinders.remove(tp) - case tp: ParamRef => - assert(definedBinders.get(tp.binder) != null, s"orphan param: ${tp.show}, hash of binder = ${System.identityHashCode(tp.binder)}, tree = ${tree.show}, type = $tp0") - case tp: TypeVar => - assert(tp.isInstantiated, s"Uninstantiated type variable: ${tp.show}, tree = ${tree.show}") - apply(tp.underlying) - case _ => - mapOver(tp) - } - tp - } - }.apply(tp0) + def checkMacroGeneratedTree(original: tpd.Tree, expansion: tpd.Tree)(using Context): Unit = + if ctx.settings.XcheckMacros.value then + val checkingCtx = ctx + .fresh + .setReporter(new ThrowingReporter(ctx.reporter)) + val phases = ctx.base.allPhases.toList + val treeChecker = new LocalChecker(previousPhases(phases)) + + try treeChecker.typed(expansion)(using checkingCtx) + catch + case err: java.lang.AssertionError => + val stack = + if !ctx.settings.Ydebug.value then "\nstacktrace available when compiling with `-Ydebug`" + else if err.getStackTrace == null then " no stacktrace" + else err.getStackTrace.nn.mkString(" ", " \n", "") + + report.error( + s"""Malformed tree was found while expanding macro with -Xcheck-macros. + |The tree does not conform to the compiler's tree invariants. + | + |Macro was: + |${scala.quoted.runtime.impl.QuotesImpl.showDecompiledTree(original)} + | + |The macro returned: + |${scala.quoted.runtime.impl.QuotesImpl.showDecompiledTree(expansion)} + | + |Error: + |${err.getMessage} + |$stack + |""", + original + ) - /** Run some additional checks on the nodes of the trees. Specifically: - * - * - TypeTree can only appear in TypeApply args, New, Typed tpt, Closure - * tpt, SeqLiteral elemtpt, ValDef tpt, DefDef tpt, and TypeDef rhs. 
- */ - object TreeNodeChecker extends untpd.TreeTraverser: - import untpd._ - def traverse(tree: Tree)(using Context) = tree match - case t: TypeTree => assert(assertion = false, i"TypeTree not expected: $t") - case t @ TypeApply(fun, _targs) => traverse(fun) - case t @ New(_tpt) => - case t @ Typed(expr, _tpt) => traverse(expr) - case t @ Closure(env, meth, _tpt) => traverse(env); traverse(meth) - case t @ SeqLiteral(elems, _elemtpt) => traverse(elems) - case t @ ValDef(_, _tpt, _) => traverse(t.rhs) - case t @ DefDef(_, paramss, _tpt, _) => for params <- paramss do traverse(params); traverse(t.rhs) - case t @ TypeDef(_, _rhs) => - case t @ Template(constr, parents, self, _) => traverse(constr); traverse(parents); traverse(self); traverse(t.body) - case t => traverseChildren(t) - end traverse + private[TreeChecker] def previousPhases(phases: List[Phase])(using Context): List[Phase] = phases match { + case (phase: MegaPhase) :: phases1 => + val subPhases = phase.miniPhases + val previousSubPhases = previousPhases(subPhases.toList) + if (previousSubPhases.length == subPhases.length) previousSubPhases ::: previousPhases(phases1) + else previousSubPhases + case phase :: phases1 if phase ne ctx.phase => + phase :: previousPhases(phases1) + case _ => + Nil + } } diff --git a/compiler/src/dotty/tools/dotc/transform/TupleOptimizations.scala b/compiler/src/dotty/tools/dotc/transform/TupleOptimizations.scala index 6bc2f438eb37..6fba0bca4ce3 100644 --- a/compiler/src/dotty/tools/dotc/transform/TupleOptimizations.scala +++ b/compiler/src/dotty/tools/dotc/transform/TupleOptimizations.scala @@ -145,7 +145,7 @@ class TupleOptimizations extends MiniPhase with IdentityDenotTransformer { val size = tpes.size val n = nTpe.value.intValue if (n < 0 || n >= size) { - report.error("index out of bounds: " + n, nTree.underlyingArgument.srcPos) + report.error(em"index out of bounds: $n", nTree.underlyingArgument.srcPos) tree } else if (size <= MaxTupleArity) @@ -155,7 +155,7 @@ class 
TupleOptimizations extends MiniPhase with IdentityDenotTransformer { // tup.asInstanceOf[TupleXXL].productElement(n) tup.asInstance(defn.TupleXXLClass.typeRef).select(nme.productElement).appliedTo(Literal(nTpe.value)) case (None, nTpe: ConstantType) if nTpe.value.intValue < 0 => - report.error("index out of bounds: " + nTpe.value.intValue, nTree.srcPos) + report.error(em"index out of bounds: ${nTpe.value.intValue}", nTree.srcPos) tree case _ => // No optimization, keep: diff --git a/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala b/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala index b2a101649457..f5cb8eab73a4 100644 --- a/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala +++ b/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala @@ -16,6 +16,8 @@ import util.Spans._ import reporting._ import config.Printers.{ transforms => debug } +import patmat.Typ + /** This transform normalizes type tests and type casts, * also replacing type tests with singleton argument type with reference equality check * Any remaining type tests @@ -51,7 +53,8 @@ object TypeTestsCasts { * 6. if `P = T1 | T2` or `P = T1 & T2`, checkable(X, T1) && checkable(X, T2). * 7. if `P` is a refinement type, "it's a refinement type" * 8. if `P` is a local class which is not statically reachable from the scope where `X` is defined, "it's a local class" - * 9. otherwise, "" + * 9. if `X` is `T1 | T2`, checkable(T1, P) && checkable(T2, P). + * 10. 
otherwise, "" */ def whyUncheckable(X: Type, P: Type, span: Span)(using Context): String = atPhase(Phases.refchecksPhase.next) { extension (inline s1: String) inline def &&(inline s2: String): String = if s1 == "" then s2 else s1 @@ -129,7 +132,8 @@ object TypeTestsCasts { } - def recur(X: Type, P: Type): String = (X <:< P) ||| (P.dealias match { + def recur(X: Type, P: Type): String = trace(s"recur(${X.show}, ${P.show})") { + (X <:< P) ||| P.dealias.match case _: SingletonType => "" case _: TypeProxy if isAbstract(P) => i"it refers to an abstract type member or type parameter" @@ -138,7 +142,7 @@ object TypeTestsCasts { case defn.ArrayOf(tpE) => recur(tpE, tpT) case _ => recur(defn.AnyType, tpT) } - case tpe: AppliedType => + case tpe @ AppliedType(tycon, targs) => X.widenDealias match { case OrType(tp1, tp2) => // This case is required to retrofit type inference, @@ -147,10 +151,10 @@ object TypeTestsCasts { // - T1 & T2 <:< T3 // See TypeComparer#either recur(tp1, P) && recur(tp2, P) - case _ => + + case x => // always false test warnings are emitted elsewhere - X.classSymbol.exists && P.classSymbol.exists && - !X.classSymbol.asClass.mayHaveCommonChild(P.classSymbol.asClass) + TypeComparer.provablyDisjoint(x, tpe.derivedAppliedType(tycon, targs.map(_ => WildcardType))) || typeArgsTrivial(X, tpe) ||| i"its type arguments can't be determined from $X" } @@ -164,7 +168,7 @@ object TypeTestsCasts { if P.classSymbol.isLocal && foundClasses(X).exists(P.classSymbol.isInaccessibleChildOf) => // 8 i"it's a local class" case _ => "" - }) + } val res = recur(X.widen, replaceP(P)) @@ -241,7 +245,7 @@ object TypeTestsCasts { val foundEffectiveClass = effectiveClass(expr.tpe.widen) if foundEffectiveClass.isPrimitiveValueClass && !testCls.isPrimitiveValueClass then - report.error(i"cannot test if value of $exprType is a reference of $testCls", tree.srcPos) + report.error(em"cannot test if value of $exprType is a reference of $testCls", tree.srcPos) false else 
foundClasses.exists(check) end checkSensical @@ -302,8 +306,8 @@ object TypeTestsCasts { /** Transform isInstanceOf * - * expr.isInstanceOf[A | B] ~~> expr.isInstanceOf[A] | expr.isInstanceOf[B] - * expr.isInstanceOf[A & B] ~~> expr.isInstanceOf[A] & expr.isInstanceOf[B] + * expr.isInstanceOf[A | B] ~~> expr.isInstanceOf[A] | expr.isInstanceOf[B] + * expr.isInstanceOf[A & B] ~~> expr.isInstanceOf[A] & expr.isInstanceOf[B] * expr.isInstanceOf[Tuple] ~~> scala.runtime.Tuples.isInstanceOfTuple(expr) * expr.isInstanceOf[EmptyTuple] ~~> scala.runtime.Tuples.isInstanceOfEmptyTuple(expr) * expr.isInstanceOf[NonEmptyTuple] ~~> scala.runtime.Tuples.isInstanceOfNonEmptyTuple(expr) @@ -345,7 +349,7 @@ object TypeTestsCasts { val testWidened = testType.widen defn.untestableClasses.find(testWidened.isRef(_)) match case Some(untestable) => - report.error(i"$untestable cannot be used in runtime type tests", tree.srcPos) + report.error(em"$untestable cannot be used in runtime type tests", tree.srcPos) constant(expr, Literal(Constant(false))) case _ => val erasedTestType = erasure(testType) @@ -359,7 +363,7 @@ object TypeTestsCasts { if !isTrusted && !isUnchecked then val whyNot = whyUncheckable(expr.tpe, argType, tree.span) if whyNot.nonEmpty then - report.uncheckedWarning(i"the type test for $argType cannot be checked at runtime because $whyNot", expr.srcPos) + report.uncheckedWarning(em"the type test for $argType cannot be checked at runtime because $whyNot", expr.srcPos) transformTypeTest(expr, argType, flagUnrelated = enclosingInlineds.isEmpty) // if test comes from inlined code, dont't flag it even if it always false } diff --git a/compiler/src/dotty/tools/dotc/transform/TypeUtils.scala b/compiler/src/dotty/tools/dotc/transform/TypeUtils.scala index 5b6e36343379..a897503ef275 100644 --- a/compiler/src/dotty/tools/dotc/transform/TypeUtils.scala +++ b/compiler/src/dotty/tools/dotc/transform/TypeUtils.scala @@ -76,7 +76,7 @@ object TypeUtils { case AndType(tp1, tp2) => // We 
assume that we have the following property: // (T1, T2, ..., Tn) & (U1, U2, ..., Un) = (T1 & U1, T2 & U2, ..., Tn & Un) - tp1.tupleElementTypes.zip(tp2.tupleElementTypes).map { case (t1, t2) => t1 & t2 } + tp1.tupleElementTypes.zip(tp2.tupleElementTypes).map { case (t1, t2) => t1.intersect(t2) } case OrType(tp1, tp2) => None // We can't combine the type of two tuples case _ => diff --git a/compiler/src/dotty/tools/dotc/transform/ValueClasses.scala b/compiler/src/dotty/tools/dotc/transform/ValueClasses.scala index a86bf2c48fb5..28d1255eaa72 100644 --- a/compiler/src/dotty/tools/dotc/transform/ValueClasses.scala +++ b/compiler/src/dotty/tools/dotc/transform/ValueClasses.scala @@ -22,15 +22,14 @@ object ValueClasses { } def isMethodWithExtension(sym: Symbol)(using Context): Boolean = - atPhaseNoLater(extensionMethodsPhase) { - val d = sym.denot - d.validFor.containsPhaseId(ctx.phaseId) && - d.isRealMethod && - isDerivedValueClass(d.owner) && - !d.isConstructor && - !d.symbol.isSuperAccessor && - !d.is(Macro) - } + val d = sym.denot.initial + d.validFor.firstPhaseId <= extensionMethodsPhase.id + && d.isRealMethod + && isDerivedValueClass(d.owner) + && !d.isConstructor + && !d.symbol.isSuperAccessor + && !d.isInlineMethod + && !d.is(Macro) /** The member of a derived value class that unboxes it. 
*/ def valueClassUnbox(cls: ClassSymbol)(using Context): Symbol = diff --git a/compiler/src/dotty/tools/dotc/transform/YCheckPositions.scala b/compiler/src/dotty/tools/dotc/transform/YCheckPositions.scala index ba42d826fe82..8080a7c911b3 100644 --- a/compiler/src/dotty/tools/dotc/transform/YCheckPositions.scala +++ b/compiler/src/dotty/tools/dotc/transform/YCheckPositions.scala @@ -61,6 +61,7 @@ class YCheckPositions extends Phase { private def isMacro(call: Tree)(using Context) = call.symbol.is(Macro) || + (call.symbol.isClass && call.tpe.derivesFrom(defn.MacroAnnotationClass)) || // The call of a macro after typer is encoded as a Select while other inlines are Ident // TODO remove this distinction once Inline nodes of expanded macros can be trusted (also in Inliner.inlineCallTrace) (!(ctx.phase <= postTyperPhase) && call.isInstanceOf[Select]) diff --git a/compiler/src/dotty/tools/dotc/transform/init/Cache.scala b/compiler/src/dotty/tools/dotc/transform/init/Cache.scala new file mode 100644 index 000000000000..c0391a05262d --- /dev/null +++ b/compiler/src/dotty/tools/dotc/transform/init/Cache.scala @@ -0,0 +1,201 @@ +package dotty.tools.dotc +package transform +package init + +import core.* +import Contexts.* + +import ast.tpd +import tpd.Tree + +/** The co-inductive cache used for analysis + * + * The cache contains two maps from `(Config, Tree)` to `Res`: + * + * - input cache (`this.last`) + * - output cache (`this.current`) + * + * The two caches are required because we want to make sure in a new iteration, + * an expression is evaluated exactly once. The monotonicity of the analysis + * ensures that the cache state goes up the lattice of the abstract domain, + * consequently the algorithm terminates. 
+ * + * The general skeleton for usage of the cache is as follows + * + * def analysis(entryExp: Expr) = { + * def iterate(entryExp: Expr)(using Cache) = + * eval(entryExp, initConfig) + * if cache.hasChanged && noErrors then + * cache.last = cache.current + * cache.current = Empty + * cache.changed = false + * iterate(entryExp) + * else + * reportErrors + * + * + * def eval(expr: Expr, config: Config)(using Cache) = + * cache.cachedEval(config, expr) { + * // Actual recursive evaluation of expression. + * // + * // Only executed if the entry `(exp, config)` is not in the output cache. + * } + * + * iterate(entryExp)(using new Cache) + * } + * + * See the documentation for the method `Cache.cachedEval` for more information. + * + * What goes to the configuration (`Config`) and what goes to the result (`Res`) + * need to be decided by the specific analysis and justified by reasoning about + * soundness. + * + * @tparam Config The analysis state that matters for evaluating an expression. + * @tparam Res The result from the evaluation the given expression. + */ +class Cache[Config, Res]: + import Cache.* + + /** The cache for expression values from last iteration */ + protected var last: ExprValueCache[Config, Res] = Map.empty + + /** The output cache for expression values + * + * The output cache is computed based on the cache values `last` from the + * last iteration. + * + * Both `last` and `current` are required to make sure an encountered + * expression is evaluated once in each iteration. + */ + protected var current: ExprValueCache[Config, Res] = Map.empty + + /** Whether the current heap is different from the last heap? + * + * `changed == false` implies that the fixed point has been reached. + */ + protected var changed: Boolean = false + + /** Whether any value in the output cache (this.current) was accessed + * after being added. If no cached values are used after they are added + * for the first time then another iteration of analysis is not needed. 
+ */ + protected var cacheUsed: Boolean = false + + /** Used to avoid allocation, its state does not matter */ + protected given MutableTreeWrapper = new MutableTreeWrapper + + def get(config: Config, expr: Tree): Option[Res] = + val res = current.get(config, expr) + cacheUsed = cacheUsed || res.nonEmpty + res + + /** Evaluate an expression with cache + * + * The algorithmic skeleton is as follows: + * + * if don't cache result then + * return eval(expr) + * if this.current.contains(config, expr) then + * return cached value + * else + * val assumed = this.last(config, expr) or bottom value if absent + * this.current(config, expr) = assumed + * val actual = eval(expr) + * + * if assumed != actual then + * this.changed = true + * this.current(config, expr) = actual + * + */ + def cachedEval(config: Config, expr: Tree, cacheResult: Boolean, default: Res)(eval: Tree => Res): Res = + if !cacheResult then + eval(expr) + else + this.get(config, expr) match + case Some(value) => value + case None => + val assumeValue: Res = + this.last.get(config, expr) match + case Some(value) => value + case None => + this.last = this.last.updatedNested(config, expr, default) + default + + this.current = this.current.updatedNested(config, expr, assumeValue) + + val actual = eval(expr) + if actual != assumeValue then + // println("Changed! from = " + assumeValue + ", to = " + actual) + this.changed = true + this.current = this.current.updatedNested(config, expr, actual) + // this.current = this.current.removed(config, expr) + end if + + actual + end if + end cachedEval + + def hasChanged = changed + + def isUsed = cacheUsed + + /** Prepare cache for the next iteration + * + * 1. Reset changed flag. + * + * 2. Use current cache as last cache and set current cache to be empty. 
+ */ + def prepareForNextIteration()(using Context) = + this.changed = false + this.cacheUsed = false + this.last = this.current + this.current = Map.empty +end Cache + +object Cache: + type ExprValueCache[Config, Res] = Map[Config, Map[TreeWrapper, Res]] + + /** A wrapper for trees for storage in maps based on referential equality of trees. */ + abstract class TreeWrapper: + def tree: Tree + + override final def equals(other: Any): Boolean = + other match + case that: TreeWrapper => this.tree eq that.tree + case _ => false + + override final def hashCode = tree.hashCode + + /** The immutable wrapper is intended to be stored as key in the heap. */ + class ImmutableTreeWrapper(val tree: Tree) extends TreeWrapper + + /** For queries on the heap, reuse the same wrapper to avoid unnecessary allocation. + * + * A `MutableTreeWrapper` is only ever used temporarily for querying a map, + * and is never inserted to the map. + */ + class MutableTreeWrapper extends TreeWrapper: + var queryTree: Tree | Null = null + def tree: Tree = queryTree match + case tree: Tree => tree + case null => ??? 
+ + extension [Config, Res](cache: ExprValueCache[Config, Res]) + def get(config: Config, expr: Tree)(using queryWrapper: MutableTreeWrapper): Option[Res] = + queryWrapper.queryTree = expr + cache.get(config).flatMap(_.get(queryWrapper)) + + def removed(config: Config, expr: Tree)(using queryWrapper: MutableTreeWrapper) = + queryWrapper.queryTree = expr + val innerMap2 = cache(config).removed(queryWrapper) + cache.updated(config, innerMap2) + + def updatedNested(config: Config, expr: Tree, result: Res): ExprValueCache[Config, Res] = + val wrapper = new ImmutableTreeWrapper(expr) + updatedNestedWrapper(config, wrapper, result) + + def updatedNestedWrapper(config: Config, wrapper: ImmutableTreeWrapper, result: Res): ExprValueCache[Config, Res] = + val innerMap = cache.getOrElse(config, Map.empty[TreeWrapper, Res]) + val innerMap2 = innerMap.updated(wrapper, result) + cache.updated(config, innerMap2) + end extension diff --git a/compiler/src/dotty/tools/dotc/transform/init/Errors.scala b/compiler/src/dotty/tools/dotc/transform/init/Errors.scala index 7d92d2b2a921..366fd6be96a2 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Errors.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Errors.scala @@ -5,109 +5,64 @@ package init import ast.tpd._ import core._ -import util.SourcePosition import util.Property -import Decorators._, printing.SyntaxHighlighting +import util.SourcePosition import Types._, Symbols._, Contexts._ -import scala.collection.mutable +import Trace.Trace object Errors: private val IsFromPromotion = new Property.Key[Boolean] sealed trait Error: - def trace: Seq[Tree] + def trace: Trace def show(using Context): String - def pos(using Context): SourcePosition = trace.last.sourcePos + def pos(using Context): SourcePosition = Trace.position(using trace).sourcePos def stacktrace(using Context): String = val preamble: String = if ctx.property(IsFromPromotion).nonEmpty then " Promotion trace:\n" else " Calling trace:\n" - buildStacktrace(trace, 
preamble) + Trace.buildStacktrace(trace, preamble) def issue(using Context): Unit = report.warning(show, this.pos) end Error - def buildStacktrace(trace: Seq[Tree], preamble: String)(using Context): String = if trace.isEmpty then "" else preamble + { - var lastLineNum = -1 - var lines: mutable.ArrayBuffer[String] = new mutable.ArrayBuffer - trace.foreach { tree => - val pos = tree.sourcePos - val prefix = "-> " - val line = - if pos.source.exists then - val loc = "[ " + pos.source.file.name + ":" + (pos.line + 1) + " ]" - val code = SyntaxHighlighting.highlight(pos.lineContent.trim.nn) - i"$code\t$loc" - else - tree.show - val positionMarkerLine = - if pos.exists && pos.source.exists then - positionMarker(pos) - else "" - - // always use the more precise trace location - if lastLineNum == pos.line then - lines.dropRightInPlace(1) - - lines += (prefix + line + "\n" + positionMarkerLine) - - lastLineNum = pos.line - } - val sb = new StringBuilder - for line <- lines do sb.append(line) - sb.toString - } - - /** Used to underline source positions in the stack trace - * pos.source must exist - */ - private def positionMarker(pos: SourcePosition): String = - val trimmed = pos.lineContent.takeWhile(c => c.isWhitespace).length - val padding = pos.startColumnPadding.substring(trimmed).nn + " " - val carets = - if (pos.startLine == pos.endLine) - "^" * math.max(1, pos.endColumn - pos.startColumn) - else "^" - - s"$padding$carets\n" - override def toString() = this.getClass.getName.nn /** Access non-initialized field */ - case class AccessNonInit(field: Symbol)(val trace: Seq[Tree]) extends Error: - def source: Tree = trace.last + case class AccessNonInit(field: Symbol)(val trace: Trace) extends Error: + def source: Tree = Trace.position(using trace) def show(using Context): String = "Access non-initialized " + field.show + "." 
+ stacktrace override def pos(using Context): SourcePosition = field.sourcePos /** Promote a value under initialization to fully-initialized */ - case class PromoteError(msg: String)(val trace: Seq[Tree]) extends Error: + case class PromoteError(msg: String)(val trace: Trace) extends Error: def show(using Context): String = msg + stacktrace - case class AccessCold(field: Symbol)(val trace: Seq[Tree]) extends Error: + case class AccessCold(field: Symbol)(val trace: Trace) extends Error: def show(using Context): String = - "Access field " + field.show + " on a cold object." + stacktrace + "Access field " + field.show + " on an uninitialized (Cold) object." + stacktrace - case class CallCold(meth: Symbol)(val trace: Seq[Tree]) extends Error: + case class CallCold(meth: Symbol)(val trace: Trace) extends Error: def show(using Context): String = - "Call method " + meth.show + " on a cold object." + stacktrace + "Call method " + meth.show + " on an uninitialized (Cold) object." + stacktrace - case class CallUnknown(meth: Symbol)(val trace: Seq[Tree]) extends Error: + case class CallUnknown(meth: Symbol)(val trace: Trace) extends Error: def show(using Context): String = val prefix = if meth.is(Flags.Method) then "Calling the external method " else "Accessing the external field" prefix + meth.show + " may cause initialization errors." 
+ stacktrace /** Promote a value under initialization to fully-initialized */ - case class UnsafePromotion(msg: String, error: Error)(val trace: Seq[Tree]) extends Error: + case class UnsafePromotion(msg: String, error: Error)(val trace: Trace) extends Error: def show(using Context): String = msg + stacktrace + "\n" + - "Promoting the value to hot (transitively initialized) failed due to the following problem:\n" + { + "Promoting the value to transitively initialized (Hot) failed due to the following problem:\n" + { val ctx2 = ctx.withProperty(IsFromPromotion, Some(true)) error.show(using ctx2) } @@ -116,7 +71,7 @@ object Errors: * * Invariant: argsIndices.nonEmpty */ - case class UnsafeLeaking(error: Error, nonHotOuterClass: Symbol, argsIndices: List[Int])(val trace: Seq[Tree]) extends Error: + case class UnsafeLeaking(error: Error, nonHotOuterClass: Symbol, argsIndices: List[Int])(val trace: Trace) extends Error: def show(using Context): String = "Problematic object instantiation: " + argumentInfo() + stacktrace + "\n" + "It leads to the following error during object initialization:\n" + @@ -141,5 +96,5 @@ object Errors: acc + text2 } val verb = if multiple then " are " else " is " - val adjective = "not hot (transitively initialized)." + val adjective = "not transitively initialized (Hot)." subject + verb + adjective diff --git a/compiler/src/dotty/tools/dotc/transform/init/Semantic.scala b/compiler/src/dotty/tools/dotc/transform/init/Semantic.scala index a48aa77fe79f..4548dccb598f 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Semantic.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Semantic.scala @@ -15,10 +15,19 @@ import config.Printers.init as printer import reporting.trace as log import Errors.* +import Trace.* +import Util.* +import Cache.* import scala.collection.mutable import scala.annotation.tailrec +/** + * Checks safe initialization of objects + * + * This algorithm cannot handle safe access of global object names. 
That part + * is handled by the check in `Objects` (@see Objects). + */ object Semantic: // ----- Domain definitions -------------------------------- @@ -55,16 +64,18 @@ object Semantic: sealed abstract class Value: def show(using Context): String = this match case ThisRef(klass) => - "ThisRef[" + klass.show + "]" + "the original object of type (" + klass.show + ") where initialization checking started" case Warm(klass, outer, ctor, args) => val argsText = if args.nonEmpty then ", args = " + args.map(_.show).mkString("(", ", ", ")") else "" - "Warm[" + klass.show + "] { outer = " + outer.show + argsText + " }" + "a non-transitively initialized (Warm) object of type (" + klass.show + ") { outer = " + outer.show + argsText + " }" case Fun(expr, thisV, klass) => - "Fun { this = " + thisV.show + ", owner = " + klass.show + " }" + "a function where \"this\" is (" + thisV.show + ")" case RefSet(values) => values.map(_.show).mkString("Set { ", ", ", " }") - case _ => - this.toString() + case Hot => + "a transitively initialized (Hot) object" + case Cold => + "an uninitialized (Cold) object" def isHot = this == Hot def isCold = this == Cold @@ -117,7 +128,7 @@ object Semantic: assert(!populatingParams, "the object is already populating parameters") populatingParams = true val tpl = klass.defTree.asInstanceOf[TypeDef].rhs.asInstanceOf[Template] - extendTrace(klass.defTree) { this.callConstructor(ctor, args.map(arg => ArgInfo(arg, trace))) } + extendTrace(klass.defTree) { this.callConstructor(ctor, args.map(arg => new ArgInfo(arg, trace))) } populatingParams = false this } @@ -207,7 +218,7 @@ object Semantic: object Cache: /** Cache for expressions * - * Ref -> Tree -> Value + * Value -> Tree -> Value * * The first key is the value of `this` for the expression. * @@ -233,66 +244,27 @@ object Semantic: * that could be reused to check other classes. We employ this trick to * improve performance of the analysis. 
*/ - private type ExprValueCache = Map[Value, Map[TreeWrapper, Value]] /** The heap for abstract objects * - * The heap objects are immutable. - */ - private type Heap = Map[Ref, Objekt] - - /** A wrapper for trees for storage in maps based on referential equality of trees. */ - private abstract class TreeWrapper: - def tree: Tree - - override final def equals(other: Any): Boolean = - other match - case that: TreeWrapper => this.tree eq that.tree - case _ => false - - override final def hashCode = tree.hashCode - - /** The immutable wrapper is intended to be stored as key in the heap. */ - private class ImmutableTreeWrapper(val tree: Tree) extends TreeWrapper - - /** For queries on the heap, reuse the same wrapper to avoid unnecessary allocation. + * The heap objects are immutable and its values are essentially derived + * from the cache, thus they are not part of the configuration. * - * A `MutableTreeWrapper` is only ever used temporarily for querying a map, - * and is never inserted to the map. + * The only exception is the object correspond to `ThisRef`, where the + * object remembers the set of initialized fields. That information is reset + * in each iteration thus is harmless. */ - private class MutableTreeWrapper extends TreeWrapper: - var queryTree: Tree | Null = null - def tree: Tree = queryTree match - case tree: Tree => tree - case null => ??? - - class Cache: - /** The cache for expression values from last iteration */ - private var last: ExprValueCache = Map.empty + private type Heap = Map[Ref, Objekt] - /** The output cache for expression values - * - * The output cache is computed based on the cache values `last` from the - * last iteration. - * - * Both `last` and `current` are required to make sure an encountered - * expression is evaluated once in each iteration. 
- */ - private var current: ExprValueCache = Map.empty + class Data extends Cache[Value, Value]: /** Global cached values for expressions * * The values are only added when a fixed point is reached. * * It is intended to improve performance for computation related to warm values. */ - private var stable: ExprValueCache = Map.empty - - /** Whether the current heap is different from the last heap? - * - * `changed == false` implies that the fixed point has been reached. - */ - private var changed: Boolean = false + private var stable: ExprValueCache[Value, Value] = Map.empty /** Abstract heap stores abstract objects * @@ -320,77 +292,38 @@ object Semantic: /** Used to revert heap to last stable heap. */ private var heapStable: Heap = Map.empty - /** Used to avoid allocation, its state does not matter */ - private given MutableTreeWrapper = new MutableTreeWrapper - - def hasChanged = changed - - def get(value: Value, expr: Tree): Option[Value] = - current.get(value, expr) match - case None => stable.get(value, expr) + override def get(value: Value, expr: Tree): Option[Value] = + stable.get(value, expr) match + case None => super.get(value, expr) case res => res /** Backup the state of the cache * * All the shared data structures must be immutable. 
*/ - def backup(): Cache = - val cache = new Cache - cache.last = this.last - cache.current = this.current + def backup(): Data = + val cache = new Data cache.stable = this.stable cache.heap = this.heap cache.heapStable = this.heapStable cache.changed = this.changed + cache.last = this.last + cache.current = this.current cache /** Restore state from a backup */ - def restore(cache: Cache) = + def restore(cache: Data) = + this.changed = cache.changed this.last = cache.last this.current = cache.current this.stable = cache.stable this.heap = cache.heap this.heapStable = cache.heapStable - this.changed = cache.changed - - /** Copy the value of `(value, expr)` from the last cache to the current cache - * - * It assumes the value is `Hot` if it doesn't exist in the last cache. - * - * It updates the current caches if the values change. - * - * The two caches are required because we want to make sure in a new iteration, an expression is evaluated once. - */ - def assume(value: Value, expr: Tree, cacheResult: Boolean)(fun: => Value): Contextual[Value] = - val assumeValue: Value = - last.get(value, expr) match - case Some(value) => value - case None => - this.last = last.updatedNested(value, expr, Hot) - Hot - - this.current = current.updatedNested(value, expr, assumeValue) - - val actual = fun - if actual != assumeValue then - this.changed = true - this.current = this.current.updatedNested(value, expr, actual) - else - // It's tempting to cache the value in stable, but it's unsound. - // The reason is that the current value may depend on other values - // which might change. - // - // stable.put(value, expr, actual) - () - end if - - actual - end assume /** Commit current cache to stable cache. */ private def commitToStableCache() = for - (v, m) <- current + (v, m) <- this.current if v.isWarm // It's useless to cache value for ThisRef. (wrapper, res) <- m do @@ -404,10 +337,8 @@ object Semantic: * * 3. Revert heap to stable. 
*/ - def prepareForNextIteration()(using Context) = - this.changed = false - this.last = this.current - this.current = Map.empty + override def prepareForNextIteration()(using Context) = + super.prepareForNextIteration() this.heap = this.heapStable /** Prepare for checking next class @@ -421,15 +352,15 @@ object Semantic: * 4. Reset last cache. */ def prepareForNextClass()(using Context) = - if this.changed then - this.changed = false + if this.hasChanged then this.heap = this.heapStable else this.commitToStableCache() this.heapStable = this.heap - this.last = Map.empty - this.current = Map.empty + // reset changed and cache + super.prepareForNextIteration() + def updateObject(ref: Ref, obj: Objekt) = assert(!this.heapStable.contains(ref)) @@ -438,59 +369,19 @@ object Semantic: def containsObject(ref: Ref) = heap.contains(ref) def getObject(ref: Ref) = heap(ref) - end Cache - - extension (cache: ExprValueCache) - private def get(value: Value, expr: Tree)(using queryWrapper: MutableTreeWrapper): Option[Value] = - queryWrapper.queryTree = expr - cache.get(value).flatMap(_.get(queryWrapper)) - - private def removed(value: Value, expr: Tree)(using queryWrapper: MutableTreeWrapper) = - queryWrapper.queryTree = expr - val innerMap2 = cache(value).removed(queryWrapper) - cache.updated(value, innerMap2) - - private def updatedNested(value: Value, expr: Tree, result: Value): ExprValueCache = - val wrapper = new ImmutableTreeWrapper(expr) - updatedNestedWrapper(value, wrapper, result) - - private def updatedNestedWrapper(value: Value, wrapper: ImmutableTreeWrapper, result: Value): ExprValueCache = - val innerMap = cache.getOrElse(value, Map.empty[TreeWrapper, Value]) - val innerMap2 = innerMap.updated(wrapper, result) - cache.updated(value, innerMap2) - end extension - end Cache + end Data - import Cache.* + end Cache - inline def cache(using c: Cache): Cache = c + inline def cache(using c: Cache.Data): Cache.Data = c // ----- Checker State 
----------------------------------- /** The state that threads through the interpreter */ - type Contextual[T] = (Context, Trace, Promoted, Cache, Reporter) ?=> T + type Contextual[T] = (Context, Trace, Promoted, Cache.Data, Reporter) ?=> T // ----- Error Handling ----------------------------------- - object Trace: - opaque type Trace = Vector[Tree] - - val empty: Trace = Vector.empty - - extension (trace: Trace) - def add(node: Tree): Trace = trace :+ node - def toVector: Vector[Tree] = trace - - def show(using trace: Trace, ctx: Context): String = buildStacktrace(trace, "\n") - - def position(using trace: Trace): Tree = trace.last - type Trace = Trace.Trace - - import Trace.* - def trace(using t: Trace): Trace = t - inline def withTrace[T](t: Trace)(op: Trace ?=> T): T = op(using t) - inline def extendTrace[T](node: Tree)(using t: Trace)(op: Trace ?=> T): T = op(using t.add(node)) - /** Error reporting */ trait Reporter: def report(err: Error): Unit @@ -508,7 +399,7 @@ object Semantic: /** * Revert the cache to previous state. */ - def abort()(using Cache): Unit + def abort()(using Cache.Data): Unit def errors: List[Error] object Reporter: @@ -517,8 +408,8 @@ object Semantic: def errors = buf.toList def report(err: Error) = buf += err - class TryBufferedReporter(backup: Cache) extends BufferedReporter with TryReporter: - def abort()(using Cache): Unit = cache.restore(backup) + class TryBufferedReporter(backup: Cache.Data) extends BufferedReporter with TryReporter: + def abort()(using Cache.Data): Unit = cache.restore(backup) class ErrorFound(val error: Error) extends Exception class StopEarlyReporter extends Reporter: @@ -529,7 +420,7 @@ object Semantic: * The TryReporter cannot be thrown away: either `abort` must be called or * the errors must be reported. 
*/ - def errorsIn(fn: Reporter ?=> Unit)(using Cache): TryReporter = + def errorsIn(fn: Reporter ?=> Unit)(using Cache.Data): TryReporter = val reporter = new TryBufferedReporter(cache.backup()) fn(using reporter) reporter @@ -544,7 +435,7 @@ object Semantic: catch case ex: ErrorFound => ex.error :: Nil - def hasErrors(fn: Reporter ?=> Unit)(using Cache): Boolean = + def hasErrors(fn: Reporter ?=> Unit)(using Cache.Data): Boolean = val backup = cache.backup() val errors = stopEarly(fn) cache.restore(backup) @@ -581,7 +472,7 @@ object Semantic: def widenArg: Contextual[Value] = a match case _: Ref | _: Fun => - val hasError = Reporter.hasErrors { a.promote("Argument cannot be promoted to hot") } + val hasError = Reporter.hasErrors { a.promote("Argument is not provably transitively initialized (Hot)") } if hasError then Cold else Hot case RefSet(refs) => @@ -606,14 +497,14 @@ object Semantic: case _ => cache.getObject(ref) - def ensureObjectExists()(using Cache): ref.type = + def ensureObjectExists()(using Cache.Data): ref.type = if cache.containsObject(ref) then printer.println("object " + ref + " already exists") ref else ensureFresh() - def ensureFresh()(using Cache): ref.type = + def ensureFresh()(using Cache.Data): ref.type = val obj = Objekt(ref.klass, fields = Map.empty, outers = Map(ref.klass -> ref.outer)) printer.println("reset object " + ref) cache.updateObject(ref, obj) @@ -664,7 +555,7 @@ object Semantic: Hot case Cold => - val error = AccessCold(field)(trace.toVector) + val error = AccessCold(field)(trace) reporter.report(error) Hot @@ -689,11 +580,11 @@ object Semantic: val rhs = target.defTree.asInstanceOf[ValOrDefDef].rhs eval(rhs, ref, target.owner.asClass, cacheResult = true) else - val error = CallUnknown(field)(trace.toVector) + val error = CallUnknown(field)(trace) reporter.report(error) Hot else - val error = AccessNonInit(target)(trace.toVector) + val error = AccessNonInit(target)(trace) reporter.report(error) Hot else @@ -779,7 +670,7 @@ 
object Semantic: case Cold => promoteArgs() - val error = CallCold(meth)(trace.toVector) + val error = CallCold(meth)(trace) reporter.report(error) Hot @@ -818,9 +709,11 @@ object Semantic: // no source code available promoteArgs() // try promoting the receiver as last resort - val hasErrors = Reporter.hasErrors { ref.promote("try promote value to hot") } + val hasErrors = Reporter.hasErrors { + ref.promote(ref.show + " has no source code and is not provably transitively initialized (Hot).") + } if hasErrors then - val error = CallUnknown(target)(trace.toVector) + val error = CallUnknown(target)(trace) reporter.report(error) Hot else if target.exists then @@ -855,7 +748,7 @@ object Semantic: // init "fake" param fields for parameters of primary and secondary constructors def addParamsAsFields(args: List[Value], ref: Ref, ctorDef: DefDef) = val params = ctorDef.termParamss.flatten.map(_.symbol) - assert(args.size == params.size, "arguments = " + args.size + ", params = " + params.size) + assert(args.size == params.size, "arguments = " + args.size + ", params = " + params.size + ", ctor = " + ctor.show) for (param, value) <- params.zip(args) do ref.updateField(param, value) printer.println(param.show + " initialized with " + value) @@ -899,7 +792,7 @@ object Semantic: Hot else // no source code available - val error = CallUnknown(ctor)(trace.toVector) + val error = CallUnknown(ctor)(trace) reporter.report(error) Hot } @@ -922,7 +815,7 @@ object Semantic: yield i + 1 - val error = UnsafeLeaking(errors.head, nonHotOuterClass, indices)(trace.toVector) + val error = UnsafeLeaking(errors.head, nonHotOuterClass, indices)(trace) reporter.report(error) Hot else @@ -947,7 +840,7 @@ object Semantic: tryLeak(warm, NoSymbol, args2) case Cold => - val error = CallCold(ctor)(trace.toVector) + val error = CallCold(ctor)(trace) reporter.report(error) Hot @@ -1004,7 +897,7 @@ object Semantic: case Cold => Cold - case ref: Ref => eval(vdef.rhs, ref, enclosingClass) + case ref: Ref => 
eval(vdef.rhs, ref, enclosingClass, cacheResult = sym.is(Flags.Lazy)) case _ => report.error("[Internal error] unexpected this value when accessing local variable, sym = " + sym.show + ", thisValue = " + thisValue2.show + Trace.show, Trace.position) @@ -1078,7 +971,7 @@ object Semantic: case Hot => case Cold => - reporter.report(PromoteError(msg)(trace.toVector)) + reporter.report(PromoteError(msg)(trace)) case thisRef: ThisRef => val emptyFields = thisRef.nonInitFields() @@ -1086,7 +979,7 @@ object Semantic: promoted.promoteCurrent(thisRef) else val fields = "Non initialized field(s): " + emptyFields.map(_.show).mkString(", ") + "." - reporter.report(PromoteError(msg + "\n" + fields)(trace.toVector)) + reporter.report(PromoteError(msg + "\n" + fields)(trace)) case warm: Warm => if !promoted.contains(warm) then @@ -1100,13 +993,13 @@ object Semantic: val errors = Reporter.stopEarly { val res = { given Trace = Trace.empty - eval(body, thisV, klass) + eval(body, thisV, klass, cacheResult = true) } given Trace = Trace.empty.add(body) - res.promote("The function return value is not hot. Found = " + res.show + ".") + res.promote("Only transitively initialized (Hot) values can be returned by functions. The function " + fun.show + " returns " + res.show + ".") } if errors.nonEmpty then - reporter.report(UnsafePromotion(msg, errors.head)(trace.toVector)) + reporter.report(UnsafePromotion(msg, errors.head)(trace)) else promoted.add(fun) @@ -1147,7 +1040,7 @@ object Semantic: // // This invariant holds because of the Scala/Java/JVM restriction that we cannot use `this` in super constructor calls. 
if subClassSegmentHot && !isHotSegment then - report.error("[Internal error] Expect current segment to hot in promotion, current klass = " + klass.show + + report.error("[Internal error] Expect current segment to be transitively initialized (Hot) in promotion, current klass = " + klass.show + ", subclass = " + subClass.show + Trace.show, Trace.position) // If the outer and parameters of a class are all hot, then accessing fields and methods of the current @@ -1156,20 +1049,20 @@ object Semantic: if !isHotSegment then for member <- klass.info.decls do if member.isClass then - val error = PromoteError("Promotion cancelled as the value contains inner " + member.show + ".")(Vector.empty) + val error = PromoteError("Promotion cancelled as the value contains inner " + member.show + ".")(Trace.empty) reporter.report(error) else if !member.isType && !member.isConstructor && !member.is(Flags.Deferred) then given Trace = Trace.empty if member.is(Flags.Method, butNot = Flags.Accessor) then - val args = member.info.paramInfoss.flatten.map(_ => ArgInfo(Hot, Trace.empty)) + val args = member.info.paramInfoss.flatten.map(_ => new ArgInfo(Hot: Value, Trace.empty)) val res = warm.call(member, args, receiver = warm.klass.typeRef, superType = NoType) withTrace(trace.add(member.defTree)) { - res.promote("Cannot prove that the return value of " + member.show + " is hot. Found = " + res.show + ".") + res.promote("Could not verify that the return value of " + member.show + " is transitively initialized (Hot). It was found to be " + res.show + ".") } else val res = warm.select(member, receiver = warm.klass.typeRef) withTrace(trace.add(member.defTree)) { - res.promote("Cannot prove that the field " + member.show + " is hot. Found = " + res.show + ".") + res.promote("Could not verify that the field " + member.show + " is transitively initialized (Hot). 
It was found to be " + res.show + ".") } end for @@ -1189,7 +1082,7 @@ object Semantic: } if errors.isEmpty then Nil - else UnsafePromotion(msg, errors.head)(trace.toVector) :: Nil + else UnsafePromotion(msg, errors.head)(trace) :: Nil } end extension @@ -1212,7 +1105,7 @@ object Semantic: * * The class to be checked must be an instantiable concrete class. */ - private def checkClass(classSym: ClassSymbol)(using Cache, Context): Unit = + private def checkClass(classSym: ClassSymbol)(using Cache.Data, Context): Unit = val thisRef = ThisRef(classSym) val tpl = classSym.defTree.asInstanceOf[TypeDef].rhs.asInstanceOf[Template] @@ -1231,7 +1124,7 @@ object Semantic: log("checking " + classSym) { eval(tpl, thisRef, classSym) } reporter.errors.foreach(_.issue) - if cache.hasChanged && reporter.errors.isEmpty then + if cache.hasChanged && reporter.errors.isEmpty && cache.isUsed then // code to prepare cache and heap for next iteration cache.prepareForNextIteration() iterate() @@ -1246,16 +1139,16 @@ object Semantic: * Check the specified concrete classes */ def checkClasses(classes: List[ClassSymbol])(using Context): Unit = - given Cache() + given Cache.Data() for classSym <- classes if isConcreteClass(classSym) do checkClass(classSym) // ----- Semantic definition -------------------------------- + type ArgInfo = TraceValue[Value] - /** Utility definition used for better error-reporting of argument errors */ - case class ArgInfo(value: Value, trace: Trace): - def promote: Contextual[Unit] = withTrace(trace) { - value.promote("Cannot prove the method argument is hot. Only hot values are safe to leak.\nFound = " + value.show + ".") + extension (arg: ArgInfo) + def promote: Contextual[Unit] = withTrace(arg.trace) { + arg.value.promote("Could not verify that the method argument is transitively initialized (Hot). It was found to be " + arg.value.show + ". 
Only transitively initialized arguments may be passed to methods (except constructors).") } /** Evaluate an expression with the given value for `this` in a given class `klass` @@ -1279,10 +1172,7 @@ object Semantic: * @param cacheResult It is used to reduce the size of the cache. */ def eval(expr: Tree, thisV: Ref, klass: ClassSymbol, cacheResult: Boolean = false): Contextual[Value] = log("evaluating " + expr.show + ", this = " + thisV.show + " in " + klass.show, printer, (_: Value).show) { - cache.get(thisV, expr) match - case Some(value) => value - case None => - cache.assume(thisV, expr, cacheResult) { cases(expr, thisV, klass) } + cache.cachedEval(thisV, expr, cacheResult, default = Hot) { expr => cases(expr, thisV, klass) } } /** Evaluate a list of expressions */ @@ -1299,7 +1189,7 @@ object Semantic: else eval(arg.tree, thisV, klass) - argInfos += ArgInfo(res, trace.add(arg.tree)) + argInfos += new ArgInfo(res, trace.add(arg.tree)) } argInfos.toList @@ -1399,12 +1289,12 @@ object Semantic: eval(qual, thisV, klass) val res = eval(rhs, thisV, klass) extendTrace(expr) { - res.ensureHot("The RHS of reassignment must be hot. Found = " + res.show + ". ") + res.ensureHot("The RHS of reassignment must be transitively initialized (Hot). It was found to be " + res.show + ". ") } case id: Ident => val res = eval(rhs, thisV, klass) extendTrace(expr) { - res.ensureHot("The RHS of reassignment must be hot. Found = " + res.show + ". ") + res.ensureHot("The RHS of reassignment must be transitively initialized (Hot). It was found to be " + res.show + ". ") } case closureDef(ddef) => @@ -1427,14 +1317,14 @@ object Semantic: case Match(selector, cases) => val res = eval(selector, thisV, klass) extendTrace(selector) { - res.ensureHot("The value to be matched needs to be hot. Found = " + res.show + ". ") + res.ensureHot("The value to be matched needs to be transitively initialized (Hot). It was found to be " + res.show + ". 
") } eval(cases.map(_.body), thisV, klass).join case Return(expr, from) => val res = eval(expr, thisV, klass) extendTrace(expr) { - res.ensureHot("return expression must be hot. Found = " + res.show + ". ") + res.ensureHot("return expression must be transitively initialized (Hot). It was found to be " + res.show + ". ") } case WhileDo(cond, body) => @@ -1663,9 +1553,14 @@ object Semantic: // term arguments to B. That can only be done in a concrete class. val tref = typeRefOf(klass.typeRef.baseType(mixin).typeConstructor) val ctor = tref.classSymbol.primaryConstructor - if ctor.exists then extendTrace(superParent) { - superCall(tref, ctor, Nil, tasks) - } + if ctor.exists then + // The parameter check of traits comes late in the mixin phase. + // To avoid crash we supply hot values for erroneous parent calls. + // See tests/neg/i16438.scala. + val args: List[ArgInfo] = ctor.info.paramInfoss.flatten.map(_ => new ArgInfo(Hot, Trace.empty)) + extendTrace(superParent) { + superCall(tref, ctor, args, tasks) + } } // initialize super classes after outers are set @@ -1721,85 +1616,3 @@ object Semantic: traverseChildren(tp) traverser.traverse(tpt.tpe) - -// ----- Utility methods and extractors -------------------------------- - - def typeRefOf(tp: Type)(using Context): TypeRef = tp.dealias.typeConstructor match - case tref: TypeRef => tref - case hklambda: HKTypeLambda => typeRefOf(hklambda.resType) - - - opaque type Arg = Tree | ByNameArg - case class ByNameArg(tree: Tree) - - extension (arg: Arg) - def isByName = arg.isInstanceOf[ByNameArg] - def tree: Tree = arg match - case t: Tree => t - case ByNameArg(t) => t - - object Call: - - def unapply(tree: Tree)(using Context): Option[(Tree, List[List[Arg]])] = - tree match - case Apply(fn, args) => - val argTps = fn.tpe.widen match - case mt: MethodType => mt.paramInfos - val normArgs: List[Arg] = args.zip(argTps).map { - case (arg, _: ExprType) => ByNameArg(arg) - case (arg, _) => arg - } - unapply(fn) match - case 
Some((ref, args0)) => Some((ref, args0 :+ normArgs)) - case None => None - - case TypeApply(fn, targs) => - unapply(fn) - - case ref: RefTree if ref.tpe.widenSingleton.isInstanceOf[MethodicType] => - Some((ref, Nil)) - - case _ => None - - object NewExpr: - def unapply(tree: Tree)(using Context): Option[(TypeRef, New, Symbol, List[List[Arg]])] = - tree match - case Call(fn @ Select(newTree: New, init), argss) if init == nme.CONSTRUCTOR => - val tref = typeRefOf(newTree.tpe) - Some((tref, newTree, fn.symbol, argss)) - case _ => None - - object PolyFun: - def unapply(tree: Tree)(using Context): Option[Tree] = - tree match - case Block((cdef: TypeDef) :: Nil, Typed(NewExpr(tref, _, _, _), _)) - if tref.symbol.isAnonymousClass && tref <:< defn.PolyFunctionType - => - val body = cdef.rhs.asInstanceOf[Template].body - val apply = body.head.asInstanceOf[DefDef] - Some(apply.rhs) - case _ => - None - - extension (symbol: Symbol) def hasSource(using Context): Boolean = - !symbol.defTree.isEmpty - - def resolve(cls: ClassSymbol, sym: Symbol)(using Context): Symbol = log("resove " + cls + ", " + sym, printer, (_: Symbol).show) { - if (sym.isEffectivelyFinal || sym.isConstructor) sym - else sym.matchingMember(cls.appliedRef) - } - - private def isConcreteClass(cls: ClassSymbol)(using Context) = { - val instantiable: Boolean = - cls.is(Flags.Module) || - !cls.isOneOf(Flags.AbstractOrTrait) && { - // see `Checking.checkInstantiable` in typer - val tp = cls.appliedRef - val stp = SkolemType(tp) - val selfType = cls.givenSelfType.asSeenFrom(stp, cls) - !selfType.exists || stp <:< selfType - } - - // A concrete class may not be instantiated if the self type is not satisfied - instantiable && cls.enclosingPackageClass != defn.StdLibPatchesPackage.moduleClass - } diff --git a/compiler/src/dotty/tools/dotc/transform/init/Trace.scala b/compiler/src/dotty/tools/dotc/transform/init/Trace.scala new file mode 100644 index 000000000000..7dfbc0b6cfa5 --- /dev/null +++ 
b/compiler/src/dotty/tools/dotc/transform/init/Trace.scala @@ -0,0 +1,82 @@ +package dotty.tools.dotc +package transform +package init + +import core.* +import Contexts.* +import ast.tpd.* +import util.SourcePosition + +import Decorators._, printing.SyntaxHighlighting + +import scala.collection.mutable + +/** Logic related to evaluation trace for showing friendly error messages + * + * A trace is a sequence of program positions which tells the evaluation order + * that leads to an error. It is usually more informative than the stack trace + * by tracking the exact sub-expression in the trace instead of only methods. + */ +object Trace: + opaque type Trace = Vector[Tree] + + val empty: Trace = Vector.empty + + extension (trace: Trace) + def add(node: Tree): Trace = trace :+ node + def toVector: Vector[Tree] = trace + def ++(trace2: Trace): Trace = trace ++ trace2 + + def show(using trace: Trace, ctx: Context): String = buildStacktrace(trace, "\n") + + def position(using trace: Trace): Tree = trace.last + + def trace(using t: Trace): Trace = t + + inline def withTrace[T](t: Trace)(op: Trace ?=> T): T = op(using t) + + inline def extendTrace[T](node: Tree)(using t: Trace)(op: Trace ?=> T): T = op(using t.add(node)) + + def buildStacktrace(trace: Trace, preamble: String)(using Context): String = if trace.isEmpty then "" else preamble + { + var lastLineNum = -1 + var lines: mutable.ArrayBuffer[String] = new mutable.ArrayBuffer + trace.foreach { tree => + val pos = tree.sourcePos + val prefix = "-> " + val line = + if pos.source.exists then + val loc = "[ " + pos.source.file.name + ":" + (pos.line + 1) + " ]" + val code = SyntaxHighlighting.highlight(pos.lineContent.trim.nn) + i"$code\t$loc" + else + tree.show + val positionMarkerLine = + if pos.exists && pos.source.exists then + positionMarker(pos) + else "" + + // always use the more precise trace location + if lastLineNum == pos.line then + lines.dropRightInPlace(1) + + lines += (prefix + line + "\n" + 
positionMarkerLine) + + lastLineNum = pos.line + } + val sb = new StringBuilder + for line <- lines do sb.append(line) + sb.toString + } + + /** Used to underline source positions in the stack trace + * pos.source must exist + */ + private def positionMarker(pos: SourcePosition): String = + val trimmed = pos.lineContent.takeWhile(c => c.isWhitespace).length + val padding = pos.startColumnPadding.substring(trimmed).nn + " " + val carets = + if (pos.startLine == pos.endLine) + "^" * math.max(1, pos.endColumn - pos.startColumn) + else "^" + + s"$padding$carets\n" diff --git a/compiler/src/dotty/tools/dotc/transform/init/Util.scala b/compiler/src/dotty/tools/dotc/transform/init/Util.scala new file mode 100644 index 000000000000..ba2216504aef --- /dev/null +++ b/compiler/src/dotty/tools/dotc/transform/init/Util.scala @@ -0,0 +1,100 @@ +package dotty.tools.dotc +package transform +package init + +import core.* +import Contexts.* +import Types.* +import Symbols.* +import StdNames.* +import ast.tpd.* + +import reporting.trace as log +import config.Printers.init as printer + +import Trace.* + +object Util: + /** Utility definition used for better error-reporting of argument errors */ + case class TraceValue[T](value: T, trace: Trace) + + def typeRefOf(tp: Type)(using Context): TypeRef = tp.dealias.typeConstructor match + case tref: TypeRef => tref + case hklambda: HKTypeLambda => typeRefOf(hklambda.resType) + + + opaque type Arg = Tree | ByNameArg + case class ByNameArg(tree: Tree) + + extension (arg: Arg) + def isByName = arg.isInstanceOf[ByNameArg] + def tree: Tree = arg match + case t: Tree => t + case ByNameArg(t) => t + + object Call: + + def unapply(tree: Tree)(using Context): Option[(Tree, List[List[Arg]])] = + tree match + case Apply(fn, args) => + val argTps = fn.tpe.widen match + case mt: MethodType => mt.paramInfos + val normArgs: List[Arg] = args.zip(argTps).map { + case (arg, _: ExprType) => ByNameArg(arg) + case (arg, _) => arg + } + unapply(fn) match + case 
Some((ref, args0)) => Some((ref, args0 :+ normArgs)) + case None => None + + case TypeApply(fn, targs) => + unapply(fn) + + case ref: RefTree if ref.tpe.widenSingleton.isInstanceOf[MethodicType] => + Some((ref, Nil)) + + case _ => None + + object NewExpr: + def unapply(tree: Tree)(using Context): Option[(TypeRef, New, Symbol, List[List[Arg]])] = + tree match + case Call(fn @ Select(newTree: New, init), argss) if init == nme.CONSTRUCTOR => + val tref = typeRefOf(newTree.tpe) + Some((tref, newTree, fn.symbol, argss)) + case _ => None + + object PolyFun: + def unapply(tree: Tree)(using Context): Option[Tree] = + tree match + case Block((cdef: TypeDef) :: Nil, Typed(NewExpr(tref, _, _, _), _)) + if tref.symbol.isAnonymousClass && tref <:< defn.PolyFunctionType + => + val body = cdef.rhs.asInstanceOf[Template].body + val apply = body.head.asInstanceOf[DefDef] + Some(apply.rhs) + case _ => + None + + def resolve(cls: ClassSymbol, sym: Symbol)(using Context): Symbol = log("resove " + cls + ", " + sym, printer, (_: Symbol).show): + if sym.isEffectivelyFinal then sym + else sym.matchingMember(cls.appliedRef) + + extension (sym: Symbol) + def hasSource(using Context): Boolean = !sym.defTree.isEmpty + + def isStaticObject(using Context) = + sym.is(Flags.Module, butNot = Flags.Package) && sym.isStatic + + def isConcreteClass(cls: ClassSymbol)(using Context) = + val instantiable: Boolean = + cls.is(Flags.Module) || + !cls.isOneOf(Flags.AbstractOrTrait) && { + // see `Checking.checkInstantiable` in typer + val tp = cls.appliedRef + val stp = SkolemType(tp) + val selfType = cls.givenSelfType.asSeenFrom(stp, cls) + !selfType.exists || stp <:< selfType + } + + // A concrete class may not be instantiated if the self type is not satisfied + instantiable && cls.enclosingPackageClass != defn.StdLibPatchesPackage.moduleClass diff --git a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala index 
8e891f822255..eab65890c227 100644 --- a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala +++ b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala @@ -9,7 +9,7 @@ import TypeUtils._ import Contexts._ import Flags._ import ast._ -import Decorators._ +import Decorators.{ show => _, * } import Symbols._ import StdNames._ import NameOps._ @@ -22,9 +22,13 @@ import transform.SymUtils._ import reporting._ import config.Printers.{exhaustivity => debug} import util.{SrcPos, NoSourcePosition} -import collection.mutable -/** Space logic for checking exhaustivity and unreachability of pattern matching +import scala.annotation.internal.sharable +import scala.collection.mutable + +import SpaceEngine.* + +/* Space logic for checking exhaustivity and unreachability of pattern matching * * Space can be thought of as a set of possible values. A type or a pattern * both refer to spaces. The space of a type is the values that inhabit the @@ -53,9 +57,32 @@ import collection.mutable * */ - /** space definition */ -sealed trait Space +sealed trait Space: + + @sharable private val isSubspaceCache = mutable.HashMap.empty[Space, Boolean] + + def isSubspace(b: Space)(using Context): Boolean = + val a = this + val a2 = a.simplify + val b2 = b.simplify + if (a ne a2) || (b ne b2) then a2.isSubspace(b2) + else if a == Empty then true + else if b == Empty then false + else trace(s"isSubspace(${show(this)}, ${show(b)})", debug) { + isSubspaceCache.getOrElseUpdate(b, computeIsSubspace(a, b)) + } + + @sharable private var mySimplified: Space | Null = null + + def simplify(using Context): Space = + val simplified = mySimplified + if simplified == null then + val simplified = SpaceEngine.computeSimplify(this) + mySimplified = simplified + simplified + else simplified +end Space /** Empty space */ case object Empty extends Space @@ -66,7 +93,21 @@ case object Empty extends Space * @param decomposed: does the space result from decomposition? 
Used for pretty print * */ -case class Typ(tp: Type, decomposed: Boolean = true) extends Space +case class Typ(tp: Type, decomposed: Boolean = true) extends Space: + private var myDecompose: List[Typ] | Null = null + + def canDecompose(using Context): Boolean = decompose != ListOfTypNoType + + def decompose(using Context): List[Typ] = + val decompose = myDecompose + if decompose == null then + val decompose = tp match + case Parts(parts) => parts.map(Typ(_, decomposed = true)) + case _ => ListOfTypNoType + myDecompose = decompose + decompose + else decompose +end Typ /** Space representing an extractor pattern */ case class Prod(tp: Type, unappTp: TermRef, params: List[Space]) extends Space @@ -74,59 +115,28 @@ case class Prod(tp: Type, unappTp: TermRef, params: List[Space]) extends Space /** Union of spaces */ case class Or(spaces: Seq[Space]) extends Space -/** abstract space logic */ -trait SpaceLogic { - /** Is `tp1` a subtype of `tp2`? */ - def isSubType(tp1: Type, tp2: Type): Boolean - - /** True if we can assume that the two unapply methods are the same. - * That is, given the same parameter, they return the same result. - * - * We assume that unapply methods are pure, but the same method may - * be called with different prefixes, thus behaving differently. - */ - def isSameUnapply(tp1: TermRef, tp2: TermRef): Boolean - - /** Return a space containing the values of both types. - * - * The types should be atomic (non-decomposable) and unrelated (neither - * should be a subtype of the other). - */ - def intersectUnrelatedAtomicTypes(tp1: Type, tp2: Type): Space - - /** Is the type `tp` decomposable? i.e. all values of the type can be covered - * by its decomposed types. - * - * Abstract sealed class, OrType, Boolean and Java enums can be decomposed. 
- */ - def canDecompose(tp: Type): Boolean - - /** Return term parameter types of the extractor `unapp` */ - def signature(unapp: TermRef, scrutineeTp: Type, argLen: Int): List[Type] - - /** Get components of decomposable types */ - def decompose(tp: Type): List[Typ] - - /** Whether the extractor covers the given type */ - def covers(unapp: TermRef, scrutineeTp: Type, argLen: Int): Boolean +object SpaceEngine { + import tpd._ - /** Display space in string format */ - def show(sp: Space): String + def simplify(space: Space)(using Context): Space = space.simplify + def isSubspace(a: Space, b: Space)(using Context): Boolean = a.isSubspace(b) + def canDecompose(typ: Typ)(using Context): Boolean = typ.canDecompose + def decompose(typ: Typ)(using Context): List[Typ] = typ.decompose /** Simplify space such that a space equal to `Empty` becomes `Empty` */ - def simplify(space: Space)(using Context): Space = trace(s"simplify ${show(space)} --> ", debug, show)(space match { + def computeSimplify(space: Space)(using Context): Space = trace(s"simplify ${show(space)} --> ", debug, show)(space match { case Prod(tp, fun, spaces) => - val sps = spaces.map(simplify(_)) - if (sps.contains(Empty)) Empty - else if (canDecompose(tp) && decompose(tp).isEmpty) Empty - else Prod(tp, fun, sps) + val sps = spaces.mapconserve(simplify) + if sps.contains(Empty) then Empty + else if decompose(tp).isEmpty then Empty + else if sps eq spaces then space else Prod(tp, fun, sps) case Or(spaces) => - val spaces2 = spaces.map(simplify(_)).filter(_ != Empty) + val spaces2 = spaces.map(simplify).filter(_ != Empty) if spaces2.isEmpty then Empty - else if spaces2.lengthCompare(1) == 0 then spaces2.head - else Or(spaces2) - case Typ(tp, _) => - if (canDecompose(tp) && decompose(tp).isEmpty) Empty + else if spaces2.lengthIs == 1 then spaces2.head + else if spaces2.corresponds(spaces)(_ eq _) then space else Or(spaces2) + case typ: Typ => + if decompose(typ).isEmpty then Empty else space case _ => space }) 
@@ -164,119 +174,98 @@ trait SpaceLogic { List(space) } - /** Is `a` a subspace of `b`? Equivalent to `a - b == Empty`, but faster */ - def isSubspace(a: Space, b: Space)(using Context): Boolean = trace(s"isSubspace(${show(a)}, ${show(b)})", debug) { - def tryDecompose1(tp: Type) = canDecompose(tp) && isSubspace(Or(decompose(tp)), b) - def tryDecompose2(tp: Type) = canDecompose(tp) && isSubspace(a, Or(decompose(tp))) - - (simplify(a), simplify(b)) match { + /** Is `a` a subspace of `b`? Equivalent to `simplify(simplify(a) - simplify(b)) == Empty`, but faster */ + def computeIsSubspace(a: Space, b: Space)(using Context): Boolean = { + val a2 = simplify(a) + val b2 = simplify(b) + if (a ne a2) || (b ne b2) then isSubspace(a2, b2) + else (a, b) match { case (Empty, _) => true case (_, Empty) => false - case (Or(ss), _) => - ss.forall(isSubspace(_, b)) - case (Typ(tp1, _), Typ(tp2, _)) => + case (Or(ss), _) => ss.forall(isSubspace(_, b)) + case (a @ Typ(tp1, _), Or(ss)) => // optimization: don't go to subtraction too early + ss.exists(isSubspace(a, _)) + || canDecompose(a) && isSubspace(Or(decompose(a)), b) + case (_, Or(_)) => simplify(minus(a, b)) == Empty + case (a @ Typ(tp1, _), b @ Typ(tp2, _)) => isSubType(tp1, tp2) - || canDecompose(tp1) && tryDecompose1(tp1) - || canDecompose(tp2) && tryDecompose2(tp2) - case (Typ(tp1, _), Or(ss)) => // optimization: don't go to subtraction too early - ss.exists(isSubspace(a, _)) || tryDecompose1(tp1) - case (_, Or(_)) => - simplify(minus(a, b)) == Empty + || canDecompose(a) && isSubspace(Or(decompose(a)), b) + || canDecompose(b) && isSubspace(a, Or(decompose(b))) case (Prod(tp1, _, _), Typ(tp2, _)) => isSubType(tp1, tp2) case (Typ(tp1, _), Prod(tp2, fun, ss)) => isSubType(tp1, tp2) && covers(fun, tp1, ss.length) - && isSubspace(Prod(tp2, fun, signature(fun, tp2, ss.length).map(Typ(_, false))), b) + && isSubspace(Prod(tp2, fun, signature(fun, tp1, ss.length).map(Typ(_, false))), b) case (Prod(_, fun1, ss1), Prod(_, fun2, ss2)) 
=> - isSameUnapply(fun1, fun2) && ss1.zip(ss2).forall((isSubspace _).tupled) + isSameUnapply(fun1, fun2) && ss1.lazyZip(ss2).forall(isSubspace) } } /** Intersection of two spaces */ def intersect(a: Space, b: Space)(using Context): Space = trace(s"${show(a)} & ${show(b)}", debug, show) { - def tryDecompose1(tp: Type) = intersect(Or(decompose(tp)), b) - def tryDecompose2(tp: Type) = intersect(a, Or(decompose(tp))) - (a, b) match { case (Empty, _) | (_, Empty) => Empty case (_, Or(ss)) => Or(ss.map(intersect(a, _)).filter(_ ne Empty)) case (Or(ss), _) => Or(ss.map(intersect(_, b)).filter(_ ne Empty)) - case (Typ(tp1, _), Typ(tp2, _)) => - if (isSubType(tp1, tp2)) a - else if (isSubType(tp2, tp1)) b - else if (canDecompose(tp1)) tryDecompose1(tp1) - else if (canDecompose(tp2)) tryDecompose2(tp2) - else intersectUnrelatedAtomicTypes(tp1, tp2) - case (Typ(tp1, _), Prod(tp2, fun, ss)) => - if (isSubType(tp2, tp1)) b - else if (canDecompose(tp1)) tryDecompose1(tp1) - else if (isSubType(tp1, tp2)) a // problematic corner case: inheriting a case class - else intersectUnrelatedAtomicTypes(tp1, tp2) match - case Typ(tp, _) => Prod(tp, fun, ss) - case sp => sp - case (Prod(tp1, fun, ss), Typ(tp2, _)) => - if (isSubType(tp1, tp2)) a - else if (canDecompose(tp2)) tryDecompose2(tp2) - else if (isSubType(tp2, tp1)) a // problematic corner case: inheriting a case class - else intersectUnrelatedAtomicTypes(tp1, tp2) match - case Typ(tp, _) => Prod(tp, fun, ss) - case sp => sp - case (Prod(tp1, fun1, ss1), Prod(tp2, fun2, ss2)) => - if (!isSameUnapply(fun1, fun2)) intersectUnrelatedAtomicTypes(tp1, tp2) match - case Typ(tp, _) => Prod(tp, fun1, ss1) - case sp => sp - else if (ss1.zip(ss2).exists(p => simplify(intersect(p._1, p._2)) == Empty)) Empty - else Prod(tp1, fun1, ss1.zip(ss2).map((intersect _).tupled)) + case (a @ Typ(tp1, _), b @ Typ(tp2, _)) => + if isSubType(tp1, tp2) then a + else if isSubType(tp2, tp1) then b + else if canDecompose(a) then intersect(Or(decompose(a)), b) 
+ else if canDecompose(b) then intersect(a, Or(decompose(b))) + else intersectUnrelatedAtomicTypes(tp1, tp2)(a) + case (a @ Typ(tp1, _), Prod(tp2, fun, ss)) => + if isSubType(tp2, tp1) then b + else if canDecompose(a) then intersect(Or(decompose(a)), b) + else if isSubType(tp1, tp2) then a // problematic corner case: inheriting a case class + else intersectUnrelatedAtomicTypes(tp1, tp2)(b) + case (Prod(tp1, fun, ss), b @ Typ(tp2, _)) => + if isSubType(tp1, tp2) then a + else if canDecompose(b) then intersect(a, Or(decompose(b))) + else if isSubType(tp2, tp1) then a // problematic corner case: inheriting a case class + else intersectUnrelatedAtomicTypes(tp1, tp2)(a) + case (a @ Prod(tp1, fun1, ss1), Prod(tp2, fun2, ss2)) => + if !isSameUnapply(fun1, fun2) then intersectUnrelatedAtomicTypes(tp1, tp2)(a) + else if ss1.lazyZip(ss2).exists((a, b) => simplify(intersect(a, b)) == Empty) then Empty + else Prod(tp1, fun1, ss1.lazyZip(ss2).map(intersect)) } } /** The space of a not covered by b */ def minus(a: Space, b: Space)(using Context): Space = trace(s"${show(a)} - ${show(b)}", debug, show) { - def tryDecompose1(tp: Type) = minus(Or(decompose(tp)), b) - def tryDecompose2(tp: Type) = minus(a, Or(decompose(tp))) - (a, b) match { case (Empty, _) => Empty case (_, Empty) => a - case (Typ(tp1, _), Typ(tp2, _)) => - if (isSubType(tp1, tp2)) Empty - else if (canDecompose(tp1)) tryDecompose1(tp1) - else if (canDecompose(tp2)) tryDecompose2(tp2) + case (Or(ss), _) => Or(ss.map(minus(_, b))) + case (_, Or(ss)) => ss.foldLeft(a)(minus) + case (a @ Typ(tp1, _), b @ Typ(tp2, _)) => + if isSubType(tp1, tp2) then Empty + else if canDecompose(a) then minus(Or(decompose(a)), b) + else if canDecompose(b) then minus(a, Or(decompose(b))) else a - case (Typ(tp1, _), Prod(tp2, fun, ss)) => + case (a @ Typ(tp1, _), Prod(tp2, fun, ss)) => // rationale: every instance of `tp1` is covered by `tp2(_)` if isSubType(tp1, tp2) && covers(fun, tp1, ss.length) then minus(Prod(tp1, fun, signature(fun, 
tp1, ss.length).map(Typ(_, false))), b) - else if canDecompose(tp1) then - tryDecompose1(tp1) - else - a - case (Or(ss), _) => - Or(ss.map(minus(_, b))) - case (_, Or(ss)) => - ss.foldLeft(a)(minus) - case (Prod(tp1, fun, ss), Typ(tp2, _)) => + else if canDecompose(a) then minus(Or(decompose(a)), b) + else a + case (Prod(tp1, fun, ss), b @ Typ(tp2, _)) => // uncovered corner case: tp2 :< tp1, may happen when inheriting case class - if (isSubType(tp1, tp2)) - Empty - else if (simplify(a) == Empty) - Empty - else if (canDecompose(tp2)) - tryDecompose2(tp2) - else - a - case (Prod(tp1, fun1, ss1), Prod(tp2, fun2, ss2)) => - if (!isSameUnapply(fun1, fun2)) return a - if (fun1.symbol.name == nme.unapply && ss1.length != ss2.length) return a - - val range = (0 until ss1.size).toList + if isSubType(tp1, tp2) then Empty + else if simplify(a) == Empty then Empty + else if canDecompose(b) then minus(a, Or(decompose(b))) + else a + case (Prod(tp1, fun1, ss1), Prod(tp2, fun2, ss2)) + if !isSameUnapply(fun1, fun2) => a + case (Prod(tp1, fun1, ss1), Prod(tp2, fun2, ss2)) + if fun1.symbol.name == nme.unapply && ss1.length != ss2.length => a + case (a @ Prod(tp1, fun1, ss1), Prod(tp2, fun2, ss2)) => + val range = ss1.indices.toList val cache = Array.fill[Space | Null](ss2.length)(null) def sub(i: Int) = if cache(i) == null then cache(i) = minus(ss1(i), ss2(i)) cache(i).nn - end sub if range.exists(i => isSubspace(ss1(i), sub(i))) then a else if cache.forall(sub => isSubspace(sub.nn, Empty)) then Empty @@ -288,9 +277,6 @@ trait SpaceLogic { Or(spaces) } } -} - -object SpaceEngine { /** Is the unapply or unapplySeq irrefutable? 
* @param unapp The unapply function reference @@ -317,26 +303,42 @@ object SpaceEngine { case funRef: TermRef => isIrrefutable(funRef, argLen) case _: ErrorType => false } -} -/** Scala implementation of space logic */ -class SpaceEngine(using Context) extends SpaceLogic { - import tpd._ - - private val scalaSeqFactoryClass = defn.SeqFactoryClass - private val scalaListType = defn.ListClass.typeRef - private val scalaNilType = defn.NilModule.termRef - private val scalaConsType = defn.ConsClass.typeRef - - private val constantNullType = ConstantType(Constant(null)) + /** Is this an `'{..}` or `'[..]` irrefutable quoted patterns? + * @param unapp The unapply function tree + * @param implicits The implicits of the unapply + * @param pt The scrutinee type + */ + def isIrrefutableQuotedPattern(unapp: tpd.Tree, implicits: List[tpd.Tree], pt: Type)(using Context): Boolean = { + implicits.headOption match + // pattern '{ $x: T } + case Some(tpd.Apply(tpd.Select(tpd.Quote(tpd.TypeApply(fn, List(tpt)), _), nme.apply), _)) + if unapp.symbol.owner.eq(defn.QuoteMatching_ExprMatchModule) + && fn.symbol.eq(defn.QuotedRuntimePatterns_patternHole) => + pt <:< defn.QuotedExprClass.typeRef.appliedTo(tpt.tpe) + + // pattern '[T] + case Some(tpd.Apply(tpd.TypeApply(fn, List(tpt)), _)) + if unapp.symbol.owner.eq(defn.QuoteMatching_TypeMatchModule) => + pt =:= defn.QuotedTypeClass.typeRef.appliedTo(tpt.tpe) + + case _ => false + } - override def intersectUnrelatedAtomicTypes(tp1: Type, tp2: Type): Space = trace(s"atomic intersection: ${AndType(tp1, tp2).show}", debug) { + /** Return a space containing the values of both types. + * + * The types should be atomic (non-decomposable) and unrelated (neither + * should be a subtype of the other). + */ + def intersectUnrelatedAtomicTypes(tp1: Type, tp2: Type)(sp: Space)(using Context): Space = trace(i"atomic intersection: ${AndType(tp1, tp2)}", debug, show) { // Precondition: !isSubType(tp1, tp2) && !isSubType(tp2, tp1). 
if !ctx.mode.is(Mode.SafeNulls) && (tp1.isNullType || tp2.isNullType) then // Since projections of types don't include null, intersection with null is empty. Empty else - val intersection = Typ(AndType(tp1, tp2), decomposed = false) + val intersection = sp match + case sp: Prod => sp.copy(AndType(tp1, tp2)) + case _ => Typ(AndType(tp1, tp2), decomposed = false) // unrelated numeric value classes can equal each other, so let's not consider type space intersection empty if tp1.classSymbol.isNumericValueClass && tp2.classSymbol.isNumericValueClass then intersection else if isPrimToBox(tp1, tp2) || isPrimToBox(tp2, tp1) then intersection @@ -345,7 +347,7 @@ class SpaceEngine(using Context) extends SpaceLogic { } /** Return the space that represents the pattern `pat` */ - def project(pat: Tree): Space = pat match { + def project(pat: Tree)(using Context): Space = trace(i"project($pat ${pat.className} ${pat.tpe})", debug, show)(pat match { case Literal(c) => if (c.value.isInstanceOf[Symbol]) Typ(c.value.asInstanceOf[Symbol].termRef, decomposed = false) @@ -372,7 +374,7 @@ class SpaceEngine(using Context) extends SpaceLogic { val funRef = fun1.tpe.asInstanceOf[TermRef] if (fun.symbol.name == nme.unapplySeq) val (arity, elemTp, resultTp) = unapplySeqInfo(fun.tpe.widen.finalResultType, fun.srcPos) - if (fun.symbol.owner == scalaSeqFactoryClass && scalaListType.appliedTo(elemTp) <:< pat.tpe) + if (fun.symbol.owner == defn.SeqFactoryClass && defn.ListType.appliedTo(elemTp) <:< pat.tpe) // The exhaustivity and reachability logic already handles decomposing sum types (into its subclasses) // and product types (into its components). To get better counter-examples for patterns that are of type // List (or a super-type of list, like LinearSeq) we project them into spaces that use `::` and Nil. @@ -406,14 +408,14 @@ class SpaceEngine(using Context) extends SpaceLogic { case _ => // Pattern is an arbitrary expression; assume a skolem (i.e. 
an unknown value) of the pattern type Typ(pat.tpe.narrow, decomposed = false) - } + }) - private def project(tp: Type): Space = tp match { + private def project(tp: Type)(using Context): Space = tp match { case OrType(tp1, tp2) => Or(project(tp1) :: project(tp2) :: Nil) case tp => Typ(tp, decomposed = true) } - private def unapplySeqInfo(resTp: Type, pos: SrcPos): (Int, Type, Type) = { + private def unapplySeqInfo(resTp: Type, pos: SrcPos)(using Context): (Int, Type, Type) = { var resultTp = resTp var elemTp = unapplySeqTypeElemTp(resultTp) var arity = productArity(resultTp, pos) @@ -460,15 +462,14 @@ class SpaceEngine(using Context) extends SpaceLogic { * If `isValue` is true, then pattern-bound symbols are erased to its upper bound. * This is needed to avoid spurious unreachable warnings. See tests/patmat/i6197.scala. */ - private def erase(tp: Type, inArray: Boolean = false, isValue: Boolean = false): Type = trace(i"$tp erased to", debug) { + private def erase(tp: Type, inArray: Boolean = false, isValue: Boolean = false)(using Context): Type = + trace(i"erase($tp${if inArray then " inArray" else ""}${if isValue then " isValue" else ""})", debug)(tp match { + case tp @ AppliedType(tycon, args) if tycon.typeSymbol.isPatternBound => + WildcardType - tp match { case tp @ AppliedType(tycon, args) => - if tycon.typeSymbol.isPatternBound then return WildcardType - - val args2 = - if (tycon.isRef(defn.ArrayClass)) args.map(arg => erase(arg, inArray = true, isValue = false)) - else args.map(arg => erase(arg, inArray = false, isValue = false)) + val inArray = tycon.isRef(defn.ArrayClass) + val args2 = args.map(arg => erase(arg, inArray = inArray, isValue = false)) tp.derivedAppliedType(erase(tycon, inArray, isValue = false), args2) case tp @ OrType(tp1, tp2) => @@ -486,48 +487,49 @@ class SpaceEngine(using Context) extends SpaceLogic { else WildcardType case _ => tp - } - } + }) /** Space of the pattern: unapplySeq(a, b, c: _*) */ - def projectSeq(pats: List[Tree]): Space 
= { - if (pats.isEmpty) return Typ(scalaNilType, false) + def projectSeq(pats: List[Tree])(using Context): Space = { + if (pats.isEmpty) return Typ(defn.NilType, false) val (items, zero) = if (isWildcardStarArg(pats.last)) - (pats.init, Typ(scalaListType.appliedTo(pats.last.tpe.elemType), false)) + (pats.init, Typ(defn.ListType.appliedTo(pats.last.tpe.elemType), false)) else - (pats, Typ(scalaNilType, false)) + (pats, Typ(defn.NilType, false)) - val unapplyTp = scalaConsType.classSymbol.companionModule.termRef.select(nme.unapply) + val unapplyTp = defn.ConsType.classSymbol.companionModule.termRef.select(nme.unapply) items.foldRight[Space](zero) { (pat, acc) => - val consTp = scalaConsType.appliedTo(pats.head.tpe.widen) + val consTp = defn.ConsType.appliedTo(pats.head.tpe.widen) Prod(consTp, unapplyTp, project(pat) :: acc :: Nil) } } - def isPrimToBox(tp: Type, pt: Type): Boolean = + def isPrimToBox(tp: Type, pt: Type)(using Context): Boolean = tp.isPrimitiveValueType && (defn.boxedType(tp).classSymbol eq pt.classSymbol) - private val isSubspaceCache = mutable.HashMap.empty[(Space, Space, Context), Boolean] - - override def isSubspace(a: Space, b: Space)(using Context): Boolean = - isSubspaceCache.getOrElseUpdate((a, b, ctx), super.isSubspace(a, b)) - /** Is `tp1` a subtype of `tp2`? */ - def isSubType(tp1: Type, tp2: Type): Boolean = trace(i"$tp1 <:< $tp2", debug, show = true) { - if tp1 == constantNullType && !ctx.mode.is(Mode.SafeNulls) - then tp2 == constantNullType + def isSubType(tp1: Type, tp2: Type)(using Context): Boolean = trace(i"$tp1 <:< $tp2", debug, show = true) { + if tp1 == ConstantType(Constant(null)) && !ctx.mode.is(Mode.SafeNulls) + then tp2 == ConstantType(Constant(null)) else tp1 <:< tp2 } - def isSameUnapply(tp1: TermRef, tp2: TermRef): Boolean = + /** True if we can assume that the two unapply methods are the same. + * That is, given the same parameter, they return the same result. 
+ * + * We assume that unapply methods are pure, but the same method may + * be called with different prefixes, thus behaving differently. + */ + def isSameUnapply(tp1: TermRef, tp2: TermRef)(using Context): Boolean = // always assume two TypeTest[S, T].unapply are the same if they are equal in types (tp1.prefix.isStable && tp2.prefix.isStable || tp1.symbol == defn.TypeTest_unapply) && tp1 =:= tp2 - /** Parameter types of the case class type `tp`. Adapted from `unapplyPlan` in patternMatcher */ - def signature(unapp: TermRef, scrutineeTp: Type, argLen: Int): List[Type] = { + /** Return term parameter types of the extractor `unapp`. + * Parameter types of the case class type `tp`. Adapted from `unapplyPlan` in patternMatcher */ + def signature(unapp: TermRef, scrutineeTp: Type, argLen: Int)(using Context): List[Type] = { val unappSym = unapp.symbol // println("scrutineeTp = " + scrutineeTp.show) @@ -566,10 +568,10 @@ class SpaceEngine(using Context) extends SpaceLogic { if (isUnapplySeq) { val (arity, elemTp, resultTp) = unapplySeqInfo(resTp, unappSym.srcPos) - if (elemTp.exists) scalaListType.appliedTo(elemTp) :: Nil + if (elemTp.exists) defn.ListType.appliedTo(elemTp) :: Nil else { val sels = productSeqSelectors(resultTp, arity, unappSym.srcPos) - sels.init :+ scalaListType.appliedTo(sels.last) + sels.init :+ defn.ListType.appliedTo(sels.last) } } else { @@ -590,45 +592,48 @@ class SpaceEngine(using Context) extends SpaceLogic { } /** Whether the extractor covers the given type */ - def covers(unapp: TermRef, scrutineeTp: Type, argLen: Int): Boolean = - SpaceEngine.isIrrefutable(unapp, argLen) || unapp.symbol == defn.TypeTest_unapply && { + def covers(unapp: TermRef, scrutineeTp: Type, argLen: Int)(using Context): Boolean = + SpaceEngine.isIrrefutable(unapp, argLen) + || unapp.symbol == defn.TypeTest_unapply && { val AppliedType(_, _ :: tp :: Nil) = unapp.prefix.widen.dealias: @unchecked scrutineeTp <:< tp } + || unapp.symbol == defn.ClassTagClass_unapply && { + 
val AppliedType(_, tp :: Nil) = unapp.prefix.widen.dealias: @unchecked + scrutineeTp <:< tp + } /** Decompose a type into subspaces -- assume the type can be decomposed */ - def decompose(tp: Type): List[Typ] = - tp.dealias match { + def decompose(tp: Type)(using Context): List[Type] = trace(i"decompose($tp)", debug) { + def rec(tp: Type, mixins: List[Type]): List[Type] = tp.dealias match case AndType(tp1, tp2) => - def decomposeComponent(tpA: Type, tpB: Type): List[Typ] = - decompose(tpA).flatMap { - case Typ(tp, _) => - if tp <:< tpB then - Typ(tp, decomposed = true) :: Nil - else if tpB <:< tp then - Typ(tpB, decomposed = true) :: Nil - else if TypeComparer.provablyDisjoint(tp, tpB) then - Nil - else - Typ(AndType(tp, tpB), decomposed = true) :: Nil - } - - if canDecompose(tp1) then - decomposeComponent(tp1, tp2) - else - decomposeComponent(tp2, tp1) - - case OrType(tp1, tp2) => List(Typ(tp1, true), Typ(tp2, true)) - case tp if tp.isRef(defn.BooleanClass) => - List( - Typ(ConstantType(Constant(true)), true), - Typ(ConstantType(Constant(false)), true) - ) - case tp if tp.isRef(defn.UnitClass) => - Typ(ConstantType(Constant(())), true) :: Nil - case tp if tp.classSymbol.isAllOf(JavaEnumTrait) => - tp.classSymbol.children.map(sym => Typ(sym.termRef, true)) - case tp => + var tpB = tp2 + var parts = rec(tp1, tp2 :: mixins) + if parts == ListOfNoType then + tpB = tp1 + parts = rec(tp2, tp1 :: mixins) + if parts == ListOfNoType then ListOfNoType + else parts.collect: + case tp if tp <:< tpB => tp + case tp if tpB <:< tp => tpB + case tp if !TypeComparer.provablyDisjoint(tp, tpB) => AndType(tp, tpB) + + case OrType(tp1, tp2) => List(tp1, tp2) + case tp if tp.isRef(defn.BooleanClass) => List(ConstantType(Constant(true)), ConstantType(Constant(false))) + case tp if tp.isRef(defn.UnitClass) => ConstantType(Constant(())) :: Nil + case tp @ NamedType(Parts(parts), _) => parts.map(tp.derivedSelect) + case _: SingletonType => ListOfNoType + case tp if 
tp.classSymbol.isAllOf(JavaEnumTrait) => tp.classSymbol.children.map(_.termRef) + // the class of a java enum value is the enum class, so this must follow SingletonType to not loop infinitely + + case tp @ AppliedType(Parts(parts), targs) if tp.classSymbol.children.isEmpty => + // It might not obvious that it's OK to apply the type arguments of a parent type to child types. + // But this is guarded by `tp.classSymbol.children.isEmpty`, + // meaning we'll decompose to the same class, just not the same type. + // For instance, from i15029, `decompose((X | Y).Field[T]) = [X.Field[T], Y.Field[T]]`. + parts.map(tp.derivedAppliedType(_, targs)) + + case tp if tp.isDecomposableToChildren => def getChildren(sym: Symbol): List[Symbol] = sym.children.flatMap { child => if child eq sym then List(sym) // i3145: sealed trait Baz, val x = new Baz {}, Baz.children returns Baz... @@ -638,49 +643,53 @@ class SpaceEngine(using Context) extends SpaceLogic { else List(child) } val children = getChildren(tp.classSymbol) - debug.println(s"candidates for ${tp.show} : [${children.map(_.show).mkString(", ")}]") + debug.println(i"candidates for $tp : $children") val parts = children.map { sym => val sym1 = if (sym.is(ModuleClass)) sym.sourceModule else sym - val refined = TypeOps.refineUsingParent(tp, sym1) + val refined = TypeOps.refineUsingParent(tp, sym1, mixins) + debug.println(i"$sym1 refined to $refined") - debug.println(sym1.show + " refined to " + refined.show) + def inhabited(tp: Type): Boolean = tp.dealias match + case AndType(tp1, tp2) => !TypeComparer.provablyDisjoint(tp1, tp2) + case OrType(tp1, tp2) => inhabited(tp1) || inhabited(tp2) + case tp: RefinedType => inhabited(tp.parent) + case tp: TypeRef => inhabited(tp.prefix) + case _ => true - def inhabited(tp: Type): Boolean = - tp.dealias match { - case AndType(tp1, tp2) => !TypeComparer.provablyDisjoint(tp1, tp2) - case OrType(tp1, tp2) => inhabited(tp1) || inhabited(tp2) - case tp: RefinedType => inhabited(tp.parent) - case 
tp: TypeRef => inhabited(tp.prefix) - case _ => true - } - - if (inhabited(refined)) refined + if inhabited(refined) then refined else NoType - } filter(_.exists) + }.filter(_.exists) + debug.println(i"$tp decomposes to $parts") + parts - debug.println(s"${tp.show} decomposes to [${parts.map(_.show).mkString(", ")}]") + case _ => ListOfNoType + end rec - parts.map(Typ(_, true)) - } + rec(tp, Nil) + } - /** Abstract sealed types, or-types, Boolean and Java enums can be decomposed */ - def canDecompose(tp: Type): Boolean = - val res = tp.dealias match - case _: SingletonType => false - case _: OrType => true - case and: AndType => canDecompose(and.tp1) || canDecompose(and.tp2) - case _ => - val cls = tp.classSymbol - cls.is(Sealed) - && cls.isOneOf(AbstractOrTrait) - && !cls.hasAnonymousChild - && cls.children.nonEmpty - || cls.isAllOf(JavaEnumTrait) - || tp.isRef(defn.BooleanClass) - || tp.isRef(defn.UnitClass) - //debug.println(s"decomposable: ${tp.show} = $res") - res + extension (tp: Type) + /** A type is decomposable to children if it has a simple kind, it's sealed, + * abstract (or a trait) - so its not a sealed concrete class that can be instantiated on its own, + * has no anonymous children, which we wouldn't be able to name as counter-examples, + * but does have children. + * + * A sealed trait with no subclasses is considered not decomposable and thus is treated as an opaque type. + * A sealed trait with subclasses that then get removed after `refineUsingParent`, decomposes to the empty list. + * So that's why we consider whether a type has children. 
*/ + def isDecomposableToChildren(using Context): Boolean = + val cls = tp.classSymbol + tp.hasSimpleKind && cls.is(Sealed) && cls.isOneOf(AbstractOrTrait) && !cls.hasAnonymousChild && cls.children.nonEmpty + + val ListOfNoType = List(NoType) + val ListOfTypNoType = ListOfNoType.map(Typ(_, decomposed = true)) + + object Parts: + def unapply(tp: Type)(using Context): PartsExtractor = PartsExtractor(decompose(tp)) + + final class PartsExtractor(val get: List[Type]) extends AnyVal: + def isEmpty: Boolean = get == ListOfNoType /** Show friendly type name with current scope in mind * @@ -690,7 +699,7 @@ class SpaceEngine(using Context) extends SpaceLogic { * C --> C if current owner is C !!! * */ - def showType(tp: Type, showTypeArgs: Boolean = false): String = { + def showType(tp: Type, showTypeArgs: Boolean = false)(using Context): String = { val enclosingCls = ctx.owner.enclosingClass def isOmittable(sym: Symbol) = @@ -731,7 +740,7 @@ class SpaceEngine(using Context) extends SpaceLogic { } /** Whether the counterexample is satisfiable. The space is flattened and non-empty. 
*/ - def satisfiable(sp: Space): Boolean = { + def satisfiable(sp: Space)(using Context): Boolean = { def impossible: Nothing = throw new AssertionError("`satisfiable` only accepts flattened space.") def genConstraint(space: Space): List[(Type, Type)] = space match { @@ -762,10 +771,10 @@ class SpaceEngine(using Context) extends SpaceLogic { checkConstraint(genConstraint(sp))(using ctx.fresh.setNewTyperState()) } - def show(ss: Seq[Space]): String = ss.map(show).mkString(", ") + def showSpaces(ss: Seq[Space])(using Context): String = ss.map(show).mkString(", ") /** Display spaces */ - def show(s: Space): String = { + def show(s: Space)(using Context): String = { def params(tp: Type): List[Type] = tp.classSymbol.primaryConstructor.info.firstParamTypes /** does the companion object of the given symbol have custom unapply */ @@ -779,7 +788,7 @@ class SpaceEngine(using Context) extends SpaceLogic { case Empty => "empty" case Typ(c: ConstantType, _) => "" + c.value.value case Typ(tp: TermRef, _) => - if (flattenList && tp <:< scalaNilType) "" + if (flattenList && tp <:< defn.NilType) "" else tp.symbol.showName case Typ(tp, decomposed) => @@ -787,9 +796,9 @@ class SpaceEngine(using Context) extends SpaceLogic { if (ctx.definitions.isTupleNType(tp)) params(tp).map(_ => "_").mkString("(", ", ", ")") - else if (scalaListType.isRef(sym)) + else if (defn.ListType.isRef(sym)) if (flattenList) "_*" else "_: List" - else if (scalaConsType.isRef(sym)) + else if (defn.ConsType.isRef(sym)) if (flattenList) "_, _*" else "List(_, _*)" else if (tp.classSymbol.is(Sealed) && tp.classSymbol.hasAnonymousChild) "_: " + showType(tp) + " (anonymous)" @@ -801,7 +810,7 @@ class SpaceEngine(using Context) extends SpaceLogic { case Prod(tp, fun, params) => if (ctx.definitions.isTupleNType(tp)) "(" + params.map(doShow(_)).mkString(", ") + ")" - else if (tp.isRef(scalaConsType.symbol)) + else if (tp.isRef(defn.ConsType.symbol)) if (flattenList) params.map(doShow(_, 
flattenList)).filter(_.nonEmpty).mkString(", ") else params.map(doShow(_, flattenList = true)).filter(!_.isEmpty).mkString("List(", ", ", ")") else { @@ -817,7 +826,7 @@ class SpaceEngine(using Context) extends SpaceLogic { doShow(s, flattenList = false) } - private def exhaustivityCheckable(sel: Tree): Boolean = { + private def exhaustivityCheckable(sel: Tree)(using Context): Boolean = { val seen = collection.mutable.Set.empty[Type] // Possible to check everything, but be compatible with scalac by default @@ -846,8 +855,8 @@ class SpaceEngine(using Context) extends SpaceLogic { res } - /** Whehter counter-examples should be further checked? True for GADTs. */ - private def shouldCheckExamples(tp: Type): Boolean = + /** Whether counter-examples should be further checked? True for GADTs. */ + private def shouldCheckExamples(tp: Type)(using Context): Boolean = new TypeAccumulator[Boolean] { override def apply(b: Boolean, tp: Type): Boolean = tp match { case tref: TypeRef if tref.symbol.is(TypeParam) && variance != 1 => true @@ -858,7 +867,7 @@ class SpaceEngine(using Context) extends SpaceLogic { /** Return the underlying type of non-module, non-constant, non-enum case singleton types. * Also widen ExprType to its result type, and rewrap any annotation wrappers. * For example, with `val opt = None`, widen `opt.type` to `None.type`. 
*/ - def toUnderlying(tp: Type): Type = trace(i"toUnderlying($tp)", show = true)(tp match { + def toUnderlying(tp: Type)(using Context): Type = trace(i"toUnderlying($tp)", show = true)(tp match { case _: ConstantType => tp case tp: TermRef if tp.symbol.is(Module) => tp case tp: TermRef if tp.symbol.isAllOf(EnumCase) => tp @@ -868,16 +877,11 @@ class SpaceEngine(using Context) extends SpaceLogic { case _ => tp }) - def checkExhaustivity(_match: Match): Unit = { - val Match(sel, cases) = _match - debug.println(i"checking exhaustivity of ${_match}") - - if (!exhaustivityCheckable(sel)) return - - val selTyp = toUnderlying(sel.tpe).dealias + def checkExhaustivity(m: Match)(using Context): Unit = if exhaustivityCheckable(m.selector) then trace(i"checkExhaustivity($m)", debug) { + val selTyp = toUnderlying(m.selector.tpe).dealias debug.println(i"selTyp = $selTyp") - val patternSpace = Or(cases.foldLeft(List.empty[Space]) { (acc, x) => + val patternSpace = Or(m.cases.foldLeft(List.empty[Space]) { (acc, x) => val space = if (x.guard.isEmpty) project(x.pat) else Empty debug.println(s"${x.pat.show} ====> ${show(space)}") space :: acc @@ -894,10 +898,10 @@ class SpaceEngine(using Context) extends SpaceLogic { if uncovered.nonEmpty then val hasMore = uncovered.lengthCompare(6) > 0 val deduped = dedup(uncovered.take(6)) - report.warning(PatternMatchExhaustivity(show(deduped), hasMore), sel.srcPos) + report.warning(PatternMatchExhaustivity(showSpaces(deduped), hasMore), m.selector) } - private def redundancyCheckable(sel: Tree): Boolean = + private def redundancyCheckable(sel: Tree)(using Context): Boolean = // Ignore Expr[T] and Type[T] for unreachability as a special case. // Quote patterns produce repeated calls to the same unapply method, but with different implicit parameters. 
// Since we assume that repeated calls to the same unapply method overlap @@ -907,19 +911,15 @@ class SpaceEngine(using Context) extends SpaceLogic { && !sel.tpe.widen.isRef(defn.QuotedExprClass) && !sel.tpe.widen.isRef(defn.QuotedTypeClass) - def checkRedundancy(_match: Match): Unit = { - val Match(sel, _) = _match - val cases = _match.cases.toIndexedSeq - debug.println(i"checking redundancy in $_match") - - if (!redundancyCheckable(sel)) return + def checkRedundancy(m: Match)(using Context): Unit = if redundancyCheckable(m.selector) then trace(i"checkRedundancy($m)", debug) { + val cases = m.cases.toIndexedSeq - val selTyp = toUnderlying(sel.tpe).dealias + val selTyp = toUnderlying(m.selector.tpe).dealias debug.println(i"selTyp = $selTyp") val isNullable = selTyp.classSymbol.isNullableClass val targetSpace = if isNullable - then project(OrType(selTyp, constantNullType, soft = false)) + then project(OrType(selTyp, ConstantType(Constant(null)), soft = false)) else project(selTyp) debug.println(s"targetSpace: ${show(targetSpace)}") @@ -948,6 +948,7 @@ class SpaceEngine(using Context) extends SpaceLogic { for (pat <- deferred.reverseIterator) report.warning(MatchCaseUnreachable(), pat.srcPos) if pat != EmptyTree // rethrow case of catch uses EmptyTree + && !pat.symbol.isAllOf(SyntheticCase, butNot=Method) // ExpandSAMs default cases use SyntheticCase && isSubspace(covered, prev) then { val nullOnly = isNullable && i == len - 1 && isWildcardArg(pat) diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/AddLocalJSFakeNews.scala b/compiler/src/dotty/tools/dotc/transform/sjs/AddLocalJSFakeNews.scala index 8851e641122f..6471e58d4ddc 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/AddLocalJSFakeNews.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/AddLocalJSFakeNews.scala @@ -65,7 +65,7 @@ class AddLocalJSFakeNews extends MiniPhase { thisPhase => constant.typeValue.typeSymbol.asClass case _ => // this shouldn't happen - report.error(i"unexpected 
$classValueArg for the first argument to `createLocalJSClass`", classValueArg) + report.error(em"unexpected $classValueArg for the first argument to `createLocalJSClass`", classValueArg) jsdefn.JSObjectClass } diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/ExplicitJSClasses.scala b/compiler/src/dotty/tools/dotc/transform/sjs/ExplicitJSClasses.scala index 3c87621413b7..705b3cc404a8 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/ExplicitJSClasses.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/ExplicitJSClasses.scala @@ -651,7 +651,7 @@ class ExplicitJSClasses extends MiniPhase with InfoTransformer { thisPhase => case typeRef: TypeRef => typeRef case _ => // This should not have passed the checks in PrepJSInterop - report.error(i"class type required but found $tpe0", tree) + report.error(em"class type required but found $tpe0", tree) jsdefn.JSObjectType } val cls = tpe.typeSymbol @@ -667,7 +667,7 @@ class ExplicitJSClasses extends MiniPhase with InfoTransformer { thisPhase => val jsclassAccessor = jsclassAccessorFor(cls) ref(NamedType(prefix, jsclassAccessor.name, jsclassAccessor.denot)) } else { - report.error(i"stable reference to a JS class required but $tpe found", tree) + report.error(em"stable reference to a JS class required but $tpe found", tree) ref(defn.Predef_undefined) } } else if (isLocalJSClass(cls)) { diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/JSSymUtils.scala b/compiler/src/dotty/tools/dotc/transform/sjs/JSSymUtils.scala index 30eed76b18ec..115d41dd3d46 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/JSSymUtils.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/JSSymUtils.scala @@ -211,6 +211,23 @@ object JSSymUtils { } } } + + /** Tests whether the semantics of Scala.js require a field for this symbol, + * irrespective of any optimization we think we can do. 
+ * + * This is the case if one of the following is true: + * + * - it is a member of a JS type, since it needs to be visible as a JavaScript field + * - is is exported as static member of the companion class, since it needs to be visible as a JavaScript static field + * - it is exported to the top-level, since that can only be done as a true top-level variable, i.e., a field + */ + def sjsNeedsField(using Context): Boolean = + ctx.settings.scalajs.value && ( + sym.owner.isJSType + || sym.hasAnnotation(jsdefn.JSExportTopLevelAnnot) + || sym.hasAnnotation(jsdefn.JSExportStaticAnnot) + ) + end sjsNeedsField } private object JSUnaryOpMethodName { diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/JUnitBootstrappers.scala b/compiler/src/dotty/tools/dotc/transform/sjs/JUnitBootstrappers.scala index 817a6c5afabc..b911d7dfab96 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/JUnitBootstrappers.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/JUnitBootstrappers.scala @@ -13,6 +13,7 @@ import Scopes._ import Symbols._ import StdNames._ import Types._ +import Decorators.em import dotty.tools.dotc.transform.MegaPhase._ @@ -238,7 +239,7 @@ class JUnitBootstrappers extends MiniPhase { case NamedArg(name, _) => name.show(using ctx) case other => other.show(using ctx) } - report.error(s"$shownName is an unsupported argument for the JUnit @Test annotation in this position", other.sourcePos) + report.error(em"$shownName is an unsupported argument for the JUnit @Test annotation in this position", other.sourcePos) None } } diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala index b0de197635e9..25ab46712e70 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala @@ -189,7 +189,7 @@ object PrepJSExports { if (hasExplicitName) { annot.argumentConstantString(0).getOrElse { report.error( - 
s"The argument to ${annot.symbol.name} must be a literal string", + em"The argument to ${annot.symbol.name} must be a literal string", annot.arguments(0)) "dummy" } diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala index e75769147f80..a2f9a0fb45a3 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala @@ -93,6 +93,24 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP } } + private var dynamicImportEnclosingClasses: Set[Symbol] = Set.empty + + private def enterDynamicImportEnclosingClass[A](cls: Symbol)(body: => A): A = { + val saved = dynamicImportEnclosingClasses + dynamicImportEnclosingClasses = saved + cls + try + body + finally + dynamicImportEnclosingClasses = saved + } + + private def hasImplicitThisPrefixToDynamicImportEnclosingClass(tpe: Type)(using Context): Boolean = + tpe match + case tpe: ThisType => dynamicImportEnclosingClasses.contains(tpe.cls) + case TermRef(prefix, _) => hasImplicitThisPrefixToDynamicImportEnclosingClass(prefix) + case _ => false + end hasImplicitThisPrefixToDynamicImportEnclosingClass + /** DefDefs in class templates that export methods to JavaScript */ private val exporters = mutable.Map.empty[Symbol, mutable.ListBuffer[Tree]] @@ -248,9 +266,9 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP if (tpeSym.isJSType) { def reportError(reasonAndExplanation: String): Unit = { report.error( - "Using an anonymous function as a SAM for the JavaScript type " + - i"${tpeSym.fullName} is not allowed because " + - reasonAndExplanation, + em"Using an anonymous function as a SAM for the JavaScript type ${ + tpeSym.fullName + } is not allowed because $reasonAndExplanation", tree) } if (!tpeSym.is(Trait) || tpeSym.asClass.superClass != jsdefn.JSFunctionClass) { @@ -297,10 +315,15 @@ class 
PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP assert(currentOwner.isTerm, s"unexpected owner: $currentOwner at ${tree.sourcePos}") + val enclosingClass = currentOwner.enclosingClass + // new DynamicImportThunk { def apply(): Any = body } val dynamicImportThunkAnonClass = AnonClass(currentOwner, List(jsdefn.DynamicImportThunkType), span) { cls => val applySym = newSymbol(cls, nme.apply, Method, MethodType(Nil, Nil, defn.AnyType), coord = span).entered - val newBody = transform(body).changeOwnerAfter(currentOwner, applySym, thisPhase) + val transformedBody = enterDynamicImportEnclosingClass(enclosingClass) { + transform(body) + } + val newBody = transformedBody.changeOwnerAfter(currentOwner, applySym, thisPhase) val applyDefDef = DefDef(applySym, newBody) List(applyDefDef) } @@ -310,6 +333,14 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP .appliedToTypeTree(tpeArg) .appliedTo(dynamicImportThunkAnonClass) + // #17344 Make `ThisType`-based references to enclosing classes of `js.dynamicImport` explicit + case tree: Ident if hasImplicitThisPrefixToDynamicImportEnclosingClass(tree.tpe) => + def rec(tpe: Type): Tree = (tpe: @unchecked) match // exhaustive because of the `if ... 
=>` + case tpe: ThisType => This(tpe.cls) + case tpe @ TermRef(prefix, _) => rec(prefix).select(tpe.symbol) + + rec(tree.tpe).withSpan(tree.span) + // Compile-time errors and warnings for js.Dynamic.literal case Apply(Apply(fun, nameArgs), args) if fun.symbol == jsdefn.JSDynamicLiteral_applyDynamic || @@ -318,9 +349,9 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP nameArgs match { case List(Literal(Constant(s: String))) => if (s != "apply") - report.error(i"js.Dynamic.literal does not have a method named $s", tree) + report.error(em"js.Dynamic.literal does not have a method named $s", tree) case _ => - report.error(i"js.Dynamic.literal.${tree.symbol.name} may not be called directly", tree) + report.error(em"js.Dynamic.literal.${tree.symbol.name} may not be called directly", tree) } // TODO Warn for known duplicate property names @@ -381,7 +412,7 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP tpe.underlyingClassRef(refinementOK = false) match { case typeRef: TypeRef if typeRef.symbol.isOneOf(Trait | ModuleClass) => - report.error(i"non-trait class type required but $tpe found", tpeArg) + report.error(em"non-trait class type required but $tpe found", tpeArg) case _ => // an error was already reported above } @@ -440,7 +471,7 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP * which is never valid. 
*/ report.error( - i"${sym.name} extends ${parentSym.fullName} which does not extend js.Any.", + em"${sym.name} extends ${parentSym.fullName} which does not extend js.Any.", classDef) } } @@ -502,8 +533,8 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP def emitOverrideError(msg: String): Unit = { report.error( - "error overriding %s;\n %s %s".format( - infoStringWithLocation(overridden), infoString(overriding), msg), + em"""error overriding ${infoStringWithLocation(overridden)}; + | ${infoString(overriding)} $msg""", errorPos) } @@ -559,7 +590,7 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP for (annot <- sym.annotations) { val annotSym = annot.symbol if (isJSNativeLoadingSpecAnnot(annotSym)) - report.error(i"Traits may not have an @${annotSym.name} annotation.", annot.tree) + report.error(em"Traits may not have an @${annotSym.name} annotation.", annot.tree) } } else { checkJSNativeLoadSpecOf(treePos, sym) @@ -571,7 +602,7 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP def checkGlobalRefName(globalRef: String): Unit = { if (!JSGlobalRef.isValidJSGlobalRefName(globalRef)) - report.error(s"The name of a JS global variable must be a valid JS identifier (got '$globalRef')", pos) + report.error(em"The name of a JS global variable must be a valid JS identifier (got '$globalRef')", pos) } if (enclosingOwner is OwnerKind.JSNative) { @@ -585,7 +616,7 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP for (annot <- sym.annotations) { val annotSym = annot.symbol if (isJSNativeLoadingSpecAnnot(annotSym)) - report.error(i"Nested JS classes and objects cannot have an @${annotSym.name} annotation.", annot.tree) + report.error(em"Nested JS classes and objects cannot have an @${annotSym.name} annotation.", annot.tree) } if (sym.owner.isStaticOwner) { @@ -731,7 +762,7 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer 
{ thisP if (overriddenSymbols.hasNext) { val overridden = overriddenSymbols.next() val verb = if (overridden.is(Deferred)) "implement" else "override" - report.error(i"An @js.native member cannot $verb the inherited member ${overridden.fullName}", tree) + report.error(em"An @js.native member cannot $verb the inherited member ${overridden.fullName}", tree) } tree @@ -888,6 +919,9 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP report.error("A non-native JS trait cannot contain private members", tree) } else if (sym.is(Lazy)) { report.error("A non-native JS trait cannot contain lazy vals", tree) + } else if (sym.is(ParamAccessor)) { + // #12621 + report.error("A non-native JS trait cannot have constructor parameters", tree) } else if (!sym.is(Deferred)) { /* Tell the back-end not to emit this thing. In fact, this only * matters for mixed-in members created from this member. @@ -974,6 +1008,8 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP tree.rhs match { case sel: Select if sel.symbol == jsdefn.JSPackage_native => // ok + case rhs: Ident if rhs.symbol == jsdefn.JSPackage_native => + // ok case _ => val pos = if (tree.rhs != EmptyTree) tree.rhs.srcPos else tree.srcPos report.error(s"$longKindStr may only call js.native.", pos) @@ -982,7 +1018,7 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP // Check that the result type was explicitly specified // (This is stronger than Scala 2, which only warns, and only if it was inferred as Nothing.) 
if (tree.tpt.isInstanceOf[InferredTypeTree]) - report.error(i"The type of ${tree.name} must be explicitly specified because it is JS native.", tree) + report.error(em"The type of ${tree.name} must be explicitly specified because it is JS native.", tree) } private def checkJSNativeSpecificAnnotsOnNonJSNative(memberDef: MemberDef)(using Context): Unit = { @@ -1319,7 +1355,7 @@ object PrepJSInterop { for (annotation <- sym.annotations) { if (isCompilerAnnotation(annotation)) { report.error( - i"@${annotation.symbol.fullName} is for compiler internal use only. Do not use it yourself.", + em"@${annotation.symbol.fullName} is for compiler internal use only. Do not use it yourself.", annotation.tree) } } diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index 386bae6d5338..9c23b7e2024f 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -6,7 +6,6 @@ import core._ import ast.{Trees, tpd, untpd, desugar} import util.Stats.record import util.{SrcPos, NoSourcePosition} -import Trees.Untyped import Contexts._ import Flags._ import Symbols._ @@ -24,7 +23,7 @@ import Inferencing._ import reporting._ import transform.TypeUtils._ import transform.SymUtils._ -import Nullables._ +import Nullables._, NullOpsDecorator.* import config.Feature import collection.mutable @@ -47,7 +46,7 @@ object Applications { def extractorMemberType(tp: Type, name: Name, errorPos: SrcPos)(using Context): Type = { val ref = extractorMember(tp, name) if (ref.isOverloaded) - errorType(i"Overloaded reference to $ref is not allowed in extractor", errorPos) + errorType(em"Overloaded reference to $ref is not allowed in extractor", errorPos) ref.info.widenExpr.annotatedToRepeated } @@ -273,6 +272,7 @@ object Applications { else def selectGetter(qual: Tree): Tree = val getterDenot = qual.tpe.member(getterName) + .accessibleFrom(qual.tpe.widenIfUnstable, 
superAccess = true) // to reset Local if (getterDenot.exists) qual.select(TermRef(qual.tpe, getterName, getterDenot)) else EmptyTree if !meth.isClassConstructor then @@ -341,6 +341,12 @@ object Applications { val getter = findDefaultGetter(fn, n, testOnly) if getter.isEmpty then getter else spliceMeth(getter.withSpan(fn.span), fn) + + def retypeSignaturePolymorphicFn(fun: Tree, methType: Type)(using Context): Tree = + val sym1 = fun.symbol + val flags2 = sym1.flags | NonMember // ensures Select typing doesn't let TermRef#withPrefix revert the type + val sym2 = sym1.copy(info = methType, flags = flags2) // symbol not entered, to avoid overload resolution problems + fun.withType(sym2.termRef) } trait Applications extends Compatibility { @@ -438,10 +444,17 @@ trait Applications extends Compatibility { /** The function's type after widening and instantiating polytypes * with TypeParamRefs in constraint set */ - @threadUnsafe lazy val methType: Type = liftedFunType.widen match { - case funType: MethodType => funType - case funType: PolyType => instantiateWithTypeVars(funType) - case tp => tp //was: funType + @threadUnsafe lazy val methType: Type = { + def rec(t: Type): Type = { + t.widen match{ + case funType: MethodType => funType + case funType: PolyType => + rec(instantiateWithTypeVars(funType)) + case tp => tp + } + } + + rec(liftedFunType) } @threadUnsafe lazy val liftedFunType: Type = @@ -479,7 +492,7 @@ trait Applications extends Compatibility { matchArgs(orderedArgs, methType.paramInfos, 0) case _ => if (methType.isError) ok = false - else fail(s"$methString does not take parameters".toMessage) + else fail(em"$methString does not take parameters") } /** The application was successful */ @@ -491,7 +504,7 @@ trait Applications extends Compatibility { i"${err.refStr(methRef)}$infoStr" /** Re-order arguments to correctly align named arguments */ - def reorder[T >: Untyped](args: List[Trees.Tree[T]]): List[Trees.Tree[T]] = { + def reorder[T <: Untyped](args: 
List[Trees.Tree[T]]): List[Trees.Tree[T]] = { /** @param pnames The list of parameter names that are missing arguments * @param args The list of arguments that are not yet passed, or that are waiting to be dropped @@ -519,10 +532,10 @@ trait Applications extends Compatibility { else { // name not (or no longer) available for named arg def msg = if (methodType.paramNames contains aname) - s"parameter $aname of $methString is already instantiated" + em"parameter $aname of $methString is already instantiated" else - s"$methString does not have a parameter $aname" - fail(msg.toMessage, arg.asInstanceOf[Arg]) + em"$methString does not have a parameter $aname" + fail(msg, arg.asInstanceOf[Arg]) arg :: handleNamed(pnamesRest, args1, nameToArg, toDrop) } case arg :: args1 => @@ -548,7 +561,7 @@ trait Applications extends Compatibility { /** Is `sym` a constructor of a Java-defined annotation? */ def isJavaAnnotConstr(sym: Symbol): Boolean = - sym.is(JavaDefined) && sym.isConstructor && sym.owner.derivesFrom(defn.AnnotationClass) + sym.is(JavaDefined) && sym.isConstructor && sym.owner.is(JavaAnnotation) /** Match re-ordered arguments against formal parameters * @param n The position of the first parameter in formals in `methType`. @@ -564,7 +577,7 @@ trait Applications extends Compatibility { i"it is not the only argument to be passed to the corresponding repeated parameter $formal" else i"the corresponding parameter has type $formal which is not a repeated parameter type" - fail(em"Sequence argument type annotation `*` cannot be used here:\n$addendum".toMessage, arg) + fail(em"Sequence argument type annotation `*` cannot be used here:\n$addendum", arg) /** Add result of typing argument `arg` against parameter type `formal`. * @return The remaining formal parameter types. 
If the method is parameter-dependent @@ -648,10 +661,10 @@ trait Applications extends Compatibility { def msg = arg match case untpd.Tuple(Nil) if applyKind == ApplyKind.InfixTuple && funType.widen.isNullaryMethod => - i"can't supply unit value with infix notation because nullary $methString takes no arguments; use dotted invocation instead: (...).${methRef.name}()" + em"can't supply unit value with infix notation because nullary $methString takes no arguments; use dotted invocation instead: (...).${methRef.name}()" case _ => - i"too many arguments for $methString" - fail(msg.toMessage, arg) + em"too many arguments for $methString" + fail(msg, arg) case nil => } } @@ -708,8 +721,8 @@ trait Applications extends Compatibility { || argMatch == ArgMatch.CompatibleCAP && { val argtpe1 = argtpe.widen - val captured = captureWildcards(argtpe1) - (captured ne argtpe1) && isCompatible(captured, formal.widenExpr) + val captured = captureWildcardsCompat(argtpe1, formal.widenExpr) + captured ne argtpe1 } /** The type of the given argument */ @@ -754,7 +767,7 @@ trait Applications extends Compatibility { /** Subclass of Application for type checking an Apply node, where * types of arguments are either known or unknown. 
*/ - abstract class TypedApply[T >: Untyped]( + abstract class TypedApply[T <: Untyped]( app: untpd.Apply, fun: Tree, methRef: TermRef, args: List[Trees.Tree[T]], resultType: Type, override val applyKind: ApplyKind)(using Context) extends Application(methRef, fun.tpe, args, resultType) { @@ -831,7 +844,7 @@ trait Applications extends Compatibility { var typedArgs = typedArgBuf.toList def app0 = cpy.Apply(app)(normalizedFun, typedArgs) // needs to be a `def` because typedArgs can change later val app1 = - if (!success) app0.withType(UnspecifiedErrorType) + if (!success || typedArgs.exists(_.tpe.isError)) app0.withType(UnspecifiedErrorType) else { if !sameSeq(args, orderedArgs) && !isJavaAnnotConstr(methRef.symbol) @@ -937,6 +950,21 @@ trait Applications extends Compatibility { /** Type application where arguments come from prototype, and no implicits are inserted */ def simpleApply(fun1: Tree, proto: FunProto)(using Context): Tree = methPart(fun1).tpe match { + case funRef: TermRef if funRef.symbol.isSignaturePolymorphic => + // synthesize a method type based on the types at the call site. 
+ // one can imagine the original signature-polymorphic method as + // being infinitely overloaded, with each individual overload only + // being brought into existence as needed + val originalResultType = funRef.symbol.info.resultType.stripNull + val resultType = + if !originalResultType.isRef(defn.ObjectClass) then originalResultType + else AvoidWildcardsMap()(proto.resultType.deepenProtoTrans) match + case SelectionProto(nme.asInstanceOf_, PolyProto(_, resTp), _, _) => resTp + case resTp if isFullyDefined(resTp, ForceDegree.all) => resTp + case _ => defn.ObjectType + val methType = MethodType(proto.typedArgs().map(_.tpe.widen), resultType) + val fun2 = Applications.retypeSignaturePolymorphicFn(fun1, methType) + simpleApply(fun2, proto) case funRef: TermRef => val app = ApplyTo(tree, fun1, funRef, proto, pt) convertNewGenericArray( @@ -982,7 +1010,10 @@ trait Applications extends Compatibility { case TypeApply(fun, _) => !fun.isInstanceOf[Select] case _ => false } - typedDynamicApply(tree, isInsertedApply, pt) + val tree1 = fun1 match + case Select(_, nme.apply) => tree + case _ => untpd.Apply(fun1, tree.args) + typedDynamicApply(tree1, isInsertedApply, pt) case _ => if (originalProto.isDropped) fun1 else if (fun1.symbol == defn.Compiletime_summonFrom) @@ -1066,7 +1097,7 @@ trait Applications extends Compatibility { } else { val app = tree.fun match - case _: untpd.Splice if ctx.mode.is(Mode.QuotedPattern) => typedAppliedSplice(tree, pt) + case _: untpd.SplicePattern => typedAppliedSplice(tree, pt) case _ => realApply app match { case Apply(fn @ Select(left, _), right :: Nil) if fn.hasType => @@ -1097,7 +1128,7 @@ trait Applications extends Compatibility { /** Overridden in ReTyper to handle primitive operations that can be generated after erasure */ protected def handleUnexpectedFunType(tree: untpd.Apply, fun: Tree)(using Context): Tree = if ctx.reporter.errorsReported then - throw TypeError(i"unexpected function type: ${methPart(fun).tpe}") + throw 
TypeError(em"unexpected function type: ${methPart(fun).tpe}") else throw Error(i"unexpected type.\n fun = $fun,\n methPart(fun) = ${methPart(fun)},\n methPart(fun).tpe = ${methPart(fun).tpe},\n tpe = ${fun.tpe}") @@ -1105,8 +1136,8 @@ trait Applications extends Compatibility { for (case arg @ NamedArg(id, argtpt) <- args) yield { if !Feature.namedTypeArgsEnabled then report.error( - i"""Named type arguments are experimental, - |they must be enabled with a `experimental.namedTypeArguments` language import or setting""", + em"""Named type arguments are experimental, + |they must be enabled with a `experimental.namedTypeArguments` language import or setting""", arg.srcPos) val argtpt1 = typedType(argtpt) cpy.NamedArg(arg)(id, argtpt1).withType(argtpt1.tpe) @@ -1114,14 +1145,18 @@ trait Applications extends Compatibility { def typedTypeApply(tree: untpd.TypeApply, pt: Type)(using Context): Tree = { if (ctx.mode.is(Mode.Pattern)) - return errorTree(tree, "invalid pattern") + return errorTree(tree, em"invalid pattern") val isNamed = hasNamedArg(tree.args) val typedArgs = if (isNamed) typedNamedArgs(tree.args) else tree.args.mapconserve(typedType(_)) record("typedTypeApply") typedExpr(tree.fun, PolyProto(typedArgs, pt)) match { - case _: TypeApply if !ctx.isAfterTyper => - errorTree(tree, "illegal repeated type application") + case fun: TypeApply if !ctx.isAfterTyper => + val function = fun.fun + val args = (fun.args ++ tree.args).map(_.show).mkString(", ") + errorTree(tree, em"""illegal repeated type application + |You might have meant something like: + |${function}[${args}]""") case typedFn => typedFn.tpe.widen match { case pt: PolyType => @@ -1234,8 +1269,6 @@ trait Applications extends Compatibility { def typedUnApply(tree: untpd.Apply, selType: Type)(using Context): Tree = { record("typedUnApply") val Apply(qual, args) = tree - if !ctx.mode.is(Mode.InTypeTest) then - checkMatchable(selType, tree.srcPos, pattern = true) def notAnExtractor(tree: Tree): Tree = // prefer 
inner errors @@ -1374,12 +1407,13 @@ trait Applications extends Compatibility { val unapplyArgType = mt.paramInfos.head unapp.println(i"unapp arg tpe = $unapplyArgType, pt = $selType") val ownType = - if (selType <:< unapplyArgType) { + if selType <:< unapplyArgType then unapp.println(i"case 1 $unapplyArgType ${ctx.typerState.constraint}") fullyDefinedType(unapplyArgType, "pattern selector", tree.srcPos) selType.dropAnnot(defn.UncheckedAnnot) // need to drop @unchecked. Just because the selector is @unchecked, the pattern isn't. - } - else { + else + if !ctx.mode.is(Mode.InTypeTest) then + checkMatchable(selType, tree.srcPos, pattern = true) // We ignore whether constraining the pattern succeeded. // Constraining only fails if the pattern cannot possibly match, // but useless pattern checks detect more such cases, so we simply rely on them instead. @@ -1388,7 +1422,7 @@ trait Applications extends Compatibility { if (patternBound.nonEmpty) unapplyFn = addBinders(unapplyFn, patternBound) unapp.println(i"case 2 $unapplyArgType ${ctx.typerState.constraint}") unapplyArgType - } + val dummyArg = dummyTreeOfType(ownType) val unapplyApp = typedExpr(untpd.TypedSplice(Apply(unapplyFn, dummyArg :: Nil))) def unapplyImplicits(unapp: Tree): List[Tree] = { @@ -1397,7 +1431,7 @@ trait Applications extends Compatibility { case Apply(Apply(unapply, `dummyArg` :: Nil), args2) => assert(args2.nonEmpty); res ++= args2 case Apply(unapply, `dummyArg` :: Nil) => case Inlined(u, _, _) => loop(u) - case DynamicUnapply(_) => report.error("Structural unapply is not supported", unapplyFn.srcPos) + case DynamicUnapply(_) => report.error(em"Structural unapply is not supported", unapplyFn.srcPos) case Apply(fn, args) => assert(args.nonEmpty); loop(fn); res ++= args case _ => ().assertingErrorsReported } @@ -1502,11 +1536,17 @@ trait Applications extends Compatibility { } /** Drop any leading implicit parameter sections */ - def stripImplicit(tp: Type)(using Context): Type = tp match { + def 
stripImplicit(tp: Type, wildcardOnly: Boolean = false)(using Context): Type = tp match { case mt: MethodType if mt.isImplicitMethod => - stripImplicit(resultTypeApprox(mt)) + stripImplicit(resultTypeApprox(mt, wildcardOnly)) case pt: PolyType => - pt.derivedLambdaType(pt.paramNames, pt.paramInfos, stripImplicit(pt.resultType)).asInstanceOf[PolyType].flatten + pt.derivedLambdaType(pt.paramNames, pt.paramInfos, + stripImplicit(pt.resultType, wildcardOnly = true)) + // can't use TypeParamRefs for parameter references in `resultTypeApprox` + // since their bounds can refer to type parameters in `pt` that are not + // bound by the constraint. This can lead to hygiene violations if subsequently + // `pt` itself is added to the constraint. Test case is run/enrich-gentraversable.scala. + .asInstanceOf[PolyType].flatten case _ => tp } @@ -1897,7 +1937,9 @@ trait Applications extends Compatibility { /** The shape of given tree as a type; cannot handle named arguments. */ def typeShape(tree: untpd.Tree): Type = tree match { case untpd.Function(args, body) => - defn.FunctionOf(args map Function.const(defn.AnyType), typeShape(body)) + defn.FunctionOf( + args.map(Function.const(defn.AnyType)), typeShape(body), + isContextual = untpd.isContextualClosure(tree)) case Match(EmptyTree, _) => defn.PartialFunctionClass.typeRef.appliedTo(defn.AnyType :: defn.NothingType :: Nil) case _ => @@ -1936,7 +1978,7 @@ trait Applications extends Compatibility { val formals = ref.widen.firstParamTypes if formals.length > idx then formals(idx) match - case defn.FunctionOf(args, _, _, _) => args.length + case defn.FunctionOf(args, _, _) => args.length case _ => -1 else -1 @@ -2020,31 +2062,35 @@ trait Applications extends Compatibility { if isDetermined(alts2) then alts2 else resolveMapped(alts1, _.widen.appliedTo(targs1.tpes), pt1) - case defn.FunctionOf(args, resultType, _, _) => - narrowByTypes(alts, args, resultType) - case pt => - val compat = alts.filterConserve(normalizedCompatible(_, pt, 
keepConstraint = false)) - if (compat.isEmpty) - /* - * the case should not be moved to the enclosing match - * since SAM type must be considered only if there are no candidates - * For example, the second f should be chosen for the following code: - * def f(x: String): Unit = ??? - * def f: java.io.OutputStream = ??? - * new java.io.ObjectOutputStream(f) - */ - pt match { - case SAMType(mtp) => - narrowByTypes(alts, mtp.paramInfos, mtp.resultType) - case _ => - // pick any alternatives that are not methods since these might be convertible - // to the expected type, or be used as extension method arguments. - val convertible = alts.filterNot(alt => - normalize(alt, IgnoredProto(pt)).widenSingleton.isInstanceOf[MethodType]) - if convertible.length == 1 then convertible else compat - } - else compat + val compat0 = pt match + case defn.FunctionOf(args, resType, _) => + narrowByTypes(alts, args, resType) + case _ => + Nil + if (compat0.isEmpty) then + val compat = alts.filterConserve(normalizedCompatible(_, pt, keepConstraint = false)) + if (compat.isEmpty) + /* + * the case should not be moved to the enclosing match + * since SAM type must be considered only if there are no candidates + * For example, the second f should be chosen for the following code: + * def f(x: String): Unit = ??? + * def f: java.io.OutputStream = ??? + * new java.io.ObjectOutputStream(f) + */ + pt match { + case SAMType(mtp) => + narrowByTypes(alts, mtp.paramInfos, mtp.resultType) + case _ => + // pick any alternatives that are not methods since these might be convertible + // to the expected type, or be used as extension method arguments. 
+ val convertible = alts.filterNot(alt => + normalize(alt, IgnoredProto(pt)).widenSingleton.isInstanceOf[MethodType]) + if convertible.length == 1 then convertible else compat + } + else compat + else compat0 } /** The type of alternative `alt` after instantiating its first parameter @@ -2183,7 +2229,7 @@ trait Applications extends Compatibility { val formalsForArg: List[Type] = altFormals.map(_.head) def argTypesOfFormal(formal: Type): List[Type] = formal.dealias match { - case defn.FunctionOf(args, result, isImplicit, isErased) => args + case defn.FunctionOf(args, result, isImplicit) => args case defn.PartialFunctionOf(arg, result) => arg :: Nil case _ => Nil } @@ -2206,7 +2252,7 @@ trait Applications extends Compatibility { false val commonFormal = if (isPartial) defn.PartialFunctionOf(commonParamTypes.head, WildcardType) - else defn.FunctionOf(commonParamTypes, WildcardType) + else defn.FunctionOf(commonParamTypes, WildcardType, isContextual = untpd.isContextualClosure(arg)) overload.println(i"pretype arg $arg with expected type $commonFormal") if (commonParamTypes.forall(isFullyDefined(_, ForceDegree.flipBottom))) withMode(Mode.ImplicitsEnabled) { @@ -2375,9 +2421,14 @@ trait Applications extends Compatibility { else None catch - case NonFatal(_) => None + case ex: UnhandledError => None def isApplicableExtensionMethod(methodRef: TermRef, receiverType: Type)(using Context): Boolean = methodRef.symbol.is(ExtensionMethod) && !receiverType.isBottomType && tryApplyingExtensionMethod(methodRef, nullLiteral.asInstance(receiverType)).nonEmpty + + def captureWildcardsCompat(tp: Type, pt: Type)(using Context): Type = + val captured = captureWildcards(tp) + if (captured ne tp) && isCompatible(captured, pt) then captured + else tp } diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala index c53213d7bd37..df5639b50302 100644 --- a/compiler/src/dotty/tools/dotc/typer/Checking.scala +++ 
b/compiler/src/dotty/tools/dotc/typer/Checking.scala @@ -33,7 +33,7 @@ import NameOps._ import SymDenotations.{NoCompleter, NoDenotation} import Applications.unapplyArgs import Inferencing.isFullyDefined -import transform.patmat.SpaceEngine.isIrrefutable +import transform.patmat.SpaceEngine.{isIrrefutable, isIrrefutableQuotedPattern} import config.Feature import config.Feature.sourceVersion import config.SourceVersion._ @@ -67,11 +67,12 @@ object Checking { */ def checkBounds(args: List[tpd.Tree], boundss: List[TypeBounds], instantiate: (Type, List[Type]) => Type, app: Type = NoType, tpt: Tree = EmptyTree)(using Context): Unit = - args.lazyZip(boundss).foreach { (arg, bound) => - if !bound.isLambdaSub && !arg.tpe.hasSimpleKind then - errorTree(arg, - showInferred(MissingTypeParameterInTypeApp(arg.tpe), app, tpt)) - } + if ctx.phase != Phases.checkCapturesPhase then + args.lazyZip(boundss).foreach { (arg, bound) => + if !bound.isLambdaSub && !arg.tpe.hasSimpleKind then + errorTree(arg, + showInferred(MissingTypeParameterInTypeApp(arg.tpe), app, tpt)) + } for (arg, which, bound) <- TypeOps.boundsViolations(args, boundss, instantiate, app) do report.error( showInferred(DoesNotConformToBound(arg.tpe, which, bound), app, tpt), @@ -154,7 +155,7 @@ object Checking { checker.traverse(tpt.tpe) def checkNoWildcard(tree: Tree)(using Context): Tree = tree.tpe match { - case tpe: TypeBounds => errorTree(tree, "no wildcard type allowed here") + case tpe: TypeBounds => errorTree(tree, em"no wildcard type allowed here") case _ => tree } @@ -184,12 +185,14 @@ object Checking { /** Check that `tp` refers to a nonAbstract class * and that the instance conforms to the self type of the created class. 
*/ - def checkInstantiable(tp: Type, pos: SrcPos)(using Context): Unit = + def checkInstantiable(tp: Type, srcTp: Type, pos: SrcPos)(using Context): Unit = tp.underlyingClassRef(refinementOK = false) match case tref: TypeRef => val cls = tref.symbol - if (cls.isOneOf(AbstractOrTrait)) - report.error(CantInstantiateAbstractClassOrTrait(cls, isTrait = cls.is(Trait)), pos) + if (cls.isOneOf(AbstractOrTrait)) { + val srcCls = srcTp.underlyingClassRef(refinementOK = false).typeSymbol + report.error(CantInstantiateAbstractClassOrTrait(srcCls, isTrait = srcCls.is(Trait)), pos) + } if !cls.is(Module) then // Create a synthetic singleton type instance, and check whether // it conforms to the self type of the class as seen from that instance. @@ -471,11 +474,11 @@ object Checking { def checkWithDeferred(flag: FlagSet) = if (sym.isOneOf(flag)) fail(AbstractMemberMayNotHaveModifier(sym, flag)) - def checkNoConflict(flag1: FlagSet, flag2: FlagSet, msg: => String) = - if (sym.isAllOf(flag1 | flag2)) fail(msg.toMessage) + def checkNoConflict(flag1: FlagSet, flag2: FlagSet, msg: Message) = + if (sym.isAllOf(flag1 | flag2)) fail(msg) def checkCombination(flag1: FlagSet, flag2: FlagSet) = if sym.isAllOf(flag1 | flag2) then - fail(i"illegal combination of modifiers: `${flag1.flagsString}` and `${flag2.flagsString}` for: $sym".toMessage) + fail(em"illegal combination of modifiers: `${flag1.flagsString}` and `${flag2.flagsString}` for: $sym") def checkApplicable(flag: Flag, ok: Boolean) = if sym.is(flag, butNot = Synthetic) && !ok then fail(ModifierNotAllowedForDefinition(flag)) @@ -495,15 +498,20 @@ object Checking { } if sym.is(Transparent) then if sym.isType then - if !sym.is(Trait) then fail(em"`transparent` can only be used for traits".toMessage) + if !sym.isExtensibleClass then fail(em"`transparent` can only be used for extensible classes and traits") else - if !sym.isInlineMethod then fail(em"`transparent` can only be used for inline methods".toMessage) + if !sym.isInlineMethod 
then fail(em"`transparent` can only be used for inline methods") if (!sym.isClass && sym.is(Abstract)) fail(OnlyClassesCanBeAbstract(sym)) // note: this is not covered by the next test since terms can be abstract (which is a dual-mode flag) // but they can never be one of ClassOnlyFlags if !sym.isClass && sym.isOneOf(ClassOnlyFlags) then - fail(em"only classes can be ${(sym.flags & ClassOnlyFlags).flagsString}".toMessage) + val illegal = sym.flags & ClassOnlyFlags + if sym.is(TypeParam) && illegal == Sealed && Feature.ccEnabled && cc.allowUniversalInBoxed then + if !sym.owner.is(Method) then + fail(em"only method type parameters can be sealed") + else + fail(em"only classes can be ${illegal.flagsString}") if (sym.is(AbsOverride) && !sym.owner.is(Trait)) fail(AbstractOverrideOnlyInTraits(sym)) if sym.is(Trait) then @@ -520,7 +528,7 @@ object Checking { if !sym.isOneOf(Method | ModuleVal) then fail(TailrecNotApplicable(sym)) else if sym.is(Inline) then - fail("Inline methods cannot be @tailrec".toMessage) + fail(em"Inline methods cannot be @tailrec") if sym.hasAnnotation(defn.TargetNameAnnot) && sym.isClass && sym.isTopLevelClass then fail(TargetNameOnTopLevelClass(sym)) if (sym.hasAnnotation(defn.NativeAnnot)) { @@ -539,7 +547,7 @@ object Checking { fail(CannotExtendAnyVal(sym)) if (sym.isConstructor && !sym.isPrimaryConstructor && sym.owner.is(Trait, butNot = JavaDefined)) val addendum = if ctx.settings.Ydebug.value then s" ${sym.owner.flagsString}" else "" - fail(s"Traits cannot have secondary constructors$addendum".toMessage) + fail(em"Traits cannot have secondary constructors$addendum") checkApplicable(Inline, sym.isTerm && !sym.isOneOf(Mutable | Module)) checkApplicable(Lazy, !sym.isOneOf(Method | Mutable)) if (sym.isType && !sym.isOneOf(Deferred | JavaDefined)) @@ -560,7 +568,7 @@ object Checking { // The issue with `erased inline` is that the erased semantics get lost // as the code is inlined and the reference is removed before the erased usage check. 
checkCombination(Erased, Inline) - checkNoConflict(Lazy, ParamAccessor, s"parameter may not be `lazy`") + checkNoConflict(Lazy, ParamAccessor, em"parameter may not be `lazy`") } /** Check for illegal or redundant modifiers on modules. This is done separately @@ -599,7 +607,7 @@ object Checking { */ def checkNoPrivateLeaks(sym: Symbol)(using Context): Type = { class NotPrivate extends TypeMap { - var errors: List[() => String] = Nil + var errors: List[Message] = Nil private var inCaptureSet: Boolean = false def accessBoundary(sym: Symbol): Symbol = @@ -631,7 +639,7 @@ object Checking { var tp1 = if (isLeaked(tp.symbol)) { errors = - (() => em"non-private ${sym.showLocated} refers to private ${tp.symbol}\nin its type signature ${sym.info}") + em"non-private ${sym.showLocated} refers to private ${tp.symbol}\nin its type signature ${sym.info}" :: errors tp } @@ -672,7 +680,7 @@ object Checking { } val notPrivate = new NotPrivate val info = notPrivate(sym.info) - notPrivate.errors.foreach(error => report.errorOrMigrationWarning(error(), sym.srcPos, from = `3.0`)) + notPrivate.errors.foreach(report.errorOrMigrationWarning(_, sym.srcPos, from = `3.0`)) info } @@ -740,13 +748,16 @@ object Checking { if sym.isNoValue && !ctx.isJava then report.error(JavaSymbolIsNotAValue(sym), tree.srcPos) + /** Check that `tree` refers to a value, unless `tree` is selected or applied + * (singleton types x.type don't count as selections). + */ def checkValue(tree: Tree, proto: Type)(using Context): tree.type = tree match - case tree: RefTree - if tree.name.isTermName - && !proto.isInstanceOf[SelectionProto] - && !proto.isInstanceOf[FunOrPolyProto] => - checkValue(tree) + case tree: RefTree if tree.name.isTermName => + proto match + case _: SelectionProto if proto ne SingletonTypeProto => // no value check + case _: FunOrPolyProto => // no value check + case _ => checkValue(tree) case _ => tree @@ -807,13 +818,13 @@ trait Checking { /** Check that type `tp` is stable. 
*/ def checkStable(tp: Type, pos: SrcPos, kind: String)(using Context): Unit = - if !tp.isStable then report.error(NotAPath(tp, kind), pos) + if !tp.isStable && !tp.isErroneous then report.error(NotAPath(tp, kind), pos) /** Check that all type members of `tp` have realizable bounds */ def checkRealizableBounds(cls: Symbol, pos: SrcPos)(using Context): Unit = { val rstatus = boundsRealizability(cls.thisType) if (rstatus ne Realizable) - report.error(ex"$cls cannot be instantiated since it${rstatus.msg}", pos) + report.error(em"$cls cannot be instantiated since it${rstatus.msg}", pos) } /** Check that pattern `pat` is irrefutable for scrutinee type `sel.tpe`. @@ -834,7 +845,7 @@ trait Checking { var reportedPt = pt.dropAnnot(defn.UncheckedAnnot) if !pat.tpe.isSingleton then reportedPt = reportedPt.widen val problem = if pat.tpe <:< reportedPt then "is more specialized than" else "does not match" - ex"pattern's type ${pat.tpe} $problem the right hand side expression's type $reportedPt" + em"pattern's type ${pat.tpe} $problem the right hand side expression's type $reportedPt" case RefutableExtractor => val extractor = val UnApply(fn, _, _) = pat: @unchecked @@ -843,6 +854,10 @@ trait Checking { case _ => EmptyTree if extractor.isEmpty then em"pattern binding uses refutable extractor" + else if extractor.symbol eq defn.QuoteMatching_ExprMatch then + em"pattern binding uses refutable extractor `'{...}`" + else if extractor.symbol eq defn.QuoteMatching_TypeMatch then + em"pattern binding uses refutable extractor `'[...]`" else em"pattern binding uses refutable extractor `$extractor`" @@ -862,10 +877,11 @@ trait Checking { else pat.srcPos def rewriteMsg = Message.rewriteNotice("This patch", `3.2-migration`) report.gradualErrorOrMigrationWarning( - em"""$message - | - |If $usage is intentional, this can be communicated by $fix, - |which $addendum.$rewriteMsg""", + message.append( + i"""| + | + |If $usage is intentional, this can be communicated by $fix, + |which 
$addendum.$rewriteMsg"""), pos, warnFrom = `3.2`, errorFrom = `future`) false } @@ -880,9 +896,9 @@ trait Checking { pat match case Bind(_, pat1) => recur(pat1, pt) - case UnApply(fn, _, pats) => + case UnApply(fn, implicits, pats) => check(pat, pt) && - (isIrrefutable(fn, pats.length) || fail(pat, pt, Reason.RefutableExtractor)) && { + (isIrrefutable(fn, pats.length) || isIrrefutableQuotedPattern(fn, implicits, pt) || fail(pat, pt, Reason.RefutableExtractor)) && { val argPts = unapplyArgs(fn.tpe.widen.finalResultType, fn, pats, pat.srcPos) pats.corresponds(argPts)(recur) } @@ -902,7 +918,7 @@ trait Checking { private def checkLegalImportOrExportPath(path: Tree, kind: String)(using Context): Unit = { checkStable(path.tpe, path.srcPos, kind) if (!ctx.isAfterTyper) Checking.checkRealizable(path.tpe, path.srcPos) - if !isIdempotentExpr(path) then + if !isIdempotentExpr(path) && !path.tpe.isErroneous then report.error(em"import prefix is not a pure expression", path.srcPos) } @@ -934,8 +950,8 @@ trait Checking { // we restrict wildcard export from package as incremental compilation does not yet // register a dependency on "all members of a package" - see https://github.com/sbt/zinc/issues/226 report.error( - em"Implementation restriction: ${path.tpe.classSymbol} is not a valid prefix " + - "for a wildcard export, as it is a package.", path.srcPos) + em"Implementation restriction: ${path.tpe.classSymbol} is not a valid prefix for a wildcard export, as it is a package", + path.srcPos) /** Check that module `sym` does not clash with a class of the same name * that is concurrently compiled in another source file. @@ -978,14 +994,15 @@ trait Checking { sym.srcPos) /** If `tree` is an application of a new-style implicit conversion (using the apply - * method of a `scala.Conversion` instance), check that implicit conversions are - * enabled. 
+ * method of a `scala.Conversion` instance), check that the expected type is + * a convertible formal parameter type or that implicit conversions are enabled. */ - def checkImplicitConversionUseOK(tree: Tree)(using Context): Unit = + def checkImplicitConversionUseOK(tree: Tree, expected: Type)(using Context): Unit = val sym = tree.symbol if sym.name == nme.apply && sym.owner.derivesFrom(defn.ConversionClass) && !sym.info.isErroneous + && !expected.isConvertibleParam then def conv = methPart(tree) match case Select(qual, _) => qual.symbol.orElse(sym.owner) @@ -1021,8 +1038,8 @@ trait Checking { ("method", (n: Name) => s"method syntax .$n(...)") def rewriteMsg = Message.rewriteNotice("The latter", options = "-deprecation") report.deprecationWarning( - i"""Alphanumeric $kind $name is not declared ${hlAsKeyword("infix")}; it should not be used as infix operator. - |Instead, use ${alternative(name)} or backticked identifier `$name`.$rewriteMsg""", + em"""Alphanumeric $kind $name is not declared ${hlAsKeyword("infix")}; it should not be used as infix operator. 
+ |Instead, use ${alternative(name)} or backticked identifier `$name`.$rewriteMsg""", tree.op.srcPos) if (ctx.settings.deprecation.value) { patch(Span(tree.op.span.start, tree.op.span.start), "`") @@ -1048,14 +1065,14 @@ trait Checking { def checkFeasibleParent(tp: Type, pos: SrcPos, where: => String = "")(using Context): Type = { def checkGoodBounds(tp: Type) = tp match { case tp @ TypeBounds(lo, hi) if !(lo <:< hi) => - report.error(ex"no type exists between low bound $lo and high bound $hi$where", pos) + report.error(em"no type exists between low bound $lo and high bound $hi$where", pos) TypeBounds(hi, hi) case _ => tp } tp match { case tp @ AndType(tp1, tp2) => - report.error(s"conflicting type arguments$where", pos) + report.error(em"conflicting type arguments$where", pos) tp1 case tp @ AppliedType(tycon, args) => tp.derivedAppliedType(tycon, args.mapConserve(checkGoodBounds)) @@ -1109,10 +1126,12 @@ trait Checking { def checkParentCall(call: Tree, caller: ClassSymbol)(using Context): Unit = if (!ctx.isAfterTyper) { val called = call.tpe.classSymbol + if (called.is(JavaAnnotation)) + report.error(em"${called.name} must appear without any argument to be a valid class parent because it is a Java annotation", call.srcPos) if (caller.is(Trait)) - report.error(i"$caller may not call constructor of $called", call.srcPos) + report.error(em"$caller may not call constructor of $called", call.srcPos) else if (called.is(Trait) && !caller.mixins.contains(called)) - report.error(i"""$called is already implemented by super${caller.superClass}, + report.error(em"""$called is already implemented by super${caller.superClass}, |its constructor cannot be called again""", call.srcPos) // Check that constructor call is of the form _.(args1)...(argsN). 
@@ -1121,7 +1140,7 @@ trait Checking { case Apply(fn, _) => checkLegalConstructorCall(fn, tree, "") case TypeApply(fn, _) => checkLegalConstructorCall(fn, tree, "type ") case Select(_, nme.CONSTRUCTOR) => // ok - case _ => report.error(s"too many ${kind}arguments in parent constructor", encl.srcPos) + case _ => report.error(em"too many ${kind}arguments in parent constructor", encl.srcPos) } call match { case Apply(fn, _) => checkLegalConstructorCall(fn, call, "") @@ -1171,7 +1190,7 @@ trait Checking { parent match { case parent: ClassSymbol => if (parent.is(Case)) - report.error(ex"""case $caseCls has case ancestor $parent, but case-to-case inheritance is prohibited. + report.error(em"""case $caseCls has case ancestor $parent, but case-to-case inheritance is prohibited. |To overcome this limitation, use extractors to pattern match on non-leaf nodes.""", pos) else checkCaseInheritance(parent.superClass, caseCls, pos) case _ => @@ -1182,15 +1201,11 @@ trait Checking { */ def checkNoForwardDependencies(vparams: List[ValDef])(using Context): Unit = vparams match { case vparam :: vparams1 => - val check = new TreeTraverser { - def traverse(tree: Tree)(using Context) = tree match { - case id: Ident if vparams.exists(_.symbol == id.symbol) => - report.error("illegal forward reference to method parameter", id.srcPos) - case _ => - traverseChildren(tree) - } + vparam.tpt.foreachSubTree { + case id: Ident if vparams.exists(_.symbol == id.symbol) => + report.error(em"illegal forward reference to method parameter", id.srcPos) + case _ => } - check.traverse(vparam.tpt) checkNoForwardDependencies(vparams1) case Nil => } @@ -1228,7 +1243,7 @@ trait Checking { if (t.span.isSourceDerived && owner == badOwner) t match { case t: RefTree if allowed(t.name, checkedSym) => - case _ => report.error(i"illegal reference to $checkedSym from $where", t.srcPos) + case _ => report.error(em"illegal reference to $checkedSym from $where", t.srcPos) } val sym = t.symbol t match { @@ -1262,6 
+1277,23 @@ trait Checking { if !Inlines.inInlineMethod && !ctx.isInlineContext then report.error(em"$what can only be used in an inline method", pos) + /** Check that the class corresponding to this tree is either a Scala or Java annotation. + * + * @return The original tree or an error tree in case `tree` isn't a valid + * annotation or already an error tree. + */ + def checkAnnotClass(tree: Tree)(using Context): Tree = + if tree.tpe.isError then + return tree + val cls = Annotations.annotClass(tree) + if cls.is(JavaDefined) then + if !cls.is(JavaAnnotation) then + errorTree(tree, em"$cls is not a valid Java annotation: it was not declared with `@interface`") + else tree + else if !cls.derivesFrom(defn.AnnotationClass) then + errorTree(tree, em"$cls is not a valid Scala annotation: it does not extend `scala.annotation.Annotation`") + else tree + /** Check arguments of compiler-defined annotations */ def checkAnnotArgs(tree: Tree)(using Context): tree.type = val cls = Annotations.annotClass(tree) @@ -1328,7 +1360,7 @@ trait Checking { def ensureParentDerivesFrom(enumCase: Symbol)(using Context) = val enumCls = enumCase.owner.linkedClass if !firstParent.derivesFrom(enumCls) then - report.error(i"enum case does not extend its enum $enumCls", enumCase.srcPos) + report.error(em"enum case does not extend its enum $enumCls", enumCase.srcPos) cls.info match case info: ClassInfo => cls.info = info.derivedClassInfo(declaredParents = enumCls.typeRefApplied :: info.declaredParents) @@ -1366,9 +1398,9 @@ trait Checking { if (stat.symbol.isAllOf(EnumCase)) stat match { - case TypeDef(_, Template(DefDef(_, paramss, _, _), parents, _, _)) => + case TypeDef(_, impl @ Template(DefDef(_, paramss, _, _), _, _, _)) => paramss.foreach(_.foreach(check)) - parents.foreach(check) + impl.parents.foreach(check) case vdef: ValDef => vdef.rhs match { case Block((clsDef @ TypeDef(_, impl: Template)) :: Nil, _) @@ -1433,7 +1465,6 @@ trait Checking { def checkMatchable(tp: Type, pos: SrcPos, 
pattern: Boolean)(using Context): Unit = if !tp.derivesFrom(defn.MatchableClass) && sourceVersion.isAtLeast(`future-migration`) then - val kind = if pattern then "pattern selector" else "value" report.warning(MatchableWarning(tp, pattern), pos) /** Check that there is an implicit capability to throw a checked exception @@ -1516,7 +1547,7 @@ trait NoChecking extends ReChecking { override def checkStable(tp: Type, pos: SrcPos, kind: String)(using Context): Unit = () override def checkClassType(tp: Type, pos: SrcPos, traitReq: Boolean, stablePrefixReq: Boolean)(using Context): Type = tp override def checkImplicitConversionDefOK(sym: Symbol)(using Context): Unit = () - override def checkImplicitConversionUseOK(tree: Tree)(using Context): Unit = () + override def checkImplicitConversionUseOK(tree: Tree, expected: Type)(using Context): Unit = () override def checkFeasibleParent(tp: Type, pos: SrcPos, where: => String = "")(using Context): Type = tp override def checkAnnotArgs(tree: Tree)(using Context): tree.type = tree override def checkNoTargetNameConflict(stats: List[Tree])(using Context): Unit = () diff --git a/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala b/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala index 044dd7bb8528..4087c5faf404 100644 --- a/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala @@ -18,9 +18,6 @@ class CrossVersionChecks extends MiniPhase: override def description: String = CrossVersionChecks.description - override def runsAfterGroupsOf: Set[String] = Set(FirstTransform.name) - // We assume all type trees except TypeTree have been eliminated - // Note: if a symbol has both @deprecated and @migration annotations and both // warnings are enabled, only the first one checked here will be emitted. 
// I assume that's a consequence of some code trying to avoid noise by suppressing @@ -52,7 +49,8 @@ class CrossVersionChecks extends MiniPhase: owner.isDeprecated || isEnumOwner(owner) - /**Scan the chain of outer declaring scopes from the current context + /**Skip warnings for synthetic members of case classes during declaration and + * scan the chain of outer declaring scopes from the current context * a deprecation warning will be skipped if one the following holds * for a given declaring scope: * - the symbol associated with the scope is also deprecated. @@ -60,27 +58,20 @@ class CrossVersionChecks extends MiniPhase: * a module that declares `sym`, or the companion class of the * module that declares `sym`. */ - def skipWarning(using Context) = - ctx.owner.ownersIterator.exists(if sym.isEnumCase then isDeprecatedOrEnum else _.isDeprecated) + def skipWarning(using Context): Boolean = + (ctx.owner.is(Synthetic) && sym.is(CaseClass)) + || ctx.owner.ownersIterator.exists(if sym.isEnumCase then isDeprecatedOrEnum else _.isDeprecated) - for annot <- sym.getAnnotation(defn.DeprecatedAnnot) do + // Also check for deprecation of the companion class for synthetic methods + val toCheck = sym :: (if sym.isAllOf(SyntheticMethod) then sym.owner.companionClass :: Nil else Nil) + for sym <- toCheck; annot <- sym.getAnnotation(defn.DeprecatedAnnot) do if !skipWarning then val msg = annot.argumentConstant(0).map(": " + _.stringValue).getOrElse("") val since = annot.argumentConstant(1).map(" since " + _.stringValue).getOrElse("") - report.deprecationWarning(s"${sym.showLocated} is deprecated${since}${msg}", pos) - - private def checkExperimentalSignature(sym: Symbol, pos: SrcPos)(using Context): Unit = - class Checker extends TypeTraverser: - def traverse(tp: Type): Unit = - if tp.typeSymbol.isExperimental then - Feature.checkExperimentalDef(tp.typeSymbol, pos) - else - traverseChildren(tp) - if !sym.isInExperimentalScope then - new Checker().traverse(sym.info) + 
report.deprecationWarning(em"${sym.showLocated} is deprecated${since}${msg}", pos) private def checkExperimentalAnnots(sym: Symbol)(using Context): Unit = - if !sym.isInExperimentalScope then + if sym.exists && !sym.isInExperimentalScope then for annot <- sym.annotations if annot.symbol.isExperimental do Feature.checkExperimentalDef(annot.symbol, annot.tree) @@ -110,36 +101,25 @@ class CrossVersionChecks extends MiniPhase: !sym.isDeprecated && !sym.is(Deferred)) if (!concrOvers.isEmpty) report.deprecationWarning( - symbol.toString + " overrides concrete, non-deprecated symbol(s):" + - concrOvers.map(_.name).mkString(" ", ", ", ""), tree.srcPos) + em"""$symbol overrides concrete, non-deprecated definition(s): + | ${concrOvers.map(_.name).mkString(", ")}""", + tree.srcPos) } } - /** Check that classes extending experimental classes or nested in experimental classes have the @experimental annotation. */ - private def checkExperimentalInheritance(cls: ClassSymbol)(using Context): Unit = - if !cls.isAnonymousClass && !cls.hasAnnotation(defn.ExperimentalAnnot) then - cls.info.parents.find(_.typeSymbol.isExperimental) match - case Some(parent) => - report.error(em"extension of experimental ${parent.typeSymbol} must have @experimental annotation", cls.srcPos) - case _ => - end checkExperimentalInheritance - override def transformValDef(tree: ValDef)(using Context): ValDef = checkDeprecatedOvers(tree) checkExperimentalAnnots(tree.symbol) - checkExperimentalSignature(tree.symbol, tree) tree override def transformDefDef(tree: DefDef)(using Context): DefDef = checkDeprecatedOvers(tree) checkExperimentalAnnots(tree.symbol) - checkExperimentalSignature(tree.symbol, tree) tree - override def transformTemplate(tree: Template)(using Context): Tree = - val cls = ctx.owner.asClass - checkExperimentalInheritance(cls) - checkExperimentalAnnots(cls) + override def transformTypeDef(tree: TypeDef)(using Context): TypeDef = + // TODO do we need to check checkDeprecatedOvers(tree)? 
+ checkExperimentalAnnots(tree.symbol) tree override def transformIdent(tree: Ident)(using Context): Ident = { @@ -171,19 +151,14 @@ class CrossVersionChecks extends MiniPhase: tree } - override def transformTypeDef(tree: TypeDef)(using Context): TypeDef = { - checkExperimentalAnnots(tree.symbol) + override def transformOther(tree: Tree)(using Context): Tree = + tree.foreachSubTree { // Find references in type trees and imports + case tree: Ident => transformIdent(tree) + case tree: Select => transformSelect(tree) + case tree: TypeTree => transformTypeTree(tree) + case _ => + } tree - } - - override def transformOther(tree: Tree)(using Context): Tree = tree match - case tree: Import => - tree.foreachSubTree { - case t: RefTree => checkUndesiredProperties(t.symbol, t.srcPos) - case _ => - } - tree - case _ => tree end CrossVersionChecks diff --git a/compiler/src/dotty/tools/dotc/typer/Deriving.scala b/compiler/src/dotty/tools/dotc/typer/Deriving.scala index d2165a5ca8c5..8fdc468780ba 100644 --- a/compiler/src/dotty/tools/dotc/typer/Deriving.scala +++ b/compiler/src/dotty/tools/dotc/typer/Deriving.scala @@ -44,7 +44,7 @@ trait Deriving { private def addDerivedInstance(clsName: Name, info: Type, pos: SrcPos): Unit = { val instanceName = "derived$".concat(clsName) if (ctx.denotNamed(instanceName).exists) - report.error(i"duplicate type class derivation for $clsName", pos) + report.error(em"duplicate type class derivation for $clsName", pos) else // If we set the Synthetic flag here widenGiven will widen too far and the // derived instance will have too low a priority to be selected over a freshly @@ -90,7 +90,7 @@ trait Deriving { xs.corresponds(ys)((x, y) => x.paramInfo.hasSameKindAs(y.paramInfo)) def cannotBeUnified = - report.error(i"${cls.name} cannot be unified with the type argument of ${typeClass.name}", derived.srcPos) + report.error(em"${cls.name} cannot be unified with the type argument of ${typeClass.name}", derived.srcPos) def addInstance(derivedParams: 
List[TypeSymbol], evidenceParamInfos: List[List[Type]], instanceTypes: List[Type]): Unit = { val resultType = typeClassType.appliedTo(instanceTypes) @@ -252,7 +252,7 @@ trait Deriving { if (typeClassArity == 1) deriveSingleParameter else if (typeClass == defn.CanEqualClass) deriveCanEqual else if (typeClassArity == 0) - report.error(i"type ${typeClass.name} in derives clause of ${cls.name} has no type parameters", derived.srcPos) + report.error(em"type ${typeClass.name} in derives clause of ${cls.name} has no type parameters", derived.srcPos) else cannotBeUnified } diff --git a/compiler/src/dotty/tools/dotc/typer/Docstrings.scala b/compiler/src/dotty/tools/dotc/typer/Docstrings.scala index 5fefd355d7d8..d819528ff556 100644 --- a/compiler/src/dotty/tools/dotc/typer/Docstrings.scala +++ b/compiler/src/dotty/tools/dotc/typer/Docstrings.scala @@ -37,7 +37,7 @@ object Docstrings { case List(df: tpd.DefDef) => usecase.typed(df) case _ => - report.error("`@usecase` was not a valid definition", ctx.source.atSpan(usecase.codePos)) + report.error(em"`@usecase` was not a valid definition", ctx.source.atSpan(usecase.codePos)) usecase } } diff --git a/compiler/src/dotty/tools/dotc/typer/Dynamic.scala b/compiler/src/dotty/tools/dotc/typer/Dynamic.scala index 1630ce31e4c6..717966923708 100644 --- a/compiler/src/dotty/tools/dotc/typer/Dynamic.scala +++ b/compiler/src/dotty/tools/dotc/typer/Dynamic.scala @@ -2,20 +2,22 @@ package dotty.tools package dotc package typer -import dotty.tools.dotc.ast.Trees._ +import dotty.tools.dotc.ast.Trees.* import dotty.tools.dotc.ast.tpd import dotty.tools.dotc.ast.untpd import dotty.tools.dotc.core.Constants.Constant -import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.core.Names.{Name, TermName} -import dotty.tools.dotc.core.StdNames._ -import dotty.tools.dotc.core.Types._ -import dotty.tools.dotc.core.Decorators._ +import dotty.tools.dotc.core.StdNames.* +import dotty.tools.dotc.core.Types.* 
+import dotty.tools.dotc.core.Decorators.* import dotty.tools.dotc.core.TypeErasure -import util.Spans._ -import core.Symbols._ -import ErrorReporting._ -import reporting._ +import util.Spans.* +import core.Symbols.* +import ErrorReporting.* +import dotty.tools.dotc.transform.ValueClasses +import dotty.tools.dotc.transform.TypeUtils.isPrimitiveValueType +import reporting.* object Dynamic { private def isDynamicMethod(name: Name): Boolean = @@ -80,7 +82,7 @@ trait Dynamic { val args = tree.args val dynName = if (args.exists(isNamedArg)) nme.applyDynamicNamed else nme.applyDynamic if (dynName == nme.applyDynamicNamed && untpd.isWildcardStarArgList(args)) - errorTree(tree, "applyDynamicNamed does not support passing a vararg parameter") + errorTree(tree, em"applyDynamicNamed does not support passing a vararg parameter") else { def namedArgTuple(name: String, arg: untpd.Tree) = untpd.Tuple(List(Literal(Constant(name)), arg)) def namedArgs = args.map { @@ -179,12 +181,12 @@ trait Dynamic { val vargss = termArgss(tree) def structuralCall(selectorName: TermName, classOfs: => List[Tree]) = { - val selectable = adapt(qual, defn.SelectableClass.typeRef) + val selectable = adapt(qual, defn.SelectableClass.typeRef | defn.DynamicClass.typeRef) // ($qual: Selectable).$selectorName("$name") val base = untpd.Apply( - untpd.TypedSplice(selectable.select(selectorName)).withSpan(fun.span), + untpd.Select(untpd.TypedSplice(selectable), selectorName).withSpan(fun.span), (Literal(Constant(name.encode.toString)) :: Nil).map(untpd.TypedSplice(_))) val scall = @@ -214,9 +216,33 @@ trait Dynamic { def fail(reason: String): Tree = errorTree(tree, em"Structural access not allowed on method $name because it $reason") + extension (tree: Tree) + /** The implementations of `selectDynamic` and `applyDynamic` in `scala.reflect.SelectDynamic` have no information about the expected return type of a value/method which was declared in the refinement, + * only the JVM type after erasure can be obtained 
through reflection, e.g. + * + * class Foo(val i: Int) extends AnyVal + * class Reflective extends reflect.Selectable + * val reflective = new Reflective { + * def foo = Foo(1) // Foo at compile time, java.lang.Integer in reflection + * } + * + * Because of that reflective access cannot be implemented properly in `scala.reflect.SelectDynamic` itself + * because it's not known there if the value should be wrapped in a value class constructor call or not. + * Hence the logic of wrapping is performed here, relying on the fact that the implementations of `selectDynamic` and `applyDynamic` in `scala.reflect.SelectDynamic` are final. + */ + def maybeBoxingCast(tpe: Type) = + val maybeBoxed = + if ValueClasses.isDerivedValueClass(tpe.classSymbol) && qual.tpe <:< defn.ReflectSelectableTypeRef then + val genericUnderlying = ValueClasses.valueClassUnbox(tpe.classSymbol.asClass) + val underlying = tpe.select(genericUnderlying).widen.resultType + New(tpe, tree.cast(underlying) :: Nil) + else + tree + maybeBoxed.cast(tpe) + fun.tpe.widen match { case tpe: ValueType => - structuralCall(nme.selectDynamic, Nil).cast(tpe) + structuralCall(nme.selectDynamic, Nil).maybeBoxingCast(tpe) case tpe: MethodType => def isDependentMethod(tpe: Type): Boolean = tpe match { @@ -236,7 +262,7 @@ trait Dynamic { fail(i"has a parameter type with an unstable erasure") :: Nil else TypeErasure.erasure(tpe).asInstanceOf[MethodType].paramInfos.map(clsOf(_)) - structuralCall(nme.applyDynamic, classOfs).cast(tpe.finalResultType) + structuralCall(nme.applyDynamic, classOfs).maybeBoxingCast(tpe.finalResultType) } // (@allanrenucci) I think everything below is dead code diff --git a/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala b/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala index 3034253adb61..126d109889e1 100644 --- a/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala +++ b/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala @@ -10,11 +10,9 @@ import Trees._ import 
NameOps._ import util.SrcPos import config.Feature -import java.util.regex.Matcher.quoteReplacement import reporting._ import collection.mutable -import scala.util.matching.Regex object ErrorReporting { @@ -26,9 +24,6 @@ object ErrorReporting { def errorTree(tree: untpd.Tree, msg: Message)(using Context): tpd.Tree = errorTree(tree, msg, tree.srcPos) - def errorTree(tree: untpd.Tree, msg: => String)(using Context): tpd.Tree = - errorTree(tree, msg.toMessage) - def errorTree(tree: untpd.Tree, msg: TypeError, pos: SrcPos)(using Context): tpd.Tree = tree.withType(errorType(msg, pos)) @@ -37,9 +32,6 @@ object ErrorReporting { ErrorType(msg) } - def errorType(msg: => String, pos: SrcPos)(using Context): ErrorType = - errorType(msg.toMessage, pos) - def errorType(ex: TypeError, pos: SrcPos)(using Context): ErrorType = { report.error(ex, pos) ErrorType(ex.toMessage) @@ -49,12 +41,24 @@ object ErrorReporting { errorType(WrongNumberOfTypeArgs(fntpe, expectedArgs, actual), pos) def missingArgs(tree: Tree, mt: Type)(using Context): Unit = + def isCallableWithoutArgumentsLists(mt: Type): Boolean = mt match + case pt: PolyType => isCallableWithoutArgumentsLists(pt.resType) + case mt: MethodType if mt.isImplicitMethod => isCallableWithoutArgumentsLists(mt.resType) + case mt: MethodType => false + case _ => true + def isCallableWithSingleEmptyArgumentList(mt: Type): Boolean = + mt match + case mt: MethodType if mt.paramNames.isEmpty => isCallableWithoutArgumentsLists(mt.resType) + case mt: MethodType if mt.isImplicitMethod => isCallableWithSingleEmptyArgumentList(mt.resType) + case pt: PolyType => isCallableWithSingleEmptyArgumentList(pt.resType) + case _ => false val meth = err.exprStr(methPart(tree)) - mt match - case mt: MethodType if mt.paramNames.isEmpty => - report.error(MissingEmptyArgumentList(meth), tree.srcPos) - case _ => - report.error(em"missing arguments for $meth", tree.srcPos) + val info = if tree.symbol.exists then tree.symbol.info else mt + if 
isCallableWithSingleEmptyArgumentList(info) then + report.error(MissingEmptyArgumentList(meth), tree.srcPos) + else + report.error(MissingArgumentList(meth, tree.symbol), tree.srcPos) + def matchReductionAddendum(tps: Type*)(using Context): String = val collectMatchTrace = new TypeAccumulator[String]: @@ -87,18 +91,18 @@ object ErrorReporting { def expectedTypeStr(tp: Type): String = tp match { case tp: PolyProto => - em"type arguments [${tp.targs.tpes}%, %] and ${expectedTypeStr(revealDeepenedArgs(tp.resultType))}" + i"type arguments [${tp.targs.tpes}%, %] and ${expectedTypeStr(revealDeepenedArgs(tp.resultType))}" case tp: FunProto => def argStr(tp: FunProto): String = val result = revealDeepenedArgs(tp.resultType) match { case restp: FunProto => argStr(restp) case _: WildcardType | _: IgnoredProto => "" - case tp => em" and expected result type $tp" + case tp => i" and expected result type $tp" } - em"(${tp.typedArgs().tpes}%, %)$result" + i"(${tp.typedArgs().tpes}%, %)$result" s"arguments ${argStr(tp)}" case _ => - em"expected type $tp" + i"expected type $tp" } def anonymousTypeMemberStr(tpe: Type): String = { @@ -107,12 +111,12 @@ object ErrorReporting { case _: MethodOrPoly => "method" case _ => "value of type" } - em"$kind $tpe" + i"$kind $tpe" } def overloadedAltsStr(alts: List[SingleDenotation]): String = - em"overloaded alternatives of ${denotStr(alts.head)} with types\n" + - em" ${alts map (_.info)}%\n %" + i"""overloaded alternatives of ${denotStr(alts.head)} with types + | ${alts map (_.info)}%\n %""" def denotStr(denot: Denotation): String = if (denot.isOverloaded) overloadedAltsStr(denot.alternatives) @@ -130,13 +134,30 @@ object ErrorReporting { case _ => anonymousTypeMemberStr(tp) } + /** Explain info of symbol `sym` as a member of class `base`. + * @param showLocation if true also show sym's location. 
+ */ + def infoString(sym: Symbol, base: Type, showLocation: Boolean): String = + val sym1 = sym.underlyingSymbol + def info = base.memberInfo(sym1) + val infoStr = + if sym1.isAliasType then i", which equals ${info.bounds.hi}" + else if sym1.isAbstractOrParamType && info != TypeBounds.empty then i" with bounds$info" + else if sym1.is(Module) then "" + else if sym1.isTerm then i" of type $info" + else "" + i"${if showLocation then sym1.showLocated else sym1}$infoStr" + + def infoStringWithLocation(sym: Symbol, base: Type) = + infoString(sym, base, showLocation = true) + def exprStr(tree: Tree): String = refStr(tree.tpe) - def takesNoParamsStr(tree: Tree, kind: String): String = + def takesNoParamsMsg(tree: Tree, kind: String): Message = if (tree.tpe.widen.exists) - i"${exprStr(tree)} does not take ${kind}parameters" + em"${exprStr(tree)} does not take ${kind}parameters" else { - i"undefined: $tree # ${tree.uniqueId}: ${tree.tpe.toString} at ${ctx.phase}" + em"undefined: $tree # ${tree.uniqueId}: ${tree.tpe.toString} at ${ctx.phase}" } def patternConstrStr(tree: Tree): String = ??? 
@@ -187,7 +208,9 @@ object ErrorReporting { |The tests were made under $constraintText""" def whyFailedStr(fail: FailedExtension) = - i""" failed with + i""" + | + | failed with: | |${fail.whyFailed.message.indented(8)}""" @@ -255,201 +278,9 @@ object ErrorReporting { ownerSym.typeRef.nonClassTypeMembers.map(_.symbol) }.toList - def dependentStr = + def dependentMsg = """Term-dependent types are experimental, - |they must be enabled with a `experimental.dependent` language import or setting""".stripMargin + |they must be enabled with a `experimental.dependent` language import or setting""".stripMargin.toMessage def err(using Context): Errors = new Errors } - -class ImplicitSearchError( - arg: tpd.Tree, - pt: Type, - where: String, - paramSymWithMethodCallTree: Option[(Symbol, tpd.Tree)] = None, - ignoredInstanceNormalImport: => Option[SearchSuccess], - importSuggestionAddendum: => String -)(using ctx: Context) { - - def missingArgMsg = arg.tpe match { - case ambi: AmbiguousImplicits => - (ambi.alt1, ambi.alt2) match { - case (alt @ AmbiguousImplicitMsg(msg), _) => - userDefinedAmbiguousImplicitMsg(alt, msg) - case (_, alt @ AmbiguousImplicitMsg(msg)) => - userDefinedAmbiguousImplicitMsg(alt, msg) - case _ => - defaultAmbiguousImplicitMsg(ambi) - } - case ambi @ TooUnspecific(target) => - ex"""No implicit search was attempted${location("for")} - |since the expected type $target is not specific enough""" - case _ => - val shortMessage = userDefinedImplicitNotFoundParamMessage - .orElse(userDefinedImplicitNotFoundTypeMessage) - .getOrElse(defaultImplicitNotFoundMessage) - formatMsg(shortMessage)() - ++ hiddenImplicitsAddendum - ++ ErrorReporting.matchReductionAddendum(pt) - } - - private def formatMsg(shortForm: String)(headline: String = shortForm) = arg match - case arg: Trees.SearchFailureIdent[?] => - arg.tpe match - case _: NoMatchingImplicits => headline - case tpe: SearchFailureType => - i"$headline. 
${tpe.explanation}" - case _ => headline - case _ => - arg.tpe match - case tpe: SearchFailureType => - val original = arg match - case Inlined(call, _, _) => call - case _ => arg - i"""$headline. - |I found: - | - | ${original.show.replace("\n", "\n ")} - | - |But ${tpe.explanation}.""" - case _ => headline - - /** Format `raw` implicitNotFound or implicitAmbiguous argument, replacing - * all occurrences of `${X}` where `X` is in `paramNames` with the - * corresponding shown type in `args`. - */ - private def userDefinedErrorString(raw: String, paramNames: List[String], args: List[Type]): String = { - def translate(name: String): Option[String] = { - val idx = paramNames.indexOf(name) - if (idx >= 0) Some(ex"${args(idx)}") else None - } - - """\$\{\s*([^}\s]+)\s*\}""".r.replaceAllIn(raw, (_: Regex.Match) match { - case Regex.Groups(v) => quoteReplacement(translate(v).getOrElse("")).nn - }) - } - - /** Extract a user defined error message from a symbol `sym` - * with an annotation matching the given class symbol `cls`. - */ - private def userDefinedMsg(sym: Symbol, cls: Symbol) = for { - ann <- sym.getAnnotation(cls) - msg <- ann.argumentConstantString(0) - } yield msg - - private def location(preposition: String) = if (where.isEmpty) "" else s" $preposition $where" - - private def defaultAmbiguousImplicitMsg(ambi: AmbiguousImplicits) = - s"Ambiguous given instances: ${ambi.explanation}${location("of")}" - - private def defaultImplicitNotFoundMessage = - ex"No given instance of type $pt was found${location("for")}" - - /** Construct a custom error message given an ambiguous implicit - * candidate `alt` and a user defined message `raw`. 
- */ - private def userDefinedAmbiguousImplicitMsg(alt: SearchSuccess, raw: String) = { - val params = alt.ref.underlying match { - case p: PolyType => p.paramNames.map(_.toString) - case _ => Nil - } - def resolveTypes(targs: List[tpd.Tree])(using Context) = - targs.map(a => Inferencing.fullyDefinedType(a.tpe, "type argument", a.srcPos)) - - // We can extract type arguments from: - // - a function call: - // @implicitAmbiguous("msg A=${A}") - // implicit def f[A](): String = ... - // implicitly[String] // found: f[Any]() - // - // - an eta-expanded function: - // @implicitAmbiguous("msg A=${A}") - // implicit def f[A](x: Int): String = ... - // implicitly[Int => String] // found: x => f[Any](x) - - val call = tpd.closureBody(alt.tree) // the tree itself if not a closure - val targs = tpd.typeArgss(call).flatten - val args = resolveTypes(targs)(using ctx.fresh.setTyperState(alt.tstate)) - userDefinedErrorString(raw, params, args) - } - - /** @param rawMsg Message template with variables, e.g. "Variable A is ${A}" - * @param sym Symbol of the annotated type or of the method whose parameter was annotated - * @param substituteType Function substituting specific types for abstract types associated with variables, e.g A -> Int - */ - private def formatAnnotationMessage(rawMsg: String, sym: Symbol, substituteType: Type => Type): String = { - val substitutableTypesSymbols = ErrorReporting.substitutableTypeSymbolsInScope(sym) - - userDefinedErrorString( - rawMsg, - paramNames = substitutableTypesSymbols.map(_.name.unexpandedName.toString), - args = substitutableTypesSymbols.map(_.typeRef).map(substituteType) - ) - } - - /** Extracting the message from a method parameter, e.g. in - * - * trait Foo - * - * def foo(implicit @annotation.implicitNotFound("Foo is missing") foo: Foo): Any = ??? 
- */ - private def userDefinedImplicitNotFoundParamMessage: Option[String] = paramSymWithMethodCallTree.flatMap { (sym, applTree) => - userDefinedMsg(sym, defn.ImplicitNotFoundAnnot).map { rawMsg => - val fn = tpd.funPart(applTree) - val targs = tpd.typeArgss(applTree).flatten - val methodOwner = fn.symbol.owner - val methodOwnerType = tpd.qualifier(fn).tpe - val methodTypeParams = fn.symbol.paramSymss.flatten.filter(_.isType) - val methodTypeArgs = targs.map(_.tpe) - val substituteType = (_: Type).asSeenFrom(methodOwnerType, methodOwner).subst(methodTypeParams, methodTypeArgs) - formatAnnotationMessage(rawMsg, sym.owner, substituteType) - } - } - - /** Extracting the message from a type, e.g. in - * - * @annotation.implicitNotFound("Foo is missing") - * trait Foo - * - * def foo(implicit foo: Foo): Any = ??? - */ - private def userDefinedImplicitNotFoundTypeMessage: Option[String] = - def recur(tp: Type): Option[String] = tp match - case tp: TypeRef => - val sym = tp.symbol - userDefinedImplicitNotFoundTypeMessage(sym).orElse(recur(tp.info)) - case tp: ClassInfo => - tp.baseClasses.iterator - .map(userDefinedImplicitNotFoundTypeMessage) - .find(_.isDefined).flatten - case tp: TypeProxy => - recur(tp.superType) - case tp: AndType => - recur(tp.tp1).orElse(recur(tp.tp2)) - case _ => - None - recur(pt) - - private def userDefinedImplicitNotFoundTypeMessage(sym: Symbol): Option[String] = - for - rawMsg <- userDefinedMsg(sym, defn.ImplicitNotFoundAnnot) - if Feature.migrateTo3 || sym != defn.Function1 - // Don't inherit "No implicit view available..." message if subtypes of Function1 are not treated as implicit conversions anymore - yield - val substituteType = (_: Type).asSeenFrom(pt, sym) - formatAnnotationMessage(rawMsg, sym, substituteType) - - private def hiddenImplicitsAddendum: String = - def hiddenImplicitNote(s: SearchSuccess) = - em"\n\nNote: ${s.ref.symbol.showLocated} was not considered because it was not imported with `import given`." 
- - val normalImports = ignoredInstanceNormalImport.map(hiddenImplicitNote) - - normalImports.getOrElse(importSuggestionAddendum) - end hiddenImplicitsAddendum - - private object AmbiguousImplicitMsg { - def unapply(search: SearchSuccess): Option[String] = - userDefinedMsg(search.ref.symbol, defn.ImplicitAmbiguousAnnot) - } -} diff --git a/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala b/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala index 46725f0fa6b2..b1513df777ec 100644 --- a/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala +++ b/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala @@ -285,8 +285,9 @@ object EtaExpansion extends LiftImpure { val body = Apply(lifted, ids) if (mt.isContextualMethod) body.setApplyKind(ApplyKind.Using) val fn = - if (mt.isContextualMethod) new untpd.FunctionWithMods(params, body, Modifiers(Given)) - else if (mt.isImplicitMethod) new untpd.FunctionWithMods(params, body, Modifiers(Implicit)) + if (mt.isContextualMethod) new untpd.FunctionWithMods(params, body, Modifiers(Given), mt.erasedParams) + else if (mt.isImplicitMethod) new untpd.FunctionWithMods(params, body, Modifiers(Implicit), mt.erasedParams) + else if (mt.hasErasedParams) new untpd.FunctionWithMods(params, body, Modifiers(), mt.erasedParams) else untpd.Function(params, body) if (defs.nonEmpty) untpd.Block(defs.toList map (untpd.TypedSplice(_)), fn) else fn } diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 42c78dcfb32c..4bbd6ee080b6 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -31,6 +31,7 @@ import Feature.migrateTo3 import config.Printers.{implicits, implicitsDetailed} import collection.mutable import reporting._ +import transform.Splicer import annotation.tailrec import scala.annotation.internal.sharable @@ -48,17 +49,19 @@ object Implicits: } /** Both search candidates and successes are 
references with a specific nesting level. */ - sealed trait RefAndLevel { + sealed trait RefAndLevel extends Showable { def ref: TermRef def level: Int } /** An eligible implicit candidate, consisting of an implicit reference and a nesting level */ - case class Candidate(implicitRef: ImplicitRef, kind: Candidate.Kind, level: Int) extends RefAndLevel { + case class Candidate(implicitRef: ImplicitRef, kind: Candidate.Kind, level: Int) extends RefAndLevel with Showable { def ref: TermRef = implicitRef.underlyingRef def isExtension = (kind & Candidate.Extension) != 0 def isConversion = (kind & Candidate.Conversion) != 0 + + def toText(printer: Printer): Text = printer.toText(this) } object Candidate { type Kind = Int @@ -435,20 +438,15 @@ object Implicits: final protected def qualify(using Context): String = expectedType match { case SelectionProto(name, mproto, _, _) if !argument.isEmpty => - em"provide an extension method `$name` on ${argument.tpe}" + i"provide an extension method `$name` on ${argument.tpe}" case NoType => - if (argument.isEmpty) em"match expected type" - else em"convert from ${argument.tpe} to expected type" + if (argument.isEmpty) i"match expected type" + else i"convert from ${argument.tpe} to expected type" case _ => - if (argument.isEmpty) em"match type ${clarify(expectedType)}" - else em"convert from ${argument.tpe} to ${clarify(expectedType)}" + if (argument.isEmpty) i"match type ${clarify(expectedType)}" + else i"convert from ${argument.tpe} to ${clarify(expectedType)}" } - /** An explanation of the cause of the failure as a string */ - def explanation(using Context): String - - def msg(using Context): Message = explanation.toMessage - /** If search was for an implicit conversion, a note describing the failure * in more detail - this is either empty or starts with a '\n' */ @@ -488,8 +486,9 @@ object Implicits: map(tp) } - def explanation(using Context): String = + def msg(using Context): Message = em"no implicit values were found that 
 $qualify" + override def toString = s"NoMatchingImplicits($expectedType, $argument)" } @@ -509,20 +508,20 @@ object Implicits: i""" |Note that implicit conversions were not tried because the result of an implicit conversion |must be more specific than $target""" - override def explanation(using Context) = - i"""${super.explanation}. - |The expected type $target is not specific enough, so no search was attempted""" + + override def msg(using Context) = + super.msg.append(i"\nThe expected type $target is not specific enough, so no search was attempted") override def toString = s"TooUnspecific" /** An ambiguous implicits failure */ class AmbiguousImplicits(val alt1: SearchSuccess, val alt2: SearchSuccess, val expectedType: Type, val argument: Tree) extends SearchFailureType { - def explanation(using Context): String = + def msg(using Context): Message = var str1 = err.refStr(alt1.ref) var str2 = err.refStr(alt2.ref) if str1 == str2 then str1 = ctx.printer.toTextRef(alt1.ref).show str2 = ctx.printer.toTextRef(alt2.ref).show - em"both $str1 and $str2 $qualify" + em"both $str1 and $str2 $qualify".withoutDisambiguation() override def whyNoConversion(using Context): String = if !argument.isEmpty && argument.tpe.widen.isRef(defn.NothingClass) then "" @@ -536,21 +535,21 @@ object Implicits: class MismatchedImplicit(ref: TermRef, val expectedType: Type, val argument: Tree) extends SearchFailureType { - def explanation(using Context): String = + def msg(using Context): Message = em"${err.refStr(ref)} does not $qualify" } class DivergingImplicit(ref: TermRef, val expectedType: Type, val argument: Tree) extends SearchFailureType { - def explanation(using Context): String = + def msg(using Context): Message = em"${err.refStr(ref)} produces a diverging implicit search when trying to $qualify" } /** A search failure type for attempted ill-typed extension method calls */ class FailedExtension(extApp: Tree, val expectedType: Type, val whyFailed: Message) extends SearchFailureType: def
argument = EmptyTree - def explanation(using Context) = em"$extApp does not $qualify" + def msg(using Context) = em"$extApp does not $qualify" /** A search failure type for aborted searches of extension methods, typically * because of a cyclic reference or similar. @@ -558,7 +557,6 @@ object Implicits: class NestedFailure(_msg: Message, val expectedType: Type) extends SearchFailureType: def argument = EmptyTree override def msg(using Context) = _msg - def explanation(using Context) = msg.toString /** A search failure type for failed synthesis of terms for special types */ class SynthesisFailure(reasons: List[String], val expectedType: Type) extends SearchFailureType: @@ -568,10 +566,16 @@ object Implicits: if reasons.length > 1 then reasons.mkString("\n\t* ", "\n\t* ", "") else - reasons.mkString + reasons.mkString(" ", "", "") - def explanation(using Context) = em"Failed to synthesize an instance of type ${clarify(expectedType)}: ${formatReasons}" + def msg(using Context) = em"Failed to synthesize an instance of type ${clarify(expectedType)}:${formatReasons}" + class MacroErrorsFailure(errors: List[Diagnostic.Error], + val expectedType: Type, + val argument: Tree) extends SearchFailureType { + def msg(using Context): Message = + em"${errors.map(_.msg).mkString("\n")}" + } end Implicits import Implicits._ @@ -620,6 +624,8 @@ trait ImplicitRunInfo: traverse(t.prefix) case t: ThisType if t.cls.is(Module) && t.cls.isStaticOwner => traverse(t.cls.sourceModule.termRef) + case t: ThisType => + traverse(t.tref) case t: ConstantType => traverse(t.underlying) case t: TypeParamRef => @@ -635,7 +641,7 @@ trait ImplicitRunInfo: def apply(tp: Type): collection.Set[Type] = parts = mutable.LinkedHashSet() - partSeen.clear() + partSeen.clear(resetToInitial = false) traverse(tp) parts end collectParts @@ -741,6 +747,7 @@ trait ImplicitRunInfo: * - If `T` is a singleton reference, the anchors of its underlying type, plus, * if `T` is of the form `(P#x).type`, the anchors of `P`. 
* - If `T` is the this-type of a static object, the anchors of a term reference to that object. + * - If `T` is some other this-type `P.this.type`, the anchors of `P`. * - If `T` is some other type, the union of the anchors of each constituent type of `T`. * * The _implicit scope_ of a type `tp` is the smallest set S of term references (i.e. TermRefs) @@ -851,7 +858,7 @@ trait Implicits: inferred match { case SearchSuccess(_, ref, _, false) if isOldStyleFunctionConversion(ref.underlying) => report.migrationWarning( - i"The conversion ${ref} will not be applied implicitly here in Scala 3 because only implicit methods and instances of Conversion class will continue to work as implicit views.", + em"The conversion ${ref} will not be applied implicitly here in Scala 3 because only implicit methods and instances of Conversion class will continue to work as implicit views.", from ) case _ => @@ -905,7 +912,7 @@ trait Implicits: pt: Type, where: String, paramSymWithMethodCallTree: Option[(Symbol, Tree)] = None - )(using Context): String = { + )(using Context): Message = { def findHiddenImplicitsCtx(c: Context): Context = if c == NoContext then c else c.freshOver(findHiddenImplicitsCtx(c.outer)).addMode(Mode.FindHiddenImplicits) @@ -928,8 +935,34 @@ trait Implicits: // example where searching for a nested type causes an infinite loop. 
None - val error = new ImplicitSearchError(arg, pt, where, paramSymWithMethodCallTree, ignoredInstanceNormalImport, importSuggestionAddendum(pt)) - error.missingArgMsg + def allImplicits(currImplicits: ContextualImplicits): List[ImplicitRef] = + if currImplicits.outerImplicits == null then currImplicits.refs + else currImplicits.refs ::: allImplicits(currImplicits.outerImplicits) + + /** Whether the given type is for an implicit def that's a Scala 2 implicit conversion */ + def isImplicitDefConversion(typ: Type): Boolean = typ match { + case PolyType(_, resType) => isImplicitDefConversion(resType) + case mt: MethodType => !mt.isImplicitMethod && !mt.isContextualMethod + case _ => false + } + + def ignoredConvertibleImplicits = arg.tpe match + case fail: SearchFailureType => + if (fail.expectedType eq pt) || isFullyDefined(fail.expectedType, ForceDegree.none) then + // Get every implicit in scope and try to convert each + allImplicits(ctx.implicits) + .view + .map(_.underlyingRef) + .distinctBy(_.denot) + .filter { imp => + !isImplicitDefConversion(imp.underlying) + && imp.symbol != defn.Predef_conforms + && viewExists(imp, fail.expectedType) + } + else + Nil + + MissingImplicitArgument(arg, pt, where, paramSymWithMethodCallTree, ignoredInstanceNormalImport, ignoredConvertibleImplicits) } /** A string indicating the formal parameter corresponding to a missing argument */ @@ -938,11 +971,11 @@ trait Implicits: case Select(qual, nme.apply) if defn.isFunctionType(qual.tpe.widen) => val qt = qual.tpe.widen val qt1 = qt.dealiasKeepAnnots - def addendum = if (qt1 eq qt) "" else (i"\nThe required type is an alias of: $qt1") - em"parameter of ${qual.tpe.widen}$addendum" + def addendum = if (qt1 eq qt) "" else (i"\nWhere $qt is an alias of: $qt1") + i"parameter of ${qual.tpe.widen}$addendum" case _ => - em"${ if paramName.is(EvidenceParamName) then "an implicit parameter" - else s"parameter $paramName" } of $methodStr" + i"${ if paramName.is(EvidenceParamName) then "an 
implicit parameter" + else s"parameter $paramName" } of $methodStr" } /** A CanEqual[T, U] instance is assumed @@ -1008,11 +1041,10 @@ trait Implicits: if (argument.isEmpty) i"missing implicit parameter of type $pt after typer at phase ${ctx.phase.phaseName}" else i"type error: ${argument.tpe} does not conform to $pt${err.whyNoMatchStr(argument.tpe, pt)}") - if pt.unusableForInference - || !argument.isEmpty && argument.tpe.unusableForInference - then return NoMatchingImplicitsFailure + val usableForInference = !pt.unusableForInference + && (argument.isEmpty || !argument.tpe.unusableForInference) - val result0 = + val result0 = if usableForInference then // If we are searching implicits when resolving an import symbol, start the search // in the first enclosing context that does not have the same scope and owner as the current // context. Without that precaution, an eligible implicit in the current scope @@ -1029,7 +1061,7 @@ trait Implicits: catch case ce: CyclicReference => ce.inImplicitSearch = true throw ce - end result0 + else NoMatchingImplicitsFailure val result = result0 match { @@ -1037,7 +1069,7 @@ trait Implicits: if result.tstate ne ctx.typerState then result.tstate.commit() if result.gstate ne ctx.gadt then - ctx.gadt.restore(result.gstate) + ctx.gadtState.restore(result.gstate) if hasSkolem(false, result.tree) then report.error(SkolemInInferred(result.tree, pt, argument), ctx.source.atSpan(span)) implicits.println(i"success: $result") @@ -1050,14 +1082,15 @@ trait Implicits: withMode(Mode.OldOverloadingResolution)(inferImplicit(pt, argument, span)) match { case altResult: SearchSuccess => report.migrationWarning( - s"According to new implicit resolution rules, this will be ambiguous:\n${result.reason.explanation}", + result.reason.msg + .prepend(s"According to new implicit resolution rules, this will be ambiguous:\n"), ctx.source.atSpan(span)) altResult case _ => result } else result - case NoMatchingImplicitsFailure => + case 
NoMatchingImplicitsFailure if usableForInference => SearchFailure(new NoMatchingImplicits(pt, argument, ctx.typerState.constraint), span) case _ => result0 @@ -1136,19 +1169,22 @@ trait Implicits: if ctx.reporter.hasErrors || !cand.ref.symbol.isAccessibleFrom(cand.ref.prefix) then - ctx.reporter.removeBufferedMessages - adapted.tpe match { + val res = adapted.tpe match { case _: SearchFailureType => SearchFailure(adapted) case error: PreviousErrorType if !adapted.symbol.isAccessibleFrom(cand.ref.prefix) => SearchFailure(adapted.withType(new NestedFailure(error.msg, pt))) - case _ => + case tpe => // Special case for `$conforms` and `<:<.refl`. Showing them to the users brings // no value, so we instead report a `NoMatchingImplicitsFailure` if (adapted.symbol == defn.Predef_conforms || adapted.symbol == defn.SubType_refl) NoMatchingImplicitsFailure + else if Splicer.inMacroExpansion && tpe <:< pt then + SearchFailure(adapted.withType(new MacroErrorsFailure(ctx.reporter.allErrors.reverse, pt, argument))) else SearchFailure(adapted.withType(new MismatchedImplicit(ref, pt, argument))) } + ctx.reporter.removeBufferedMessages + res else SearchSuccess(adapted, ref, cand.level, cand.isExtension)(ctx.typerState, ctx.gadt) } @@ -1357,13 +1393,13 @@ trait Implicits: def warnAmbiguousNegation(ambi: AmbiguousImplicits) = report.migrationWarning( - i"""Ambiguous implicits ${ambi.alt1.ref.symbol.showLocated} and ${ambi.alt2.ref.symbol.showLocated} - |seem to be used to implement a local failure in order to negate an implicit search. - |According to the new implicit resolution rules this is no longer possible; - |the search will fail with a global ambiguity error instead. - | - |Consider using the scala.util.NotGiven class to implement similar functionality.""", - srcPos) + em"""Ambiguous implicits ${ambi.alt1.ref.symbol.showLocated} and ${ambi.alt2.ref.symbol.showLocated} + |seem to be used to implement a local failure in order to negate an implicit search. 
+ |According to the new implicit resolution rules this is no longer possible; + |the search will fail with a global ambiguity error instead. + | + |Consider using the scala.util.NotGiven class to implement similar functionality.""", + srcPos) /** Compare the length of the baseClasses of two symbols (except for objects, * where we use the length of the companion class instead if it's bigger). @@ -1557,7 +1593,6 @@ trait Implicits: * implicit search. * * @param cand The candidate implicit to be explored. - * @param pt The target type for the above candidate. * @result True if this candidate/pt are divergent, false otherwise. */ def checkDivergence(cand: Candidate): Boolean = diff --git a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala index 27b83e025cf9..4d027b8750e0 100644 --- a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala +++ b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala @@ -6,15 +6,14 @@ import core._ import ast._ import Contexts._, Types._, Flags._, Symbols._ import ProtoTypes._ -import NameKinds.{AvoidNameKind, UniqueName} +import NameKinds.UniqueName import util.Spans._ -import util.{Stats, SimpleIdentityMap, SrcPos} +import util.{Stats, SimpleIdentityMap, SimpleIdentitySet, SrcPos} import Decorators._ import config.Printers.{gadts, typr} import annotation.tailrec import reporting._ import collection.mutable - import scala.annotation.internal.sharable object Inferencing { @@ -27,12 +26,8 @@ object Inferencing { * but only if the overall result of `isFullyDefined` is `true`. * Variables that are successfully minimized do not count as uninstantiated. 
*/ - def isFullyDefined(tp: Type, force: ForceDegree.Value)(using Context): Boolean = { - val nestedCtx = ctx.fresh.setNewTyperState() - val result = new IsFullyDefinedAccumulator(force)(using nestedCtx).process(tp) - if (result) nestedCtx.typerState.commit() - result - } + def isFullyDefined(tp: Type, force: ForceDegree.Value)(using Context): Boolean = + withFreshTyperState(new IsFullyDefinedAccumulator(force).process(tp), x => x) /** Try to fully define `tp`. Return whether constraint has changed. * Any changed constraint is kept. @@ -171,14 +166,18 @@ object Inferencing { private var toMaximize: List[TypeVar] = Nil - def apply(x: Boolean, tp: Type): Boolean = - try tp.dealias match + def apply(x: Boolean, tp: Type): Boolean = trace(i"isFullyDefined($tp, $force)", typr) { + try { + val tpd = tp.dealias + if tpd ne tp then apply(x, tpd) + else tp match case _: WildcardType | _: ProtoType => false case tvar: TypeVar if !tvar.isInstantiated => force.appliesTo(tvar) && ctx.typerState.constraint.contains(tvar) && { + var fail = false val direction = instDirection(tvar.origin) if minimizeSelected then if direction <= 0 && tvar.hasLowerBound then @@ -188,20 +187,23 @@ object Inferencing { // else hold off instantiating unbounded unconstrained variable else if direction != 0 then instantiate(tvar, fromBelow = direction < 0) - else if variance >= 0 && (force.ifBottom == IfBottom.ok || tvar.hasLowerBound) then + else if variance >= 0 && tvar.hasLowerBound then + instantiate(tvar, fromBelow = true) + else if (variance > 0 || variance == 0 && !tvar.hasUpperBound) + && force.ifBottom == IfBottom.ok + then // if variance == 0, prefer upper bound if one is given instantiate(tvar, fromBelow = true) else if variance >= 0 && force.ifBottom == IfBottom.fail then - return false + fail = true else toMaximize = tvar :: toMaximize - foldOver(x, tvar) - } - case tp => - reporting.trace(s"IFT $tp") { - foldOver(x, tp) + !fail && foldOver(x, tvar) } + case tp => foldOver(x, tp) + } catch 
case ex: Throwable => handleRecursive("check fully defined", tp.show, ex) + } def process(tp: Type): Boolean = // Maximize type vars in the order they were visited before */ @@ -267,7 +269,7 @@ object Inferencing { && ctx.gadt.contains(tp.symbol) => val sym = tp.symbol - val res = ctx.gadt.approximation(sym, fromBelow = variance < 0) + val res = ctx.gadtState.approximation(sym, fromBelow = variance < 0) gadts.println(i"approximated $tp ~~ $res") res @@ -312,16 +314,17 @@ object Inferencing { } /** If `tree` has a type lambda type, infer its type parameters by comparing with expected type `pt` */ - def inferTypeParams(tree: Tree, pt: Type)(using Context): Tree = tree.tpe match { + def inferTypeParams(tree: Tree, pt: Type)(using Context): Tree = tree.tpe match case tl: TypeLambda => val (tl1, tvars) = constrained(tl, tree) var tree1 = AppliedTypeTree(tree.withType(tl1), tvars) tree1.tpe <:< pt - fullyDefinedType(tree1.tpe, "template parent", tree.srcPos) - tree1 + if isFullyDefined(tree1.tpe, force = ForceDegree.failBottom) then + tree1 + else + EmptyTree case _ => tree - } def isSkolemFree(tp: Type)(using Context): Boolean = !tp.existsPart(_.isInstanceOf[SkolemType]) @@ -418,7 +421,7 @@ object Inferencing { if safeToInstantiate then tvar.instantiate(fromBelow = v == -1) else { val bounds = TypeComparer.fullBounds(tvar.origin) - if bounds.hi <:< bounds.lo || bounds.hi.classSymbol.is(Final) then + if (bounds.hi frozen_<:< bounds.lo) || bounds.hi.classSymbol.is(Final) then tvar.instantiate(fromBelow = false) else { // We do not add the created symbols to GADT constraint immediately, since they may have inter-dependencies. @@ -437,7 +440,7 @@ object Inferencing { } // We add the created symbols to GADT constraint here. 
- if (res.nonEmpty) ctx.gadt.addToConstraint(res) + if (res.nonEmpty) ctx.gadtState.addToConstraint(res) res } @@ -547,6 +550,10 @@ object Inferencing { case tp: AnnotatedType => tp.derivedAnnotatedType(captureWildcards(tp.parent), tp.annot) case _ => tp } + + def hasCaptureConversionArg(tp: Type)(using Context): Boolean = tp match + case tp: AppliedType => tp.args.exists(_.typeSymbol == defn.TypeBox_CAP) + case _ => false } trait Inferencing { this: Typer => @@ -574,7 +581,7 @@ trait Inferencing { this: Typer => * Then `Y` also occurs co-variantly in `T` because it needs to be minimized in order to constrain * `T` the least. See `variances` for more detail. */ - def interpolateTypeVars(tree: Tree, pt: Type, locked: TypeVars)(using Context): tree.type = { + def interpolateTypeVars(tree: Tree, pt: Type, locked: TypeVars)(using Context): tree.type = val state = ctx.typerState // Note that some variables in `locked` might not be in `state.ownedVars` @@ -583,7 +590,7 @@ trait Inferencing { this: Typer => // `qualifying`. 
val ownedVars = state.ownedVars - if ((ownedVars ne locked) && !ownedVars.isEmpty) { + if (ownedVars ne locked) && !ownedVars.isEmpty then val qualifying = ownedVars -- locked if (!qualifying.isEmpty) { typr.println(i"interpolate $tree: ${tree.tpe.widen} in $state, pt = $pt, owned vars = ${state.ownedVars.toList}%, %, qualifying = ${qualifying.toList}%, %, previous = ${locked.toList}%, % / ${state.constraint}") @@ -619,44 +626,67 @@ trait Inferencing { this: Typer => if state.reporter.hasUnreportedErrors then return tree def constraint = state.constraint - type InstantiateQueue = mutable.ListBuffer[(TypeVar, Boolean)] - val toInstantiate = new InstantiateQueue - for tvar <- qualifying do - if !tvar.isInstantiated && constraint.contains(tvar) && tvar.nestingLevel >= ctx.nestingLevel then - constrainIfDependentParamRef(tvar, tree) - // Needs to be checked again, since previous interpolations could already have - // instantiated `tvar` through unification. - val v = vs(tvar) - if v == null then - // Even though `tvar` is non-occurring in `v`, the specific - // instantiation we pick still matters because `tvar` might appear - // in the bounds of a non-`qualifying` type variable in the - // constraint. - // In particular, if `tvar` was created as the upper or lower - // bound of an existing variable by `LevelAvoidMap`, we - // instantiate it in the direction corresponding to the - // original variable which might be further constrained later. - // Otherwise, we simply rely on `hasLowerBound`. 
- val name = tvar.origin.paramName - val fromBelow = - name.is(AvoidNameKind.UpperBound) || - !name.is(AvoidNameKind.LowerBound) && tvar.hasLowerBound - typr.println(i"interpolate non-occurring $tvar in $state in $tree: $tp, fromBelow = $fromBelow, $constraint") - toInstantiate += ((tvar, fromBelow)) - else if v.intValue != 0 then - typr.println(i"interpolate $tvar in $state in $tree: $tp, fromBelow = ${v.intValue == 1}, $constraint") - toInstantiate += ((tvar, v.intValue == 1)) - else comparing(cmp => - if !cmp.levelOK(tvar.nestingLevel, ctx.nestingLevel) then - // Invariant: The type of a tree whose enclosing scope is level - // N only contains type variables of level <= N. - typr.println(i"instantiate nonvariant $tvar of level ${tvar.nestingLevel} to a type variable of level <= ${ctx.nestingLevel}, $constraint") - cmp.atLevel(ctx.nestingLevel, tvar.origin) - else - typr.println(i"no interpolation for nonvariant $tvar in $state") - ) - /** Instantiate all type variables in `buf` in the indicated directions. + /** Values of this type report type variables to instantiate with variance indication: + * +1 variable appears covariantly, can be instantiated from lower bound + * -1 variable appears contravariantly, can be instantiated from upper bound + * 0 variable does not appear at all, can be instantiated from either bound + */ + type ToInstantiate = List[(TypeVar, Int)] + + val toInstantiate: ToInstantiate = + val buf = new mutable.ListBuffer[(TypeVar, Int)] + for tvar <- qualifying do + if !tvar.isInstantiated && constraint.contains(tvar) && tvar.nestingLevel >= ctx.nestingLevel then + constrainIfDependentParamRef(tvar, tree) + if !tvar.isInstantiated then + // isInstantiated needs to be checked again, since previous interpolations could already have + // instantiated `tvar` through unification. 
+ val v = vs(tvar) + if v == null then buf += ((tvar, 0)) + else if v.intValue != 0 then buf += ((tvar, v.intValue)) + else comparing(cmp => + if !cmp.levelOK(tvar.nestingLevel, ctx.nestingLevel) then + // Invariant: The type of a tree whose enclosing scope is level + // N only contains type variables of level <= N. + typr.println(i"instantiate nonvariant $tvar of level ${tvar.nestingLevel} to a type variable of level <= ${ctx.nestingLevel}, $constraint") + cmp.atLevel(ctx.nestingLevel, tvar.origin) + else + typr.println(i"no interpolation for nonvariant $tvar in $state") + ) + buf.toList + + def typeVarsIn(xs: ToInstantiate): TypeVars = + xs.foldLeft(SimpleIdentitySet.empty: TypeVars)((tvs, tvi) => tvs + tvi._1) + + /** Filter list of proposed instantiations so that they don't constrain further + * the current constraint. + */ + def filterByDeps(tvs0: ToInstantiate): ToInstantiate = + val excluded = // ignore dependencies from other variables that are being instantiated + typeVarsIn(tvs0) + def step(tvs: ToInstantiate): ToInstantiate = tvs match + case tvs @ (hd @ (tvar, v)) :: tvs1 => + def aboveOK = !constraint.dependsOn(tvar, excluded, co = true) + def belowOK = !constraint.dependsOn(tvar, excluded, co = false) + if v == 0 && !aboveOK then + step((tvar, 1) :: tvs1) + else if v == 0 && !belowOK then + step((tvar, -1) :: tvs1) + else if v == -1 && !aboveOK || v == 1 && !belowOK then + typr.println(i"drop $tvar, $v in $tp, $pt, qualifying = ${qualifying.toList}, tvs0 = ${tvs0.toList}%, %, excluded = ${excluded.toList}, $constraint") + step(tvs1) + else // no conflict, keep the instantiation proposal + tvs.derivedCons(hd, step(tvs1)) + case Nil => + Nil + val tvs1 = step(tvs0) + if tvs1 eq tvs0 then tvs1 + else filterByDeps(tvs1) // filter again with smaller excluded set + end filterByDeps + + /** Instantiate all type variables in `tvs` in the indicated directions, + * as described in the doc comment of `ToInstantiate`. 
* If a type variable A is instantiated from below, and there is another * type variable B in `buf` that is known to be smaller than A, wait and * instantiate all other type variables before trying to instantiate A again. @@ -685,29 +715,37 @@ trait Inferencing { this: Typer => * * V2 := V3, O2 := O3 */ - def doInstantiate(buf: InstantiateQueue): Unit = - if buf.nonEmpty then - val suspended = new InstantiateQueue - while buf.nonEmpty do - val first @ (tvar, fromBelow) = buf.head - buf.dropInPlace(1) - if !tvar.isInstantiated then - val suspend = buf.exists{ (following, _) => - if fromBelow then - constraint.isLess(following.origin, tvar.origin) - else - constraint.isLess(tvar.origin, following.origin) + def doInstantiate(tvs: ToInstantiate): Unit = + + /** Try to instantiate `tvs`, return any suspended type variables */ + def tryInstantiate(tvs: ToInstantiate): ToInstantiate = tvs match + case (hd @ (tvar, v)) :: tvs1 => + val fromBelow = v == 1 || (v == 0 && tvar.hasLowerBound) + typr.println( + i"interpolate${if v == 0 then " non-occurring" else ""} $tvar in $state in $tree: $tp, fromBelow = $fromBelow, $constraint") + if tvar.isInstantiated then + tryInstantiate(tvs1) + else + val suspend = tvs1.exists{ (following, _) => + if fromBelow + then constraint.isLess(following.origin, tvar.origin) + else constraint.isLess(tvar.origin, following.origin) } - if suspend then suspended += first else tvar.instantiate(fromBelow) - end if - end while - doInstantiate(suspended) + if suspend then + typr.println(i"suspended: $hd") + hd :: tryInstantiate(tvs1) + else + tvar.instantiate(fromBelow) + tryInstantiate(tvs1) + case Nil => Nil + if tvs.nonEmpty then doInstantiate(tryInstantiate(tvs)) end doInstantiate - doInstantiate(toInstantiate) + + doInstantiate(filterByDeps(toInstantiate)) } - } + end if tree - } + end interpolateTypeVars /** If `tvar` represents a parameter of a dependent method type in the current `call` * approximate it from below with the type of the actual 
argument. Skolemize that @@ -737,13 +775,14 @@ trait Inferencing { this: Typer => end constrainIfDependentParamRef } -/** An enumeration controlling the degree of forcing in "is-dully-defined" checks. */ +/** An enumeration controlling the degree of forcing in "is-fully-defined" checks. */ @sharable object ForceDegree { - class Value(val appliesTo: TypeVar => Boolean, val ifBottom: IfBottom) - val none: Value = new Value(_ => false, IfBottom.ok) - val all: Value = new Value(_ => true, IfBottom.ok) - val failBottom: Value = new Value(_ => true, IfBottom.fail) - val flipBottom: Value = new Value(_ => true, IfBottom.flip) + class Value(val appliesTo: TypeVar => Boolean, val ifBottom: IfBottom): + override def toString = s"ForceDegree.Value(.., $ifBottom)" + val none: Value = new Value(_ => false, IfBottom.ok) { override def toString = "ForceDegree.none" } + val all: Value = new Value(_ => true, IfBottom.ok) { override def toString = "ForceDegree.all" } + val failBottom: Value = new Value(_ => true, IfBottom.fail) { override def toString = "ForceDegree.failBottom" } + val flipBottom: Value = new Value(_ => true, IfBottom.flip) { override def toString = "ForceDegree.flipBottom" } } enum IfBottom: diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index ad8d0e50d348..cc4433f75a68 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -201,7 +201,7 @@ class Namer { typer: Typer => case tree: MemberDef => SymDenotations.canBeLocal(tree.name, flags) case _ => false if !ok then - report.error(i"modifier(s) `${flags.flagsString}` incompatible with $kind definition", tree.srcPos) + report.error(em"modifier(s) `${flags.flagsString}` incompatible with $kind definition", tree.srcPos) if adapted.is(Private) && canBeLocal then adapted | Local else adapted } @@ -461,8 +461,8 @@ class Namer { typer: Typer => val isProvisional = 
parents.exists(!_.baseType(defn.AnyClass).exists) if isProvisional then typr.println(i"provisional superclass $first for $cls") - first = AnnotatedType(first, Annotation(defn.ProvisionalSuperClassAnnot)) - checkFeasibleParent(first, cls.srcPos, em" in inferred superclass $first") :: parents + first = AnnotatedType(first, Annotation(defn.ProvisionalSuperClassAnnot, cls.span)) + checkFeasibleParent(first, cls.srcPos, i" in inferred superclass $first") :: parents end ensureFirstIsClass /** Add child annotation for `child` to annotations of `cls`. The annotation @@ -541,7 +541,11 @@ class Namer { typer: Typer => res = cpy.TypeDef(modCls)( rhs = cpy.Template(modTempl)( derived = if (fromTempl.derived.nonEmpty) fromTempl.derived else modTempl.derived, - body = fromTempl.body ++ modTempl.body)) + body = fromTempl.body.filter { + case stat: DefDef => stat.name != nme.toString_ + // toString should only be generated if explicit companion is missing + case _ => true + } ++ modTempl.body)) if (fromTempl.derived.nonEmpty) { if (modTempl.derived.nonEmpty) report.error(em"a class and its companion cannot both have `derives` clauses", mdef.srcPos) @@ -762,7 +766,7 @@ class Namer { typer: Typer => } def missingType(sym: Symbol, modifier: String)(using Context): Unit = { - report.error(s"${modifier}type of implicit definition needs to be given explicitly", sym.srcPos) + report.error(em"${modifier}type of implicit definition needs to be given explicitly", sym.srcPos) sym.resetFlag(GivenOrImplicit) } @@ -831,9 +835,9 @@ class Namer { typer: Typer => for (annotTree <- original.mods.annotations) { val cls = typedAheadAnnotationClass(annotTree)(using annotCtx) if (cls eq sym) - report.error("An annotation class cannot be annotated with iself", annotTree.srcPos) + report.error(em"An annotation class cannot be annotated with iself", annotTree.srcPos) else { - val ann = Annotation.deferred(cls)(typedAheadAnnotation(annotTree)(using annotCtx)) + val ann = 
Annotation.deferred(cls)(typedAheadExpr(annotTree)(using annotCtx)) sym.addAnnotation(ann) } } @@ -858,7 +862,6 @@ class Namer { typer: Typer => * with a user-defined method in the same scope with a matching type. */ private def invalidateIfClashingSynthetic(denot: SymDenotation): Unit = - def isCaseClassOrCompanion(owner: Symbol) = owner.isClass && { if (owner.is(Module)) owner.linkedClass.is(CaseClass) @@ -875,10 +878,19 @@ class Namer { typer: Typer => !sd.symbol.is(Deferred) && sd.matches(denot))) val isClashingSynthetic = - denot.is(Synthetic, butNot = ConstructorProxy) - && desugar.isRetractableCaseClassMethodName(denot.name) - && isCaseClassOrCompanion(denot.owner) - && (definesMember || inheritsConcreteMember) + denot.is(Synthetic, butNot = ConstructorProxy) && + ( + (desugar.isRetractableCaseClassMethodName(denot.name) + && isCaseClassOrCompanion(denot.owner) + && (definesMember || inheritsConcreteMember) + ) + || + // remove synthetic constructor of a java Record if it clashes with a non-synthetic constructor + (denot.isConstructor + && denot.owner.is(JavaDefined) && denot.owner.derivesFrom(defn.JavaRecordClass) + && denot.owner.unforcedDecls.lookupAll(denot.name).exists(c => c != denot.symbol && c.info.matches(denot.info)) + ) + ) if isClashingSynthetic then typr.println(i"invalidating clashing $denot in ${denot.owner}") @@ -1227,13 +1239,21 @@ class Namer { typer: Typer => case pt: MethodOrPoly => 1 + extensionParamsCount(pt.resType) case _ => 0 val ddef = tpd.DefDef(forwarder.asTerm, prefss => { + val forwarderCtx = ctx.withOwner(forwarder) val (pathRefss, methRefss) = prefss.splitAt(extensionParamsCount(path.tpe.widen)) val ref = path.appliedToArgss(pathRefss).select(sym.asTerm) - ref.appliedToArgss(adaptForwarderParams(Nil, sym.info, methRefss)) - .etaExpandCFT(using ctx.withOwner(forwarder)) + val rhs = ref.appliedToArgss(adaptForwarderParams(Nil, sym.info, methRefss)) + .etaExpandCFT(using forwarderCtx) + if forwarder.isInlineMethod then + // 
Eagerly make the body inlineable. `registerInlineInfo` does this lazily + // but it does not get evaluated during typer as the forwarder we are creating + // is already typed. + val inlinableRhs = PrepareInlineable.makeInlineable(rhs)(using forwarderCtx) + PrepareInlineable.registerInlineInfo(forwarder, inlinableRhs)(using forwarderCtx) + inlinableRhs + else + rhs }) - if forwarder.isInlineMethod then - PrepareInlineable.registerInlineInfo(forwarder, ddef.rhs) buf += ddef.withSpan(span) if hasDefaults then foreachDefaultGetterOf(sym.asTerm, @@ -1249,7 +1269,7 @@ class Namer { typer: Typer => val reason = mbrs.map(canForward(_, alias)).collect { case CanForward.No(whyNot) => i"\n$path.$name cannot be exported because it $whyNot" }.headOption.getOrElse("") - report.error(i"""no eligible member $name at $path$reason""", ctx.source.atSpan(span)) + report.error(em"""no eligible member $name at $path$reason""", ctx.source.atSpan(span)) else targets += alias @@ -1314,7 +1334,7 @@ class Namer { typer: Typer => case _ => 0 if cmp == 0 then report.error( - ex"""Clashing exports: The exported + em"""Clashing exports: The exported | ${forwarder.rhs.symbol}: ${alt1.widen} |and ${forwarder1.rhs.symbol}: ${alt2.widen} |have the same signature after erasure and overloading resolution could not disambiguate.""", @@ -1335,7 +1355,7 @@ class Namer { typer: Typer => * * The idea is that this simulates the hypothetical case where export forwarders * are not generated and we treat an export instead more like an import where we - * expand the use site reference. Test cases in {neg,pos}/i14699.scala. + * expand the use site reference. Test cases in {neg,pos}/i14966.scala. * * @pre Forwarders with the same name are consecutive in `forwarders`. 
*/ @@ -1437,7 +1457,7 @@ class Namer { typer: Typer => case mt: MethodType if cls.is(Case) && mt.isParamDependent => // See issue #8073 for background report.error( - i"""Implementation restriction: case classes cannot have dependencies between parameters""", + em"""Implementation restriction: case classes cannot have dependencies between parameters""", cls.srcPos) case _ => @@ -1453,27 +1473,41 @@ class Namer { typer: Typer => * only if parent type contains uninstantiated type parameters. */ def parentType(parent: untpd.Tree)(using Context): Type = - if (parent.isType) - typedAheadType(parent, AnyTypeConstructorProto).tpe - else { - val (core, targs) = stripApply(parent) match { + + def typedParentApplication(parent: untpd.Tree): Type = + val (core, targs) = stripApply(parent) match case TypeApply(core, targs) => (core, targs) case core => (core, Nil) - } - core match { + core match case Select(New(tpt), nme.CONSTRUCTOR) => val targs1 = targs map (typedAheadType(_)) val ptype = typedAheadType(tpt).tpe appliedTo targs1.tpes if (ptype.typeParams.isEmpty) ptype - else { + else if (denot.is(ModuleClass) && denot.sourceModule.isOneOf(GivenOrImplicit)) missingType(denot.symbol, "parent ")(using creationContext) fullyDefinedType(typedAheadExpr(parent).tpe, "class parent", parent.srcPos) - } case _ => UnspecifiedErrorType.assertingErrorsReported - } - } + + def typedParentType(tree: untpd.Tree): tpd.Tree = + val parentTpt = typer.typedType(parent, AnyTypeConstructorProto) + val ptpe = parentTpt.tpe + if ptpe.typeParams.nonEmpty + && ptpe.underlyingClassRef(refinementOK = false).exists + then + // Try to infer type parameters from a synthetic application. + // This might yield new info if implicit parameters are resolved. + // A test case is i16778.scala. 
+ val app = untpd.Apply(untpd.Select(untpd.New(parentTpt), nme.CONSTRUCTOR), Nil) + typedParentApplication(app) + app.getAttachment(TypedAhead).getOrElse(parentTpt) + else + parentTpt + + if parent.isType then typedAhead(parent, typedParentType).tpe + else typedParentApplication(parent) + end parentType /** Check parent type tree `parent` for the following well-formedness conditions: * (1) It must be a class type with a stable prefix (@see checkClassTypeWithStablePrefix) @@ -1607,7 +1641,7 @@ class Namer { typer: Typer => case Some(ttree) => ttree case none => val ttree = typed(tree) - xtree.putAttachment(TypedAhead, ttree) + if !ttree.isEmpty then xtree.putAttachment(TypedAhead, ttree) ttree } } @@ -1618,15 +1652,14 @@ class Namer { typer: Typer => def typedAheadExpr(tree: Tree, pt: Type = WildcardType)(using Context): tpd.Tree = typedAhead(tree, typer.typedExpr(_, pt)) - def typedAheadAnnotation(tree: Tree)(using Context): tpd.Tree = - typedAheadExpr(tree, defn.AnnotationClass.typeRef) - - def typedAheadAnnotationClass(tree: Tree)(using Context): Symbol = tree match { + def typedAheadAnnotationClass(tree: Tree)(using Context): Symbol = tree match case Apply(fn, _) => typedAheadAnnotationClass(fn) case TypeApply(fn, _) => typedAheadAnnotationClass(fn) case Select(qual, nme.CONSTRUCTOR) => typedAheadAnnotationClass(qual) case New(tpt) => typedAheadType(tpt).tpe.classSymbol - } + case TypedSplice(_) => + val sym = tree.symbol + if sym.isConstructor then sym.owner else sym /** Enter and typecheck parameter list */ def completeParams(params: List[MemberDef])(using Context): Unit = { @@ -1690,8 +1723,10 @@ class Namer { typer: Typer => if !Config.checkLevelsOnConstraints then val hygienicType = TypeOps.avoid(rhsType, termParamss.flatten) if (!hygienicType.isValueType || !(hygienicType <:< tpt.tpe)) - report.error(i"return type ${tpt.tpe} of lambda cannot be made hygienic;\n" + - i"it is not a supertype of the hygienic type $hygienicType", mdef.srcPos) + report.error( + 
em"""return type ${tpt.tpe} of lambda cannot be made hygienic + |it is not a supertype of the hygienic type $hygienicType""", + mdef.srcPos) //println(i"lifting $rhsType over $termParamss -> $hygienicType = ${tpt.tpe}") //println(TypeComparer.explained { implicit ctx => hygienicType <:< tpt.tpe }) case _ => @@ -1863,7 +1898,7 @@ class Namer { typer: Typer => // so we must allow constraining its type parameters // compare with typedDefDef, see tests/pos/gadt-inference.scala rhsCtx.setFreshGADTBounds - rhsCtx.gadt.addToConstraint(typeParams) + rhsCtx.gadtState.addToConstraint(typeParams) } def typedAheadRhs(pt: Type) = @@ -1882,7 +1917,7 @@ class Namer { typer: Typer => // larger choice of overrides (see `default-getter.scala`). // For justification on the use of `@uncheckedVariance`, see // `default-getter-variance.scala`. - AnnotatedType(defaultTp, Annotation(defn.UncheckedVarianceAnnot)) + AnnotatedType(defaultTp, Annotation(defn.UncheckedVarianceAnnot, sym.span)) else // don't strip @uncheckedVariance annot for default getters TypeOps.simplify(tp.widenTermRefExpr, diff --git a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala index 6fb019ee057c..bde279c582e6 100644 --- a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala +++ b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala @@ -13,6 +13,8 @@ import Decorators._ import Uniques._ import inlines.Inlines import config.Printers.typr +import Inferencing.* +import ErrorReporting.* import util.SourceFile import TypeComparer.necessarySubType @@ -295,6 +297,8 @@ object ProtoTypes { */ @sharable object AnySelectionProto extends SelectionProto(nme.WILDCARD, WildcardType, NoViewsAllowed, true) + @sharable object SingletonTypeProto extends SelectionProto(nme.WILDCARD, WildcardType, NoViewsAllowed, true) + /** A prototype for selections in pattern constructors */ class UnapplySelectionProto(name: Name) extends SelectionProto(name, WildcardType, NoViewsAllowed, 
true) @@ -368,7 +372,7 @@ object ProtoTypes { private def isUndefined(tp: Type): Boolean = tp match { case _: WildcardType => true - case defn.FunctionOf(args, result, _, _) => args.exists(isUndefined) || isUndefined(result) + case defn.FunctionOf(args, result, _) => args.exists(isUndefined) || isUndefined(result) case _ => false } @@ -490,7 +494,21 @@ object ProtoTypes { val targ = cacheTypedArg(arg, typer.typedUnadapted(_, wideFormal, locked)(using argCtx), force = true) - typer.adapt(targ, wideFormal, locked) + val targ1 = typer.adapt(targ, wideFormal, locked) + if wideFormal eq formal then targ1 + else checkNoWildcardCaptureForCBN(targ1) + } + + def checkNoWildcardCaptureForCBN(targ1: Tree)(using Context): Tree = { + if hasCaptureConversionArg(targ1.tpe) then + val tp = stripCast(targ1).tpe + errorTree(targ1, + em"""argument for by-name parameter is not a value + |and contains wildcard arguments: $tp + | + |Assign it to a val and pass that instead. + |""") + else targ1 } /** The type of the argument `arg`, or `NoType` if `arg` has not been typed before @@ -669,10 +687,12 @@ object ProtoTypes { * * [] _ */ - @sharable object AnyFunctionProto extends UncachedGroundType with MatchAlways + @sharable object AnyFunctionProto extends UncachedGroundType with MatchAlways: + override def toString = "AnyFunctionProto" /** A prototype for type constructors that are followed by a type application */ - @sharable object AnyTypeConstructorProto extends UncachedGroundType with MatchAlways + @sharable object AnyTypeConstructorProto extends UncachedGroundType with MatchAlways: + override def toString = "AnyTypeConstructorProto" extension (pt: Type) def isExtensionApplyProto: Boolean = pt match @@ -822,7 +842,7 @@ object ProtoTypes { normalize(et.resultType, pt) case wtp => val iftp = defn.asContextFunctionType(wtp) - if iftp.exists && followIFT then normalize(iftp.dropDependentRefinement.argInfos.last, pt) + if iftp.exists && followIFT then normalize(iftp.functionArgInfos.last, 
pt) else tp } } @@ -944,8 +964,8 @@ object ProtoTypes { object dummyTreeOfType { def apply(tp: Type)(implicit src: SourceFile): Tree = untpd.Literal(Constant(null)) withTypeUnchecked tp - def unapply(tree: untpd.Tree): Option[Type] = tree match { - case Literal(Constant(null)) => Some(tree.typeOpt) + def unapply(tree: untpd.Tree): Option[Type] = untpd.unsplice(tree) match { + case tree @ Literal(Constant(null)) => Some(tree.typeOpt) case _ => None } } diff --git a/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala b/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala index fa29f450be2a..070449e3ee96 100644 --- a/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala +++ b/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala @@ -11,12 +11,13 @@ import dotty.tools.dotc.core.Decorators._ import dotty.tools.dotc.core.Flags._ import dotty.tools.dotc.core.NameKinds.PatMatGivenVarName import dotty.tools.dotc.core.Names._ -import dotty.tools.dotc.core.StagingContext._ import dotty.tools.dotc.core.StdNames._ import dotty.tools.dotc.core.Symbols._ import dotty.tools.dotc.core.Types._ import dotty.tools.dotc.inlines.PrepareInlineable +import dotty.tools.dotc.staging.StagingLevel.* import dotty.tools.dotc.transform.SymUtils._ +import dotty.tools.dotc.typer.ErrorReporting.errorTree import dotty.tools.dotc.typer.Implicits._ import dotty.tools.dotc.typer.Inferencing._ import dotty.tools.dotc.util.Spans._ @@ -36,30 +37,33 @@ trait QuotesAndSplices { */ def typedQuote(tree: untpd.Quote, pt: Type)(using Context): Tree = { record("typedQuote") - tree.quoted match { - case untpd.Splice(innerExpr) if tree.isTerm && !ctx.mode.is(Mode.Pattern) => + tree.body match { + case _: untpd.Splice if tree.isTerm && !ctx.mode.is(Mode.Pattern) => report.warning("Canceled splice directly inside a quote. 
'{ ${ XYZ } } is equivalent to XYZ.", tree.srcPos) case _ => } - val qctx = inferImplicitArg(defn.QuotesClass.typeRef, tree.span) + val quotes = inferImplicitArg(defn.QuotesClass.typeRef, tree.span) - if qctx.tpe.isInstanceOf[SearchFailureType] then - report.error(missingArgMsg(qctx, defn.QuotesClass.typeRef, ""), ctx.source.atSpan(tree.span)) - else if !qctx.tpe.isStable then - report.error(em"Quotes require stable Quotes, but found non stable $qctx", qctx.srcPos) + if quotes.tpe.isInstanceOf[SearchFailureType] then + report.error(missingArgMsg(quotes, defn.QuotesClass.typeRef, ""), ctx.source.atSpan(tree.span)) + else if !quotes.tpe.isStable then + report.error(em"Quotes require stable Quotes, but found non stable $quotes", quotes.srcPos) if ctx.mode.is(Mode.Pattern) then - typedQuotePattern(tree, pt, qctx).withSpan(tree.span) - else if tree.quoted.isType then + typedQuotePattern(tree, pt, quotes).withSpan(tree.span) + else if tree.isTypeQuote then val msg = em"""Quoted types `'[..]` can only be used in patterns. | |Hint: To get a scala.quoted.Type[T] use scala.quoted.Type.of[T] instead. 
- |""".stripMargin + |""" report.error(msg, tree.srcPos) EmptyTree else - val exprQuoteTree = untpd.Apply(untpd.ref(defn.QuotedRuntime_exprQuote.termRef), tree.quoted) - makeInlineable(typedApply(exprQuoteTree, pt)(using pushQuotes(qctx)).select(nme.apply).appliedTo(qctx).withSpan(tree.span)) + // TODO typecheck directly (without `exprQuote`) + val exprQuoteTree = untpd.Apply(untpd.ref(defn.QuotedRuntime_exprQuote.termRef), tree.body) + val quotedExpr = typedApply(exprQuoteTree, pt)(using quoteContext) match + case Apply(TypeApply(fn, tpt :: Nil), quotedExpr :: Nil) => untpd.Quote(quotedExpr, Nil).withBodyType(tpt.tpe) + makeInlineable(quotedExpr.select(nme.apply).appliedTo(quotes).withSpan(tree.span)) } private def makeInlineable(tree: Tree)(using Context): Tree = @@ -71,47 +75,60 @@ trait QuotesAndSplices { def typedSplice(tree: untpd.Splice, pt: Type)(using Context): Tree = { record("typedSplice") checkSpliceOutsideQuote(tree) + assert(!ctx.mode.is(Mode.QuotedPattern)) tree.expr match { - case untpd.Quote(innerExpr) if innerExpr.isTerm => + case untpd.Quote(innerExpr, Nil) if innerExpr.isTerm => report.warning("Canceled quote directly inside a splice. 
${ '{ XYZ } } is equivalent to XYZ.", tree.srcPos) + return typed(innerExpr, pt) case _ => } - if (ctx.mode.is(Mode.QuotedPattern)) - if (isFullyDefined(pt, ForceDegree.flipBottom)) { - def spliceOwner(ctx: Context): Symbol = - if (ctx.mode.is(Mode.QuotedPattern)) spliceOwner(ctx.outer) else ctx.owner - val pat = typedPattern(tree.expr, defn.QuotedExprClass.typeRef.appliedTo(pt))( - using spliceContext.retractMode(Mode.QuotedPattern).addMode(Mode.Pattern).withOwner(spliceOwner(ctx))) - val baseType = pat.tpe.baseType(defn.QuotedExprClass) - val argType = if baseType != NoType then baseType.argTypesHi.head else defn.NothingType - ref(defn.QuotedRuntime_exprSplice).appliedToType(argType).appliedTo(pat) - } - else { - report.error(i"Type must be fully defined.\nConsider annotating the splice using a type ascription:\n ($tree: XYZ).", tree.expr.srcPos) - tree.withType(UnspecifiedErrorType) - } - else { - if (StagingContext.level == 0) { - // Mark the first inline method from the context as a macro - def markAsMacro(c: Context): Unit = - if (c.owner eq c.outer.owner) markAsMacro(c.outer) - else if (c.owner.isInlineMethod) c.owner.setFlag(Macro) - else if (!c.outer.owner.is(Package)) markAsMacro(c.outer) - else assert(ctx.reporter.hasErrors) // Did not find inline def to mark as macro - markAsMacro(ctx) - } - - val (outerQctx, ctx1) = popQuotes() + if (level == 0) { + // Mark the first inline method from the context as a macro + def markAsMacro(c: Context): Unit = + if (c.owner eq c.outer.owner) markAsMacro(c.outer) + else if (c.owner.isInlineMethod) c.owner.setFlag(Macro) + else if (!c.outer.owner.is(Package)) markAsMacro(c.outer) + else assert(ctx.reporter.hasErrors) // Did not find inline def to mark as macro + markAsMacro(ctx) + } - val internalSplice = - outerQctx match - case Some(qctxRef) => untpd.Apply(untpd.Apply(untpd.ref(defn.QuotedRuntime_exprNestedSplice.termRef), qctxRef), tree.expr) - case _ => untpd.Apply(untpd.ref(defn.QuotedRuntime_exprSplice.termRef), 
tree.expr) + // TODO typecheck directly (without `exprSplice`) + val internalSplice = + untpd.Apply(untpd.ref(defn.QuotedRuntime_exprSplice.termRef), tree.expr) + typedApply(internalSplice, pt)(using spliceContext).withSpan(tree.span) match + case tree @ Apply(TypeApply(_, tpt :: Nil), spliced :: Nil) if tree.symbol == defn.QuotedRuntime_exprSplice => + cpy.Splice(tree)(spliced) + case tree => tree + } - typedApply(internalSplice, pt)(using ctx1).withSpan(tree.span) - } + def typedSplicePattern(tree: untpd.SplicePattern, pt: Type)(using Context): Tree = { + record("typedSplicePattern") + if isFullyDefined(pt, ForceDegree.flipBottom) then + def patternOuterContext(ctx: Context): Context = + if (ctx.mode.is(Mode.QuotedPattern)) patternOuterContext(ctx.outer) else ctx + val typedArgs = tree.args.map { + case arg: untpd.Ident => + typedExpr(arg) + case arg => + report.error("Open pattern expected an identifier", arg.srcPos) + EmptyTree + } + for arg <- typedArgs if arg.symbol.is(Mutable) do // TODO support these patterns. 
Possibly using scala.quoted.util.Var + report.error("References to `var`s cannot be used in higher-order pattern", arg.srcPos) + val argTypes = typedArgs.map(_.tpe.widenTermRefExpr) + val patType = if tree.args.isEmpty then pt else defn.FunctionOf(argTypes, pt) + val pat = typedPattern(tree.body, defn.QuotedExprClass.typeRef.appliedTo(patType))( + using spliceContext.retractMode(Mode.QuotedPattern).addMode(Mode.Pattern).withOwner(patternOuterContext(ctx).owner)) + val baseType = pat.tpe.baseType(defn.QuotedExprClass) + val argType = if baseType.exists then baseType.argTypesHi.head else defn.NothingType + untpd.cpy.SplicePattern(tree)(pat, typedArgs).withType(pt) + else + errorTree(tree, em"Type must be fully defined.\nConsider annotating the splice using a type ascription:\n ($tree: XYZ).", tree.body.srcPos) } + def typedHole(tree: untpd.Hole, pt: Type)(using Context): Tree = + throw new UnsupportedOperationException("cannot type check a Hole node") + /** Types a splice applied to some arguments `$f(arg1, ..., argn)` in a quote pattern. * * The tree is desugared into `$f.apply(arg1, ..., argn)` where the expression `$f` @@ -121,28 +138,17 @@ trait QuotesAndSplices { */ def typedAppliedSplice(tree: untpd.Apply, pt: Type)(using Context): Tree = { assert(ctx.mode.is(Mode.QuotedPattern)) - val untpd.Apply(splice: untpd.Splice, args) = tree: @unchecked - if !isFullyDefined(pt, ForceDegree.flipBottom) then - report.error(i"Type must be fully defined.", splice.srcPos) - tree.withType(UnspecifiedErrorType) - else if splice.isInBraces then // ${x}(...) match an application + val untpd.Apply(splice: untpd.SplicePattern, args) = tree: @unchecked + def isInBraces: Boolean = splice.span.end != splice.body.span.end + if isInBraces then // ${x}(...) 
match an application val typedArgs = args.map(arg => typedExpr(arg)) val argTypes = typedArgs.map(_.tpe.widenTermRefExpr) - val splice1 = typedSplice(splice, defn.FunctionOf(argTypes, pt)) - Apply(splice1.select(nme.apply), typedArgs).withType(pt).withSpan(tree.span) + val splice1 = typedSplicePattern(splice, defn.FunctionOf(argTypes, pt)) + untpd.cpy.Apply(tree)(splice1.select(nme.apply), typedArgs).withType(pt) else // $x(...) higher-order quasipattern - val typedArgs = args.map { - case arg: untpd.Ident => - typedExpr(arg) - case arg => - report.error("Open pattern expected an identifier", arg.srcPos) - EmptyTree - } if args.isEmpty then - report.error("Missing arguments for open pattern", tree.srcPos) - val argTypes = typedArgs.map(_.tpe.widenTermRefExpr) - val typedPat = typedSplice(splice, defn.FunctionOf(argTypes, pt)) - ref(defn.QuotedRuntimePatterns_patternHigherOrderHole).appliedToType(pt).appliedTo(typedPat, SeqLiteral(typedArgs, TypeTree(defn.AnyType))) + report.error("Missing arguments for open pattern", tree.srcPos) + typedSplicePattern(untpd.cpy.SplicePattern(tree)(splice.body, args), pt) } /** Type a pattern variable name `t` in quote pattern as `${given t$giveni: Type[t @ _]}`. @@ -163,19 +169,15 @@ trait QuotesAndSplices { using spliceContext.retractMode(Mode.QuotedPattern).withOwner(spliceOwner(ctx))) pat.select(tpnme.Underlying) - def typedHole(tree: untpd.Hole, pt: Type)(using Context): Tree = - val tpt = typedType(tree.tpt) - assignType(tree, tpt) - private def checkSpliceOutsideQuote(tree: untpd.Tree)(using Context): Unit = if (level == 0 && !ctx.owner.ownersIterator.exists(_.isInlineMethod)) report.error("Splice ${...} outside quotes '{...} or inline method", tree.srcPos) else if (level < 0) report.error( - s"""Splice $${...} at level $level. - | - |Inline method may contain a splice at level 0 but the contents of this splice cannot have a splice. - |""".stripMargin, tree.srcPos + em"""Splice $${...} at level $level. 
+ | + |Inline method may contain a splice at level 0 but the contents of this splice cannot have a splice. + |""", tree.srcPos ) /** Split a typed quoted pattern is split into its type bindings, pattern expression and inner patterns. @@ -224,32 +226,17 @@ trait QuotesAndSplices { val freshTypeBindingsBuff = new mutable.ListBuffer[Tree] val typePatBuf = new mutable.ListBuffer[Tree] override def transform(tree: Tree)(using Context) = tree match { - case Typed(Apply(fn, pat :: Nil), tpt) if fn.symbol.isExprSplice && !tpt.tpe.derivesFrom(defn.RepeatedParamClass) => - val tpt1 = transform(tpt) // Transform type bindings - val exprTpt = AppliedTypeTree(TypeTree(defn.QuotedExprClass.typeRef), tpt1 :: Nil) - val newSplice = ref(defn.QuotedRuntime_exprSplice).appliedToType(tpt1.tpe).appliedTo(Typed(pat, exprTpt)) - transform(newSplice) - case Apply(TypeApply(fn, targs), Apply(sp, pat :: Nil) :: args :: Nil) if fn.symbol == defn.QuotedRuntimePatterns_patternHigherOrderHole => - args match // TODO support these patterns. 
Possibly using scala.quoted.util.Var - case SeqLiteral(args, _) => - for arg <- args; if arg.symbol.is(Mutable) do - report.error("References to `var`s cannot be used in higher-order pattern", arg.srcPos) - try ref(defn.QuotedRuntimePatterns_higherOrderHole.termRef).appliedToTypeTrees(targs).appliedTo(args).withSpan(tree.span) - finally { - val patType = pat.tpe.widen - val patType1 = patType.translateFromRepeated(toArray = false) - val pat1 = if (patType eq patType1) pat else pat.withType(patType1) - patBuf += pat1 - } - case Apply(fn, pat :: Nil) if fn.symbol.isExprSplice => - try ref(defn.QuotedRuntimePatterns_patternHole.termRef).appliedToType(tree.tpe).withSpan(tree.span) - finally { - val patType = pat.tpe.widen - val patType1 = patType.translateFromRepeated(toArray = false) - val pat1 = if (patType eq patType1) pat else pat.withType(patType1) - patBuf += pat1 - } - case Select(pat, _) if tree.symbol.isTypeSplice => + case Typed(splice @ SplicePattern(pat, Nil), tpt) if !tpt.tpe.derivesFrom(defn.RepeatedParamClass) => + transform(tpt) // Collect type bindings + transform(splice) + case SplicePattern(pat, args) => + val patType = pat.tpe.widen + val patType1 = patType.translateFromRepeated(toArray = false) + val pat1 = if (patType eq patType1) pat else pat.withType(patType1) + patBuf += pat1 + if args.isEmpty then ref(defn.QuotedRuntimePatterns_patternHole.termRef).appliedToType(tree.tpe).withSpan(tree.span) + else ref(defn.QuotedRuntimePatterns_higherOrderHole.termRef).appliedToType(tree.tpe).appliedTo(SeqLiteral(args, TypeTree(defn.AnyType))).withSpan(tree.span) + case Select(pat: Bind, _) if tree.symbol.isTypeSplice => val sym = tree.tpe.dealias.typeSymbol if sym.exists then val tdef = TypeDef(sym.asType).withSpan(sym.span) @@ -263,7 +250,7 @@ trait QuotesAndSplices { transformTypeBindingTypeDef(PatMatGivenVarName.fresh(tdef.name.toTermName), tdef, typePatBuf) else if tdef.symbol.isClass then val kind = if tdef.symbol.is(Module) then "objects" else 
"classes" - report.error("Implementation restriction: cannot match " + kind, tree.srcPos) + report.error(em"Implementation restriction: cannot match $kind", tree.srcPos) EmptyTree else super.transform(tree) @@ -364,7 +351,7 @@ trait QuotesAndSplices { * * ``` * case scala.internal.quoted.Expr.unapply[ - * Tuple1[t @ _], // Type binging definition + * KList[t @ _, KNil], // Type binging definition * Tuple2[Type[t], Expr[List[t]]] // Typing the result of the pattern match * ]( * Tuple2.unapply @@ -380,13 +367,13 @@ trait QuotesAndSplices { * ) => ... * ``` */ - private def typedQuotePattern(tree: untpd.Quote, pt: Type, qctx: Tree)(using Context): Tree = { - if tree.quoted.isTerm && !pt.derivesFrom(defn.QuotedExprClass) then + private def typedQuotePattern(tree: untpd.Quote, pt: Type, quotes: Tree)(using Context): Tree = { + val quoted = tree.body + if quoted.isTerm && !pt.derivesFrom(defn.QuotedExprClass) then report.error("Quote pattern can only match scrutinees of type scala.quoted.Expr", tree.srcPos) - else if tree.quoted.isType && !pt.derivesFrom(defn.QuotedTypeClass) then + else if quoted.isType && !pt.derivesFrom(defn.QuotedTypeClass) then report.error("Quote pattern can only match scrutinees of type scala.quoted.Type", tree.srcPos) - val quoted = tree.quoted val exprPt = pt.baseType(if quoted.isType then defn.QuotedTypeClass else defn.QuotedExprClass) val quotedPt = exprPt.argInfos.headOption match { case Some(argPt: ValueType) => argPt // excludes TypeBounds @@ -411,7 +398,7 @@ trait QuotesAndSplices { val replaceBindings = new ReplaceBindings val patType = defn.tupleType(splices.tpes.map(tpe => replaceBindings(tpe.widen))) - val typeBindingsTuple = tpd.tupleTypeTree(typeBindings.values.toList) + val typeBindingsTuple = tpd.hkNestedPairsTypeTree(typeBindings.values.toList) val replaceBindingsInTree = new TreeMap { private var bindMap = Map.empty[Symbol, Symbol] @@ -438,13 +425,13 @@ trait QuotesAndSplices { if splices.isEmpty then 
ref(defn.EmptyTupleModule.termRef) else typed(untpd.Tuple(splices.map(x => untpd.TypedSplice(replaceBindingsInTree.transform(x)))).withSpan(quoted.span), patType) - val quoteClass = if (tree.quoted.isTerm) defn.QuotedExprClass else defn.QuotedTypeClass + val quoteClass = if (quoted.isTerm) defn.QuotedExprClass else defn.QuotedTypeClass val quotedPattern = - if (tree.quoted.isTerm) ref(defn.QuotedRuntime_exprQuote.termRef).appliedToType(defn.AnyType).appliedTo(shape).select(nme.apply).appliedTo(qctx) - else ref(defn.QuotedTypeModule_of.termRef).appliedToTypeTree(shape).appliedTo(qctx) + if (quoted.isTerm) tpd.Quote(shape, Nil).select(nme.apply).appliedTo(quotes) + else ref(defn.QuotedTypeModule_of.termRef).appliedToTypeTree(shape).appliedTo(quotes) - val matchModule = if tree.quoted.isTerm then defn.QuoteMatching_ExprMatch else defn.QuoteMatching_TypeMatch - val unapplyFun = qctx.asInstance(defn.QuoteMatchingClass.typeRef).select(matchModule).select(nme.unapply) + val matchModule = if quoted.isTerm then defn.QuoteMatching_ExprMatch else defn.QuoteMatching_TypeMatch + val unapplyFun = quotes.asInstance(defn.QuoteMatchingClass.typeRef).select(matchModule).select(nme.unapply) UnApply( fun = unapplyFun.appliedToTypeTrees(typeBindingsTuple :: TypeTree(patType) :: Nil), diff --git a/compiler/src/dotty/tools/dotc/typer/ReTyper.scala b/compiler/src/dotty/tools/dotc/typer/ReTyper.scala index 7099234c80e1..1fa6e967fbe1 100644 --- a/compiler/src/dotty/tools/dotc/typer/ReTyper.scala +++ b/compiler/src/dotty/tools/dotc/typer/ReTyper.scala @@ -12,6 +12,7 @@ import ast.{tpd, untpd} import scala.util.control.NonFatal import util.Spans.Span import Nullables._ +import staging.StagingLevel.* /** A version of Typer that keeps all symbols defined and referenced in a * previously typed tree. 
@@ -71,7 +72,7 @@ class ReTyper(nestingLevel: Int = 0) extends Typer(nestingLevel) with ReChecking promote(tree) override def typedRefinedTypeTree(tree: untpd.RefinedTypeTree)(using Context): TypTree = - promote(TypeTree(tree.tpe).withSpan(tree.span)) + promote(TypeTree(tree.typeOpt).withSpan(tree.span)) override def typedExport(exp: untpd.Export)(using Context): Export = promote(exp) @@ -87,13 +88,32 @@ class ReTyper(nestingLevel: Int = 0) extends Typer(nestingLevel) with ReChecking // retract PatternOrTypeBits like in typedExpr withoutMode(Mode.PatternOrTypeBits)(typedUnadapted(tree.fun, AnyFunctionProto)) val implicits1 = tree.implicits.map(typedExpr(_)) - val patterns1 = tree.patterns.mapconserve(pat => typed(pat, pat.tpe)) - untpd.cpy.UnApply(tree)(fun1, implicits1, patterns1).withType(tree.tpe) + val patterns1 = tree.patterns.mapconserve(pat => typed(pat, pat.typeOpt)) + untpd.cpy.UnApply(tree)(fun1, implicits1, patterns1).withType(tree.typeOpt) } override def typedUnApply(tree: untpd.Apply, selType: Type)(using Context): Tree = typedApply(tree, selType) + override def typedQuote(tree: untpd.Quote, pt: Type)(using Context): Tree = + assertTyped(tree) + val body1 = typed(tree.body, tree.bodyType)(using quoteContext) + for tag <- tree.tags do assertTyped(tag) + untpd.cpy.Quote(tree)(body1, tree.tags).withType(tree.typeOpt) + + override def typedSplice(tree: untpd.Splice, pt: Type)(using Context): Tree = + assertTyped(tree) + val exprType = // Expr[T] + defn.QuotedExprClass.typeRef.appliedTo(tree.typeOpt) + val quoteType = // Quotes ?=> Expr[T] + defn.FunctionType(1, isContextual = true) + .appliedTo(defn.QuotesClass.typeRef, exprType) + val expr1 = typed(tree.expr, quoteType)(using spliceContext) + untpd.cpy.Splice(tree)(expr1).withType(tree.typeOpt) + + override def typedHole(tree: untpd.Hole, pt: Type)(using Context): Tree = + promote(tree) + override def localDummy(cls: ClassSymbol, impl: untpd.Template)(using Context): Symbol = impl.symbol override def 
retrieveSym(tree: untpd.Tree)(using Context): Symbol = tree.symbol @@ -124,12 +144,10 @@ class ReTyper(nestingLevel: Int = 0) extends Typer(nestingLevel) with ReChecking override def typedUnadapted(tree: untpd.Tree, pt: Type, locked: TypeVars)(using Context): Tree = try super.typedUnadapted(tree, pt, locked) - catch { - case NonFatal(ex) => - if ctx.phase != Phases.typerPhase && ctx.phase != Phases.inliningPhase then - println(i"exception while typing $tree of class ${tree.getClass} # ${tree.uniqueId}") - throw ex - } + catch case NonFatal(ex) if ctx.phase != Phases.typerPhase && ctx.phase != Phases.inliningPhase && !ctx.run.enrichedErrorMessage => + val treeStr = tree.show(using ctx.withPhase(ctx.phase.prevMega)) + println(ctx.run.enrichErrorMessage(s"exception while retyping $treeStr of class ${tree.className} # ${tree.uniqueId}")) + throw ex override def inlineExpansion(mdef: DefDef)(using Context): List[Tree] = mdef :: Nil diff --git a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala index 1aa53d866b5e..025eae3606af 100644 --- a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala @@ -58,11 +58,9 @@ object RefChecks { // constructors of different classes are allowed to have defaults if (haveDefaults.exists(x => !x.isConstructor) || owners.distinct.size < haveDefaults.size) report.error( - "in " + clazz + - ", multiple overloaded alternatives of " + haveDefaults.head + - " define default arguments" + ( - if (owners.forall(_ == clazz)) "." - else ".\nThe members with defaults are defined in " + owners.map(_.showLocated).mkString("", " and ", ".")), + em"in $clazz, multiple overloaded alternatives of ${haveDefaults.head} define default arguments${ + if owners.forall(_ == clazz) then "." 
+ else i".\nThe members with defaults are defined in ${owners.map(_.showLocated).mkString("", " and ", ".")}"}", clazz.srcPos) } } @@ -91,24 +89,39 @@ object RefChecks { cls.thisType } + /** - Check that self type of `cls` conforms to self types of all `parents` as seen from + * `cls.thisType` + * - If self type of `cls` is explicit, check that it conforms to the self types + * of all its class symbols. + * @param deep If true and a self type of a parent is not given explicitly, recurse to + * check against the parents of the parent. This is needed when capture checking, + * since we assume (& check) that the capture set of an inferred self type + * is the intersection of the capture sets of all its parents + */ + def checkSelfAgainstParents(cls: ClassSymbol, parents: List[Symbol])(using Context): Unit = + withMode(Mode.CheckBoundsOrSelfType) { + val cinfo = cls.classInfo + + def checkSelfConforms(other: ClassSymbol) = + val otherSelf = other.declaredSelfTypeAsSeenFrom(cls.thisType) + if otherSelf.exists then + if !(cinfo.selfType <:< otherSelf) then + report.error(DoesNotConformToSelfType("illegal inheritance", cinfo.selfType, cls, otherSelf, "parent", other), + cls.srcPos) + + for psym <- parents do + checkSelfConforms(psym.asClass) + } + end checkSelfAgainstParents + /** Check that self type of this class conforms to self types of parents * and required classes. Also check that only `enum` constructs extend * `java.lang.Enum` and no user-written class extends ContextFunctionN. 
*/ def checkParents(cls: Symbol, parentTrees: List[Tree])(using Context): Unit = cls.info match { case cinfo: ClassInfo => - def checkSelfConforms(other: ClassSymbol, category: String, relation: String) = { - val otherSelf = other.declaredSelfTypeAsSeenFrom(cls.thisType) - if otherSelf.exists && !(cinfo.selfType <:< otherSelf) then - report.error(DoesNotConformToSelfType(category, cinfo.selfType, cls, otherSelf, relation, other), - cls.srcPos) - } val psyms = cls.asClass.parentSyms - for (psym <- psyms) - checkSelfConforms(psym.asClass, "illegal inheritance", "parent") - for reqd <- cinfo.cls.givenSelfType.classSymbols do - if reqd != cls then - checkSelfConforms(reqd, "missing requirement", "required") + checkSelfAgainstParents(cls.asClass, psyms) def isClassExtendingJavaEnum = !cls.isOneOf(Enum | Trait) && psyms.contains(defn.JavaEnumClass) @@ -221,9 +234,16 @@ object RefChecks { && inLinearizationOrder(sym1, sym2, parent) && !sym2.is(AbsOverride) - def checkAll(checkOverride: (Symbol, Symbol) => Unit) = + // Checks the subtype relationship tp1 <:< tp2. + // It is passed to the `checkOverride` operation in `checkAll`, to be used for + // compatibility checking. 
+ def checkSubType(tp1: Type, tp2: Type)(using Context): Boolean = tp1 frozen_<:< tp2 + + private val subtypeChecker: (Type, Type) => Context ?=> Boolean = this.checkSubType + + def checkAll(checkOverride: ((Type, Type) => Context ?=> Boolean, Symbol, Symbol) => Unit) = while hasNext do - checkOverride(overriding, overridden) + checkOverride(subtypeChecker, overriding, overridden) next() // The OverridingPairs cursor does assume that concrete overrides abstract @@ -237,7 +257,7 @@ object RefChecks { if dcl.is(Deferred) then for other <- dcl.allOverriddenSymbols do if !other.is(Deferred) then - checkOverride(dcl, other) + checkOverride(checkSubType, dcl, other) end checkAll end OverridingPairsChecker @@ -274,8 +294,11 @@ object RefChecks { * TODO check that classes are not overridden * TODO This still needs to be cleaned up; the current version is a straight port of what was there * before, but it looks too complicated and method bodies are far too large. + * + * @param makeOverridingPairsChecker A function for creating a OverridePairsChecker instance + * from the class symbol and the self type */ - def checkAllOverrides(clazz: ClassSymbol)(using Context): Unit = { + def checkAllOverrides(clazz: ClassSymbol, makeOverridingPairsChecker: ((ClassSymbol, Type) => Context ?=> OverridingPairsChecker) | Null = null)(using Context): Unit = { val self = clazz.thisType val upwardsSelf = upwardsThisType(clazz) var hasErrors = false @@ -301,25 +324,22 @@ object RefChecks { report.error(msg.append(othersMsg), clazz.srcPos) } - def infoString(sym: Symbol) = infoString0(sym, sym.owner != clazz) - def infoStringWithLocation(sym: Symbol) = infoString0(sym, true) - - def infoString0(sym: Symbol, showLocation: Boolean) = { - val sym1 = sym.underlyingSymbol - def info = self.memberInfo(sym1) - val infoStr = - if (sym1.isAliasType) i", which equals ${info.bounds.hi}" - else if (sym1.isAbstractOrParamType && info != TypeBounds.empty) i" with bounds$info" - else if (sym1.is(Module)) "" - 
else if (sym1.isTerm) i" of type $info" - else "" - i"${if (showLocation) sym1.showLocated else sym1}$infoStr" - } + def infoString(sym: Symbol) = + err.infoString(sym, self, showLocation = sym.owner != clazz) + def infoStringWithLocation(sym: Symbol) = + err.infoString(sym, self, showLocation = true) + + def isInheritedAccessor(mbr: Symbol, other: Symbol): Boolean = + mbr.is(ParamAccessor) + && { + val next = ParamForwarding.inheritedAccessor(mbr) + next == other || isInheritedAccessor(next, other) + } /* Check that all conditions for overriding `other` by `member` - * of class `clazz` are met. - */ - def checkOverride(member: Symbol, other: Symbol): Unit = + * of class `clazz` are met. + */ + def checkOverride(checkSubType: (Type, Type) => Context ?=> Boolean, member: Symbol, other: Symbol): Unit = def memberTp(self: Type) = if (member.isClass) TypeAlias(member.typeRef.EtaExpand(member.typeParams)) else self.memberInfo(member) @@ -329,27 +349,17 @@ object RefChecks { def noErrorType = !memberTp(self).isErroneous && !otherTp(self).isErroneous - def overrideErrorMsg(msg: String, compareTypes: Boolean = false): Message = { - val isConcreteOverAbstract = - (other.owner isSubClass member.owner) && other.is(Deferred) && !member.is(Deferred) - val addendum = - if isConcreteOverAbstract then - ";\n (Note that %s is abstract,\n and is therefore overridden by concrete %s)".format( - infoStringWithLocation(other), - infoStringWithLocation(member)) - else "" - val fullMsg = - s"error overriding ${infoStringWithLocation(other)};\n ${infoString(member)} $msg$addendum" - if compareTypes then OverrideTypeMismatchError(fullMsg, memberTp(self), otherTp(self)) - else OverrideError(fullMsg) - } + def overrideErrorMsg(core: Context ?=> String, compareTypes: Boolean = false): Message = + val (mtp, otp) = if compareTypes then (memberTp(self), otherTp(self)) else (NoType, NoType) + OverrideError(core, self, member, other, mtp, otp) def compatTypes(memberTp: Type, otherTp: Type): Boolean 
= try isOverridingPair(member, memberTp, other, otherTp, fallBack = warnOnMigration( overrideErrorMsg("no longer has compatible type"), - (if (member.owner == clazz) member else clazz).srcPos, version = `3.0`)) + (if (member.owner == clazz) member else clazz).srcPos, version = `3.0`), + isSubType = checkSubType) catch case ex: MissingType => // can happen when called with upwardsSelf as qualifier of memberTp and otherTp, // because in that case we might access types that are not members of the qualifier. @@ -361,7 +371,16 @@ object RefChecks { * Type members are always assumed to match. */ def trueMatch: Boolean = - member.isType || memberTp(self).matches(otherTp(self)) + member.isType || withMode(Mode.IgnoreCaptures) { + // `matches` does not perform box adaptation so the result here would be + // spurious during capture checking. + // + // Instead of parameterizing `matches` with the function for subtype checking + // with box adaptation, we simply ignore capture annotations here. + // This should be safe since the compatibility under box adaptation is already + // checked. 
+ memberTp(self).matches(otherTp(self)) + } def emitOverrideError(fullmsg: Message) = if (!(hasErrors && member.is(Synthetic) && member.is(Module))) { @@ -378,7 +397,7 @@ object RefChecks { def overrideDeprecation(what: String, member: Symbol, other: Symbol, fix: String): Unit = report.deprecationWarning( - s"overriding $what${infoStringWithLocation(other)} is deprecated;\n ${infoString(member)} should be $fix.", + em"overriding $what${infoStringWithLocation(other)} is deprecated;\n ${infoString(member)} should be $fix.", if member.owner == clazz then member.srcPos else clazz.srcPos) def autoOverride(sym: Symbol) = @@ -464,7 +483,7 @@ object RefChecks { if (autoOverride(member) || other.owner.isAllOf(JavaInterface) && warnOnMigration( - "`override` modifier required when a Java 8 default method is re-implemented".toMessage, + em"`override` modifier required when a Java 8 default method is re-implemented", member.srcPos, version = `3.0`)) member.setFlag(Override) else if (member.isType && self.memberInfo(member) =:= self.memberInfo(other)) @@ -496,7 +515,7 @@ object RefChecks { else if (member.is(ModuleVal) && !other.isRealMethod && !other.isOneOf(DeferredOrLazy)) overrideError("may not override a concrete non-lazy value") else if (member.is(Lazy, butNot = Module) && !other.isRealMethod && !other.is(Lazy) && - !warnOnMigration(overrideErrorMsg("may not override a non-lazy value"), member.srcPos, version = `3.0`)) + !warnOnMigration(overrideErrorMsg("may not override a non-lazy value"), member.srcPos, version = `3.0`)) overrideError("may not override a non-lazy value") else if (other.is(Lazy) && !other.isRealMethod && !member.is(Lazy)) overrideError("must be declared lazy to override a lazy value") @@ -521,7 +540,7 @@ object RefChecks { overrideError(i"cannot override val parameter ${other.showLocated}") else report.deprecationWarning( - i"overriding val parameter ${other.showLocated} is deprecated, will be illegal in a future version", + em"overriding val parameter 
${other.showLocated} is deprecated, will be illegal in a future version", member.srcPos) else if !other.isExperimental && member.hasAnnotation(defn.ExperimentalAnnot) then // (1.12) overrideError("may not override non-experimental member") @@ -529,14 +548,8 @@ object RefChecks { overrideDeprecation("", member, other, "removed or renamed") end checkOverride - def isInheritedAccessor(mbr: Symbol, other: Symbol): Boolean = - mbr.is(ParamAccessor) - && { - val next = ParamForwarding.inheritedAccessor(mbr) - next == other || isInheritedAccessor(next, other) - } - - OverridingPairsChecker(clazz, self).checkAll(checkOverride) + val checker = if makeOverridingPairsChecker == null then OverridingPairsChecker(clazz, self) else makeOverridingPairsChecker(clazz, self) + checker.checkAll(checkOverride) printMixinOverrideErrors() // Verifying a concrete class has nothing unimplemented. @@ -544,7 +557,7 @@ object RefChecks { val abstractErrors = new mutable.ListBuffer[String] def abstractErrorMessage = // a little formatting polish - if (abstractErrors.size <= 2) abstractErrors mkString " " + if (abstractErrors.size <= 2) abstractErrors.mkString(" ") else abstractErrors.tail.mkString(abstractErrors.head + ":\n", "\n", "") def abstractClassError(mustBeMixin: Boolean, msg: String): Unit = { @@ -580,7 +593,7 @@ object RefChecks { clazz.nonPrivateMembersNamed(mbr.name) .filterWithPredicate( impl => isConcrete(impl.symbol) - && mbrDenot.matchesLoosely(impl, alwaysCompareTypes = true)) + && withMode(Mode.IgnoreCaptures)(mbrDenot.matchesLoosely(impl, alwaysCompareTypes = true))) .exists /** The term symbols in this class and its baseclasses that are @@ -727,7 +740,7 @@ object RefChecks { def checkNoAbstractDecls(bc: Symbol): Unit = { for (decl <- bc.info.decls) if (decl.is(Deferred)) { - val impl = decl.matchingMember(clazz.thisType) + val impl = withMode(Mode.IgnoreCaptures)(decl.matchingMember(clazz.thisType)) if (impl == NoSymbol || decl.owner.isSubClass(impl.owner)) && 
!ignoreDeferred(decl) then @@ -774,17 +787,19 @@ object RefChecks { // For each member, check that the type of its symbol, as seen from `self` // can override the info of this member - for (name <- membersToCheck) - for (mbrd <- self.member(name).alternatives) { - val mbr = mbrd.symbol - val mbrType = mbr.info.asSeenFrom(self, mbr.owner) - if (!mbrType.overrides(mbrd.info, relaxedCheck = false, matchLoosely = true)) - report.errorOrMigrationWarning( - em"""${mbr.showLocated} is not a legal implementation of `$name` in $clazz - | its type $mbrType - | does not conform to ${mbrd.info}""", - (if (mbr.owner == clazz) mbr else clazz).srcPos, from = `3.0`) + withMode(Mode.IgnoreCaptures) { + for (name <- membersToCheck) + for (mbrd <- self.member(name).alternatives) { + val mbr = mbrd.symbol + val mbrType = mbr.info.asSeenFrom(self, mbr.owner) + if (!mbrType.overrides(mbrd.info, relaxedCheck = false, matchLoosely = true)) + report.errorOrMigrationWarning( + em"""${mbr.showLocated} is not a legal implementation of `$name` in $clazz + | its type $mbrType + | does not conform to ${mbrd.info}""", + (if (mbr.owner == clazz) mbr else clazz).srcPos, from = `3.0`) } + } } /** Check that inheriting a case class does not constitute a variant refinement @@ -796,7 +811,7 @@ object RefChecks { for (baseCls <- caseCls.info.baseClasses.tail) if (baseCls.typeParams.exists(_.paramVarianceSign != 0)) for (problem <- variantInheritanceProblems(baseCls, caseCls, "non-variant", "case ")) - report.errorOrMigrationWarning(problem(), clazz.srcPos, from = `3.0`) + report.errorOrMigrationWarning(problem, clazz.srcPos, from = `3.0`) checkNoAbstractMembers() if (abstractErrors.isEmpty) checkNoAbstractDecls(clazz) @@ -827,7 +842,7 @@ object RefChecks { if cls.paramAccessors.nonEmpty && !mixins.contains(cls) problem <- variantInheritanceProblems(cls, clazz.asClass.superClass, "parameterized", "super") } - report.error(problem(), clazz.srcPos) + report.error(problem, clazz.srcPos) } 
checkParameterizedTraitsOK() @@ -841,13 +856,13 @@ object RefChecks { * Return an optional by name error message if this test fails. */ def variantInheritanceProblems( - baseCls: Symbol, middle: Symbol, baseStr: String, middleStr: String): Option[() => String] = { + baseCls: Symbol, middle: Symbol, baseStr: String, middleStr: String): Option[Message] = { val superBT = self.baseType(middle) val thisBT = self.baseType(baseCls) val combinedBT = superBT.baseType(baseCls) if (combinedBT =:= thisBT) None // ok else - Some(() => + Some( em"""illegal inheritance: $clazz inherits conflicting instances of $baseStr base $baseCls. | | Direct basetype: $thisBT @@ -944,7 +959,7 @@ object RefChecks { for bc <- cls.baseClasses.tail do val other = sym.matchingDecl(bc, cls.thisType) if other.exists then - report.error(i"private $sym cannot override ${other.showLocated}", sym.srcPos) + report.error(em"private $sym cannot override ${other.showLocated}", sym.srcPos) end checkNoPrivateOverrides /** Check that unary method definition do not receive parameters. 
@@ -1075,6 +1090,12 @@ object RefChecks { end checkImplicitNotFoundAnnotation + def checkAnyRefMethodCall(tree: Tree)(using Context) = + if tree.symbol.exists + && defn.topClasses.contains(tree.symbol.owner) + && (!ctx.owner.enclosingClass.exists || ctx.owner.enclosingClass.isPackageObject) then + report.warning(UnqualifiedCallToAnyRefMethod(tree, tree.symbol), tree) + } import RefChecks._ @@ -1148,12 +1169,16 @@ class RefChecks extends MiniPhase { thisPhase => checkAllOverrides(cls) checkImplicitNotFoundAnnotation.template(cls.classDenot) tree - } - catch { + } catch { case ex: TypeError => report.error(ex, tree.srcPos) tree } + + override def transformIdent(tree: Ident)(using Context): Tree = + checkAnyRefMethodCall(tree) + tree + } /* todo: rewrite and re-enable @@ -1664,7 +1689,7 @@ class RefChecks extends MiniPhase { thisPhase => // if (settings.warnNullaryUnit) // checkNullaryMethodReturnType(sym) // if (settings.warnInaccessible) { - // if (!sym.isConstructor && !sym.isEffectivelyFinal && !sym.isSynthetic) + // if (!sym.isEffectivelyFinal && !sym.isSynthetic) // checkAccessibilityOfReferencedTypes(tree) // } // tree match { diff --git a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala index e3f5382ecad7..103961b68c29 100644 --- a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala @@ -28,26 +28,31 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): private type SpecialHandlers = List[(ClassSymbol, SpecialHandler)] val synthesizedClassTag: SpecialHandler = (formal, span) => + def instArg(tp: Type): Type = tp.stripTypeVar match + // Special case to avoid instantiating `Int & S` to `Int & Nothing` in + // i16328.scala. The intersection comes from an earlier instantiation + // to an upper bound. + // The dual situation with unions is harder to trigger because lower + // bounds are usually widened during instantiation. 
+ case tp: AndOrType if tp.tp1 =:= tp.tp2 => + instArg(tp.tp1) + case _ => + if isFullyDefined(tp, ForceDegree.all) then tp + else NoType // this happens in tests/neg/i15372.scala + val tag = formal.argInfos match - case arg :: Nil if isFullyDefined(arg, ForceDegree.all) => - arg match + case arg :: Nil => + instArg(arg) match case defn.ArrayOf(elemTp) => val etag = typer.inferImplicitArg(defn.ClassTagClass.typeRef.appliedTo(elemTp), span) if etag.tpe.isError then EmptyTree else etag.select(nme.wrap) - case tp if hasStableErasure(tp) && !defn.isBottomClassAfterErasure(tp.typeSymbol) => + case tp if hasStableErasure(tp) && !tp.isBottomTypeAfterErasure => val sym = tp.typeSymbol val classTagModul = ref(defn.ClassTagModule) if defn.SpecialClassTagClasses.contains(sym) then classTagModul.select(sym.name.toTermName).withSpan(span) else - def clsOfType(tp: Type): Type = tp.dealias.underlyingMatchType match - case matchTp: MatchType => - matchTp.alternatives.map(clsOfType) match - case ct1 :: cts if cts.forall(ct1 == _) => ct1 - case _ => NoType - case _ => - escapeJavaArray(erasure(tp)) - val ctype = clsOfType(tp) + val ctype = escapeJavaArray(erasure(tp)) if ctype.exists then classTagModul.select(nme.apply) .appliedToType(tp) @@ -98,12 +103,12 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): def functionTypeEqual(baseFun: Type, actualArgs: List[Type], actualRet: Type, expected: Type) = expected =:= defn.FunctionOf(actualArgs, actualRet, - defn.isContextFunctionType(baseFun), defn.isErasedFunctionType(baseFun)) + defn.isContextFunctionType(baseFun)) val arity: Int = - if defn.isErasedFunctionType(fun) || defn.isErasedFunctionType(fun) then -1 // TODO support? + if defn.isErasedFunctionType(fun) then -1 // TODO support? else if defn.isFunctionType(fun) then // TupledFunction[(...) => R, ?] 
- fun.dropDependentRefinement.dealias.argInfos match + fun.functionArgInfos match case funArgs :+ funRet if functionTypeEqual(fun, defn.tupleType(funArgs) :: Nil, funRet, tupled) => // TupledFunction[(...funArgs...) => funRet, ?] @@ -111,7 +116,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): case _ => -1 else if defn.isFunctionType(tupled) then // TupledFunction[?, (...) => R] - tupled.dropDependentRefinement.dealias.argInfos match + tupled.functionArgInfos match case tupledArgs :: funRet :: Nil => defn.tupleTypes(tupledArgs.dealias) match case Some(funArgs) if functionTypeEqual(tupled, funArgs, funRet, fun) => @@ -476,8 +481,8 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): val elemLabels = cls.children.map(c => ConstantType(Constant(c.name.toString))) def internalError(msg: => String)(using Context): Unit = - report.error(i"""Internal error when synthesizing sum mirror for $cls: - |$msg""".stripMargin, ctx.source.atSpan(span)) + report.error(em"""Internal error when synthesizing sum mirror for $cls: + |$msg""", ctx.source.atSpan(span)) def childPrefix(child: Symbol)(using Context): Type = val symPre = TypeOps.childPrefix(pre, cls, child) @@ -691,10 +696,11 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): val manifest = synthesize(fullyDefinedType(arg, "Manifest argument", ctx.source.atSpan(span)), kind, topLevel = true) if manifest != EmptyTree then report.deprecationWarning( - i"""Compiler synthesis of Manifest and OptManifest is deprecated, instead - |replace with the type `scala.reflect.ClassTag[$arg]`. - |Alternatively, consider using the new metaprogramming features of Scala 3, - |see https://docs.scala-lang.org/scala3/reference/metaprogramming.html""", ctx.source.atSpan(span)) + em"""Compiler synthesis of Manifest and OptManifest is deprecated, instead + |replace with the type `scala.reflect.ClassTag[$arg]`. 
+ |Alternatively, consider using the new metaprogramming features of Scala 3, + |see https://docs.scala-lang.org/scala3/reference/metaprogramming.html""", + ctx.source.atSpan(span)) withNoErrors(manifest) case _ => EmptyTreeNoError diff --git a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala index b90409e72364..be6121e13209 100644 --- a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala +++ b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala @@ -18,7 +18,7 @@ trait TypeAssigner { import TypeAssigner.* /** The qualifying class of a this or super with prefix `qual` (which might be empty). - * @param packageOk The qualifier may refer to a package. + * @param packageOK The qualifier may refer to a package. */ def qualifyingClass(tree: untpd.Tree, qual: Name, packageOK: Boolean)(using Context): Symbol = { def qualifies(sym: Symbol) = @@ -31,8 +31,9 @@ trait TypeAssigner { c case _ => report.error( - if (qual.isEmpty) tree.show + " can be used only in a class, object, or template" - else qual.show + " is not an enclosing class", tree.srcPos) + if qual.isEmpty then em"$tree can be used only in a class, object, or template" + else em"$qual is not an enclosing class", + tree.srcPos) NoSymbol } } @@ -76,21 +77,25 @@ trait TypeAssigner { * (2) in Java compilation units, `Object` is replaced by `defn.FromJavaObjectType` */ def accessibleType(tpe: Type, superAccess: Boolean)(using Context): Type = - tpe match + if ctx.isJava && tpe.isAnyRef then + defn.FromJavaObjectType + else tpe match case tpe: NamedType => - val pre = tpe.prefix - val name = tpe.name - def postProcess(d: Denotation) = - if ctx.isJava && tpe.isAnyRef then defn.FromJavaObjectType - else TypeOps.makePackageObjPrefixExplicit(tpe withDenot d) - val d = tpe.denot.accessibleFrom(pre, superAccess) - if d.exists then postProcess(d) + val tpe1 = TypeOps.makePackageObjPrefixExplicit(tpe) + if tpe1 ne tpe then + accessibleType(tpe1, superAccess) 
else - // it could be that we found an inaccessible private member, but there is - // an inherited non-private member with the same name and signature. - val d2 = pre.nonPrivateMember(name).accessibleFrom(pre, superAccess) - if reallyExists(d2) then postProcess(d2) - else NoType + val pre = tpe.prefix + val name = tpe.name + val d = tpe.denot.accessibleFrom(pre, superAccess) + if d eq tpe.denot then tpe + else if d.exists then tpe.withDenot(d) + else + // it could be that we found an inaccessible private member, but there is + // an inherited non-private member with the same name and signature. + val d2 = pre.nonPrivateMember(name).accessibleFrom(pre, superAccess) + if reallyExists(d2) then tpe.withDenot(d2) + else NoType case tpe => tpe /** Try to make `tpe` accessible, emit error if not possible */ @@ -127,7 +132,7 @@ trait TypeAssigner { def arrayElemType = qual1.tpe.widen match case JavaArrayType(elemtp) => elemtp case qualType => - report.error("Expected Array but was " + qualType.show, tree.srcPos) + report.error(em"Expected Array but was $qualType", tree.srcPos) defn.NothingType val name = tree.name @@ -167,26 +172,13 @@ trait TypeAssigner { case _ => false def addendum = err.selectErrorAddendum(tree, qual, qualType, importSuggestionAddendum, foundWithoutNull) val msg: Message = - if tree.name == nme.CONSTRUCTOR then ex"$qualType does not have a constructor".toMessage + if tree.name == nme.CONSTRUCTOR then em"$qualType does not have a constructor" else NotAMember(qualType, tree.name, kind, addendum) errorType(msg, tree.srcPos) def inaccessibleErrorType(tpe: NamedType, superAccess: Boolean, pos: SrcPos)(using Context): Type = - val pre = tpe.prefix - val name = tpe.name - val alts = tpe.denot.alternatives.map(_.symbol).filter(_.exists) - val whatCanNot = alts match - case Nil => - em"$name cannot" - case sym :: Nil => - em"${if (sym.owner == pre.typeSymbol) sym.show else sym.showLocated} cannot" - case _ => - em"none of the overloaded alternatives named $name 
can" - val where = if (ctx.owner.exists) s" from ${ctx.owner.enclosingClass}" else "" - val whyNot = new StringBuffer - alts.foreach(_.isAccessibleFrom(pre, superAccess, whyNot)) if tpe.isError then tpe - else errorType(ex"$whatCanNot be accessed as a member of $pre$where.$whyNot", pos) + else errorType(CannotBeAccessed(tpe, superAccess), pos) def processAppliedType(tree: untpd.Tree, tp: Type)(using Context): Type = tp match case AppliedType(tycon, args) => @@ -238,7 +230,7 @@ trait TypeAssigner { val cls = qualifyingClass(tree, tree.qual.name, packageOK = false) tree.withType( if (cls.isClass) cls.thisType - else errorType("not a legal qualifying class for this", tree.srcPos)) + else errorType(em"not a legal qualifying class for this", tree.srcPos)) } def superType(qualType: Type, mix: untpd.Ident, mixinClass: Symbol, pos: SrcPos)(using Context) = @@ -252,7 +244,7 @@ trait TypeAssigner { case Nil => errorType(SuperQualMustBeParent(mix, cls), pos) case p :: q :: _ => - errorType("ambiguous parent class qualifier", pos) + errorType(em"ambiguous parent class qualifier", pos) } val owntype = if (mixinClass.exists) mixinClass.typeRef @@ -291,25 +283,25 @@ trait TypeAssigner { def safeSubstMethodParams(mt: MethodType, argTypes: List[Type])(using Context): Type = if mt.isResultDependent then safeSubstParams(mt.resultType, mt.paramRefs, argTypes) - else if mt.isCaptureDependent then mt.resultType.substParams(mt, argTypes) else mt.resultType def assignType(tree: untpd.Apply, fn: Tree, args: List[Tree])(using Context): Apply = { val ownType = fn.tpe.widen match { case fntpe: MethodType => - if (fntpe.paramInfos.hasSameLengthAs(args) || ctx.phase.prev.relaxedTyping) - safeSubstMethodParams(fntpe, args.tpes) + if fntpe.paramInfos.hasSameLengthAs(args) || ctx.phase.prev.relaxedTyping then + if fntpe.isResultDependent then safeSubstMethodParams(fntpe, args.tpes) + else fntpe.resultType // fast path optimization else - errorType(i"wrong number of arguments at ${ctx.phase.prev} 
for $fntpe: ${fn.tpe}, expected: ${fntpe.paramInfos.length}, found: ${args.length}", tree.srcPos) + errorType(em"wrong number of arguments at ${ctx.phase.prev} for $fntpe: ${fn.tpe}, expected: ${fntpe.paramInfos.length}, found: ${args.length}", tree.srcPos) case t => if (ctx.settings.Ydebug.value) new FatalError("").printStackTrace() - errorType(err.takesNoParamsStr(fn, ""), tree.srcPos) + errorType(err.takesNoParamsMsg(fn, ""), tree.srcPos) } ConstFold.Apply(tree.withType(ownType)) } def assignType(tree: untpd.TypeApply, fn: Tree, args: List[Tree])(using Context): TypeApply = { - def fail = tree.withType(errorType(err.takesNoParamsStr(fn, "type "), tree.srcPos)) + def fail = tree.withType(errorType(err.takesNoParamsMsg(fn, "type "), tree.srcPos)) ConstFold(fn.tpe.widen match { case pt: TypeLambda => tree.withType { diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 33638df54fb1..74be1dee9a9b 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -44,6 +44,7 @@ import config.Feature import config.Feature.{sourceVersion, migrateTo3} import config.SourceVersion._ import rewrites.Rewrites.patch +import staging.StagingLevel import transform.SymUtils._ import transform.TypeUtils._ import reporting._ @@ -73,12 +74,6 @@ object Typer { /** An attachment for GADT constraints that were inferred for a pattern. */ val InferredGadtConstraints = new Property.StickyKey[core.GadtConstraint] - /** A context property that indicates the owner of any expressions to be typed in the context - * if that owner is different from the context's owner. Typically, a context with a class - * as owner would have a local dummy as ExprOwner value. - */ - private val ExprOwner = new Property.Key[Symbol] - /** An attachment on a Select node with an `apply` field indicating that the `apply` * was inserted by the Typer. 
*/ @@ -164,8 +159,13 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer * @param required flags the result's symbol must have * @param excluded flags the result's symbol must not have * @param pos indicates position to use for error reporting + * @param altImports a ListBuffer in which alternative imported references are + * collected in case `findRef` is called from an expansion of + * an extension method, i.e. when `e.m` is expanded to `m(e)` and + * a reference for `m` is searched. `null` in all other situations. */ - def findRef(name: Name, pt: Type, required: FlagSet, excluded: FlagSet, pos: SrcPos)(using Context): Type = { + def findRef(name: Name, pt: Type, required: FlagSet, excluded: FlagSet, pos: SrcPos, + altImports: mutable.ListBuffer[TermRef] | Null = null)(using Context): Type = { val refctx = ctx val noImports = ctx.mode.is(Mode.InPackageClauseName) def suppressErrors = excluded.is(ConstructorProxy) @@ -236,29 +236,68 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer fail(AmbiguousReference(name, newPrec, prevPrec, prevCtx)) previous - /** Recurse in outer context. If final result is same as `previous`, check that it - * is new or shadowed. This order of checking is necessary since an - * outer package-level definition might trump two conflicting inner - * imports, so no error should be issued in that case. See i7876.scala. + /** Assemble and check alternatives to an imported reference. This implies: + * - If we expand an extension method (i.e. altImports != null), + * search imports on the same level for other possible resolutions of `name`. + * The result and altImports together then contain all possible imported + * references of the highest possible precedence, where `NamedImport` beats + * `WildImport`. + * - Find a posssibly shadowing reference in an outer context. + * If the result is the same as `previous`, check that it is new or + * shadowed. 
This order of checking is necessary since an outer package-level + * definition might trump two conflicting inner imports, so no error should be + * issued in that case. See i7876.scala. + * @param previous the previously found reference (which is an import) + * @param prevPrec the precedence of the reference (either NamedImport or WildImport) + * @param prevCtx the context in which the reference was found + * @param using_Context the outer context of `precCtx` */ - def recurAndCheckNewOrShadowed(previous: Type, prevPrec: BindingPrec, prevCtx: Context)(using Context): Type = - val found = findRefRecur(previous, prevPrec, prevCtx) - if found eq previous then checkNewOrShadowed(found, prevPrec)(using prevCtx) - else found + def checkImportAlternatives(previous: Type, prevPrec: BindingPrec, prevCtx: Context)(using Context): Type = + + def addAltImport(altImp: TermRef) = + if !TypeComparer.isSameRef(previous, altImp) + && !altImports.uncheckedNN.exists(TypeComparer.isSameRef(_, altImp)) + then + altImports.uncheckedNN += altImp + + if Feature.enabled(Feature.relaxedExtensionImports) && altImports != null && ctx.isImportContext then + val curImport = ctx.importInfo.uncheckedNN + namedImportRef(curImport) match + case altImp: TermRef => + if prevPrec == WildImport then + // Discard all previously found references and continue with `altImp` + altImports.clear() + checkImportAlternatives(altImp, NamedImport, ctx)(using ctx.outer) + else + addAltImport(altImp) + checkImportAlternatives(previous, prevPrec, prevCtx)(using ctx.outer) + case _ => + if prevPrec == WildImport then + wildImportRef(curImport) match + case altImp: TermRef => addAltImport(altImp) + case _ => + checkImportAlternatives(previous, prevPrec, prevCtx)(using ctx.outer) + else + val found = findRefRecur(previous, prevPrec, prevCtx) + if found eq previous then checkNewOrShadowed(found, prevPrec)(using prevCtx) + else found + end checkImportAlternatives def selection(imp: ImportInfo, name: Name, checkBounds: 
Boolean): Type = imp.importSym.info match case ImportType(expr) => val pre = expr.tpe - var denot = pre.memberBasedOnFlags(name, required, excluded) + val denot0 = pre.memberBasedOnFlags(name, required, excluded) .accessibleFrom(pre)(using refctx) // Pass refctx so that any errors are reported in the context of the // reference instead of the context of the import scope - if denot.exists then - if checkBounds then - denot = denot.filterWithPredicate { mbr => - mbr.matchesImportBound(if mbr.symbol.is(Given) then imp.givenBound else imp.wildcardBound) - } + if denot0.exists then + val denot = + if checkBounds then + denot0.filterWithPredicate { mbr => + mbr.matchesImportBound(if mbr.symbol.is(Given) then imp.givenBound else imp.wildcardBound) + } + else denot0 def isScalaJsPseudoUnion = denot.name == tpnme.raw.BAR && ctx.settings.scalajs.value && denot.symbol == JSDefinitions.jsdefn.PseudoUnionClass // Just like Scala2Unpickler reinterprets Scala.js pseudo-unions @@ -283,7 +322,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer def checkUnambiguous(found: Type) = val other = recur(selectors.tail) if other.exists && found.exists && found != other then - fail(em"reference to `$name` is ambiguous; it is imported twice".toMessage) + fail(em"reference to `$name` is ambiguous; it is imported twice") found if selector.rename == termName && selector.rename != nme.WILDCARD then @@ -332,7 +371,6 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if (ctx.scope eq EmptyScope) previous else { var result: Type = NoType - val curOwner = ctx.owner /** Is curOwner a package object that should be skipped? 
@@ -376,6 +414,17 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case denot => !denot.hasAltWith(isCurrent) def checkNoOuterDefs(denot: Denotation, last: Context, prevCtx: Context): Unit = + def sameTermOrType(d1: SingleDenotation, d2: Denotation) = + d2.containsSym(d1.symbol) || d2.hasUniqueSym && { + val sym1 = d1.symbol + val sym2 = d2.symbol + if sym1.isTerm then + sym1.isStableMember && + sym2.isStableMember && + sym1.termRef =:= sym2.termRef + else + (sym1.isAliasType || sym2.isAliasType) && d1.info =:= d2.info + } val outer = last.outer val owner = outer.owner if (owner eq last.owner) && (outer.scope eq last.scope) then @@ -385,7 +434,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val competing = scope.denotsNamed(name).filterWithFlags(required, excluded) if competing.exists then val symsMatch = competing - .filterWithPredicate(sd => denot.containsSym(sd.symbol)) + .filterWithPredicate(sd => sameTermOrType(sd, denot)) .exists if !symsMatch && !suppressErrors then report.errorOrMigrationWarning( @@ -442,11 +491,11 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer else if (isPossibleImport(NamedImport) && (curImport nen outer.importInfo)) { val namedImp = namedImportRef(curImport.uncheckedNN) if (namedImp.exists) - recurAndCheckNewOrShadowed(namedImp, NamedImport, ctx)(using outer) + checkImportAlternatives(namedImp, NamedImport, ctx)(using outer) else if (isPossibleImport(WildImport) && !curImport.nn.importSym.isCompleting) { val wildImp = wildImportRef(curImport.uncheckedNN) if (wildImp.exists) - recurAndCheckNewOrShadowed(wildImp, WildImport, ctx)(using outer) + checkImportAlternatives(wildImp, WildImport, ctx)(using outer) else { updateUnimported() loop(ctx)(using outer) @@ -476,13 +525,15 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer * (x: T | Null) => x.$asInstanceOf$[x.type & T] */ def toNotNullTermRef(tree: Tree, pt: Type)(using Context): Tree = tree.tpe match - 
case ref @ OrNull(tpnn) : TermRef + case ref: TermRef if pt != AssignProto && // Ensure it is not the lhs of Assign ctx.notNullInfos.impliesNotNull(ref) && // If a reference is in the context, it is already trackable at the point we add it. // Hence, we don't use isTracked in the next line, because checking use out of order is enough. !ref.usedOutOfOrder => - tree.cast(AndType(ref, tpnn)) + ref match + case OrNull(tpnn) => tree.cast(AndType(ref, tpnn)) + case _ => tree case _ => tree @@ -525,7 +576,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val found = findRef(name, pt, EmptyFlags, EmptyFlags, tree.srcPos) if foundUnderScala2.exists && !(foundUnderScala2 =:= found) then report.migrationWarning( - ex"""Name resolution will change. + em"""Name resolution will change. | currently selected : $foundUnderScala2 | in the future, without -source 3.0-migration: $found""", tree.srcPos) foundUnderScala2 @@ -534,22 +585,40 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer unimported = saved1 foundUnderScala2 = saved2 - def checkNotShadowed(ownType: Type) = ownType match - case ownType: TermRef if ownType.symbol.is(ConstructorProxy) => - val shadowed = findRef(name, pt, EmptyFlags, ConstructorProxy, tree.srcPos) - if shadowed.exists then - report.error( - em"""Reference to constructor proxy for ${ownType.symbol.companionClass.showLocated} - |shadows outer reference to ${shadowed.termSymbol.showLocated}""", tree.srcPos) - case _ => + /** Normally, returns `ownType` except if `ownType` is a constructor proxy, + * and there is another shadowed type accessible with the same name that is not: + * - if the prototype is an application: + * - if the shadowed type has a method alternative or an apply method, + * issue an ambiguity error + * - otherwise again return `ownType` + * - if the prototype is not an application, return the shadowed type + */ + def checkNotShadowed(ownType: Type): Type = + ownType match + case ownType: TermRef if 
ownType.symbol.is(ConstructorProxy) => + findRef(name, pt, EmptyFlags, ConstructorProxy, tree.srcPos) match + case shadowed: TermRef if !shadowed.symbol.maybeOwner.isEmptyPackage => + pt match + case pt: FunOrPolyProto => + def err(shadowedIsApply: Boolean) = + report.error(ConstrProxyShadows(ownType, shadowed, shadowedIsApply), tree.srcPos) + if shadowed.denot.hasAltWith(sd => sd.symbol.is(Method, butNot = Accessor)) then + err(shadowedIsApply = false) + else if shadowed.member(nme.apply).hasAltWith(_.symbol.is(Method, butNot = Accessor)) then + err(shadowedIsApply = true) + case _ => + return shadowed + case shadowed => + case _ => + ownType def setType(ownType: Type): Tree = - checkNotShadowed(ownType) - val tree1 = ownType match - case ownType: NamedType if !prefixIsElidable(ownType) => - ref(ownType).withSpan(tree.span) + val checkedType = checkNotShadowed(ownType) + val tree1 = checkedType match + case checkedType: NamedType if !prefixIsElidable(checkedType) => + ref(checkedType).withSpan(tree.span) case _ => - tree.withType(ownType) + tree.withType(checkedType) val tree2 = toNotNullTermRef(tree1, pt) checkLegalValue(tree2, pt) tree2 @@ -584,7 +653,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer else if ctx.owner.isConstructor && !ctx.owner.isPrimaryConstructor && ctx.owner.owner.unforcedDecls.lookup(tree.name).exists then // we are in the arguments of a this(...) 
constructor call - errorTree(tree, ex"$tree is not accessible from constructor arguments") + errorTree(tree, em"$tree is not accessible from constructor arguments") else errorTree(tree, MissingIdent(tree, kind, name)) end typedIdent @@ -609,11 +678,15 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val superAccess = qual.isInstanceOf[Super] val rawType = selectionType(tree, qual) val checkedType = accessibleType(rawType, superAccess) - if checkedType.exists then + + def finish(tree: untpd.Select, qual: Tree, checkedType: Type): Tree = val select = toNotNullTermRef(assignType(tree, checkedType), pt) if selName.isTypeName then checkStable(qual.tpe, qual.srcPos, "type prefix") checkLegalValue(select, pt) ConstFold(select) + + if checkedType.exists then + finish(tree, qual, checkedType) else if selName == nme.apply && qual.tpe.widen.isInstanceOf[MethodType] then // Simplify `m.apply(...)` to `m(...)` qual @@ -622,9 +695,35 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // There's a second trial where we try to instantiate all type variables in `qual.tpe.widen`, // but that is done only after we search for extension methods or conversions. typedSelect(tree, pt, qual) + else if defn.isSmallGenericTuple(qual.tpe) then + val elems = defn.tupleTypes(qual.tpe.widenTermRefExpr).getOrElse(Nil) + typedSelect(tree, pt, qual.cast(defn.tupleType(elems))) else val tree1 = tryExtensionOrConversion( tree, pt, IgnoredProto(pt), qual, ctx.typerState.ownedVars, this, inSelect = true) + .orElse { + if ctx.gadt.isNarrowing then + // try GADT approximation if we're trying to select a member + // Member lookup cannot take GADTs into account b/c of cache, so we + // approximate types based on GADT constraints instead. For an example, + // see MemberHealing in gadt-approximation-interaction.scala. 
+ val wtp = qual.tpe.widen + gadts.println(i"Trying to heal member selection by GADT-approximating $wtp") + val gadtApprox = Inferencing.approximateGADT(wtp) + gadts.println(i"GADT-approximated $wtp ~~ $gadtApprox") + val qual1 = qual.cast(gadtApprox) + val tree1 = cpy.Select(tree0)(qual1, selName) + val checkedType1 = accessibleType(selectionType(tree1, qual1), superAccess = false) + if checkedType1.exists then + gadts.println(i"Member selection healed by GADT approximation") + finish(tree1, qual1, checkedType1) + else if defn.isSmallGenericTuple(qual1.tpe) then + gadts.println(i"Tuple member selection healed by GADT approximation") + typedSelect(tree, pt, qual1) + else + tryExtensionOrConversion(tree1, pt, IgnoredProto(pt), qual1, ctx.typerState.ownedVars, this, inSelect = true) + else EmptyTree + } if !tree1.isEmpty then tree1 else if canDefineFurther(qual.tpe.widen) then @@ -673,7 +772,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer javaSelectOnType(qual2) case _ => - errorTree(tree, "cannot convert to type selection") // will never be printed due to fallback + errorTree(tree, em"cannot convert to type selection") // will never be printed due to fallback } def selectWithFallback(fallBack: Context ?=> Tree) = @@ -804,14 +903,11 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer isSkolemFree(pt) && isEligible(pt.underlyingClassRef(refinementOK = false))) templ1 = cpy.Template(templ)(parents = untpd.TypeTree(pt) :: Nil) - templ1.parents foreach { - case parent: RefTree => - typedAhead(parent, tree => inferTypeParams(typedType(tree), pt)) - case _ => - } - val x = tpnme.ANON_CLASS - val clsDef = TypeDef(x, templ1).withFlags(Final | Synthetic) - typed(cpy.Block(tree)(clsDef :: Nil, New(Ident(x), Nil)), pt) + for case parent: RefTree <- templ1.parents do + typedAhead(parent, tree => inferTypeParams(typedType(tree), pt)) + val anon = tpnme.ANON_CLASS + val clsDef = TypeDef(anon, templ1).withFlags(Final | Synthetic) + 
typed(cpy.Block(tree)(clsDef :: Nil, New(Ident(anon), Nil)), pt) case _ => var tpt1 = typedType(tree.tpt) val tsym = tpt1.tpe.underlyingClassRef(refinementOK = false).typeSymbol @@ -987,8 +1083,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer def lhs1 = adapt(lhsCore, AssignProto, locked) def reassignmentToVal = - errorTree(cpy.Assign(tree)(lhsCore, typed(tree.rhs, lhs1.tpe.widen)), - ReassignmentToVal(lhsCore.symbol.name)) + report.error(ReassignmentToVal(lhsCore.symbol.name), tree.srcPos) + cpy.Assign(tree)(lhsCore, typed(tree.rhs, lhs1.tpe.widen)).withType(defn.UnitType) def canAssign(sym: Symbol) = sym.is(Mutable, butNot = Accessor) || @@ -1066,6 +1162,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val (stats1, exprCtx) = withoutMode(Mode.Pattern) { typedBlockStats(tree.stats) } + var expr1 = typedExpr(tree.expr, pt.dropIfProto)(using exprCtx) // If unsafe nulls is enabled inside a block but not enabled outside @@ -1184,8 +1281,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer ) end typedIf - /** Decompose function prototype into a list of parameter prototypes and a result prototype - * tree, using WildcardTypes where a type is not known. + /** Decompose function prototype into a list of parameter prototypes and a result + * prototype tree, using WildcardTypes where a type is not known. + * Note: parameter prototypes may be TypeBounds. * For the result type we do this even if the expected type is not fully * defined, which is a bit of a hack. But it's needed to make the following work * (see typers.scala and printers/PlainPrinter.scala for examples). @@ -1210,8 +1308,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer && defn.isContextFunctionType(pt1.nonPrivateMember(nme.apply).info.finalResultType) then report.error( - i"""Implementation restriction: Expected result type $pt1 - |is a curried dependent context function type. 
Such types are not yet supported.""", + em"""Implementation restriction: Expected result type $pt1 + |is a curried dependent context function type. Such types are not yet supported.""", pos) pt1 match { case tp: TypeParamRef => @@ -1221,9 +1319,10 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // if expected parameter type(s) are wildcards, approximate from below. // if expected result type is a wildcard, approximate from above. // this can type the greatest set of admissible closures. - (pt1.argTypesLo.init, typeTree(interpolateWildcards(pt1.argTypesHi.last))) + + (pt1.argInfos.init, typeTree(interpolateWildcards(pt1.argInfos.last.hiBound))) case RefinedType(parent, nme.apply, mt @ MethodTpe(_, formals, restpe)) - if defn.isNonRefinedFunction(parent) && formals.length == defaultArity => + if (defn.isNonRefinedFunction(parent) || defn.isErasedFunctionType(parent)) && formals.length == defaultArity => (formals, untpd.DependentTypeTree(syms => restpe.substParams(mt, syms.map(_.termRef)))) case SAMType(mt @ MethodTpe(_, formals, restpe)) => (formals, @@ -1254,20 +1353,22 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer * If both attempts fail, return `NoType`. */ def inferredFromTarget( - param: untpd.ValDef, formal: Type, calleeType: Type, paramIndex: Name => Int)(using Context): Type = + param: untpd.ValDef, formal: Type, calleeType: Type, isErased: Boolean, paramIndex: Name => Int)(using Context): Type = val target = calleeType.widen match case mtpe: MethodType => val pos = paramIndex(param.name) if pos < mtpe.paramInfos.length then - mtpe.paramInfos(pos) + val tp = mtpe.paramInfos(pos) // This works only if vararg annotations match up. // See neg/i14367.scala for an example where the inferred type is mispredicted. // Nevertheless, the alternative would be to give up completely, so this is // defensible. 
+ // Strip inferred erased annotation, to avoid accidentally inferring erasedness + if !isErased then tp.stripAnnots(_.symbol != defn.ErasedParamAnnot) else tp else NoType case _ => NoType if target.exists then formal <:< target - if isFullyDefined(formal, ForceDegree.flipBottom) then formal + if !formal.isExactlyNothing && isFullyDefined(formal, ForceDegree.flipBottom) then formal else if target.exists && isFullyDefined(target, ForceDegree.flipBottom) then target else NoType @@ -1277,32 +1378,19 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer def typedFunctionType(tree: untpd.Function, pt: Type)(using Context): Tree = { val untpd.Function(args, body) = tree - var funFlags = tree match { - case tree: untpd.FunctionWithMods => tree.mods.flags - case _ => EmptyFlags + body match + case untpd.CapturesAndResult(refs, result) => + return typedUnadapted(untpd.makeRetaining( + cpy.Function(tree)(args, result), refs, tpnme.retains), pt) + case _ => + var (funFlags, erasedParams) = tree match { + case tree: untpd.FunctionWithMods => (tree.mods.flags, tree.erasedParams) + case _ => (EmptyFlags, args.map(_ => false)) } - assert(!funFlags.is(Erased) || !args.isEmpty, "An empty function cannot not be erased") - val numArgs = args.length val isContextual = funFlags.is(Given) - val isErased = funFlags.is(Erased) val isImpure = funFlags.is(Impure) - val funSym = defn.FunctionSymbol(numArgs, isContextual, isErased, isImpure) - - /** If `app` is a function type with arguments that are all erased classes, - * turn it into an erased function type. 
- */ - def propagateErased(app: Tree): Tree = app match - case AppliedTypeTree(tycon: TypeTree, args) - if !isErased - && numArgs > 0 - && args.indexWhere(!_.tpe.isErasedClass) == numArgs => - val tycon1 = TypeTree(defn.FunctionSymbol(numArgs, isContextual, true, isImpure).typeRef) - .withSpan(tycon.span) - assignType(cpy.AppliedTypeTree(app)(tycon1, args), tycon1, args) - case _ => - app /** Typechecks dependent function type with given parameters `params` */ def typedDependent(params: List[untpd.ValDef])(using Context): Tree = @@ -1317,16 +1405,29 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if funFlags.is(Given) then params.map(_.withAddedFlags(Given)) else params val params2 = params1.map(fixThis.transformSub) - val appDef0 = untpd.DefDef(nme.apply, List(params2), body, EmptyTree).withSpan(tree.span) + val params3 = params2.zipWithConserve(erasedParams) { (arg, isErased) => + if isErased then arg.withAddedFlags(Erased) else arg + } + val appDef0 = untpd.DefDef(nme.apply, List(params3), body, EmptyTree).withSpan(tree.span) index(appDef0 :: Nil) val appDef = typed(appDef0).asInstanceOf[DefDef] val mt = appDef.symbol.info.asInstanceOf[MethodType] if (mt.isParamDependent) - report.error(i"$mt is an illegal function type because it has inter-parameter dependencies", tree.srcPos) + report.error(em"$mt is an illegal function type because it has inter-parameter dependencies", tree.srcPos) + // Restart typechecking if there are erased classes that we want to mark erased + if mt.erasedParams.zip(mt.paramInfos.map(_.isErasedClass)).exists((paramErased, classErased) => classErased && !paramErased) then + val newParams = params3.zipWithConserve(mt.paramInfos.map(_.isErasedClass)) { (arg, isErasedClass) => + if isErasedClass then arg.withAddedFlags(Erased) else arg + } + return typedDependent(newParams) val resTpt = TypeTree(mt.nonDependentResultApprox).withSpan(body.span) val typeArgs = appDef.termParamss.head.map(_.tpt) :+ resTpt - val tycon = 
TypeTree(funSym.typeRef) - val core = propagateErased(AppliedTypeTree(tycon, typeArgs)) + val core = + if mt.hasErasedParams then TypeTree(defn.ErasedFunctionClass.typeRef) + else + val funSym = defn.FunctionSymbol(numArgs, isContextual, isImpure) + val tycon = TypeTree(funSym.typeRef) + AppliedTypeTree(tycon, typeArgs) RefinedTypeTree(core, List(appDef), ctx.owner.asClass) end typedDependent @@ -1335,17 +1436,25 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer typedDependent(args.asInstanceOf[List[untpd.ValDef]])( using ctx.fresh.setOwner(newRefinedClassSymbol(tree.span)).setNewScope) case _ => - propagateErased( - typed(cpy.AppliedTypeTree(tree)(untpd.TypeTree(funSym.typeRef), args :+ body), pt)) + if erasedParams.contains(true) then + typedFunctionType(desugar.makeFunctionWithValDefs(tree, pt), pt) + else + val funSym = defn.FunctionSymbol(numArgs, isContextual, isImpure) + val result = typed(cpy.AppliedTypeTree(tree)(untpd.TypeTree(funSym.typeRef), args :+ body), pt) + // if there are any erased classes, we need to re-do the typecheck. + result match + case r: AppliedTypeTree if r.args.exists(_.tpe.isErasedClass) => + typedFunctionType(desugar.makeFunctionWithValDefs(tree, pt), pt) + case _ => result } } def typedFunctionValue(tree: untpd.Function, pt: Type)(using Context): Tree = { val untpd.Function(params: List[untpd.ValDef] @unchecked, _) = tree: @unchecked - val isContextual = tree match { - case tree: untpd.FunctionWithMods => tree.mods.is(Given) - case _ => false + val (isContextual, isDefinedErased) = tree match { + case tree: untpd.FunctionWithMods => (tree.mods.is(Given), tree.erasedParams) + case _ => (false, tree.args.map(_ => false)) } /** The function body to be returned in the closure. 
Can become a TypedSplice @@ -1446,9 +1555,10 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val (protoFormals, resultTpt) = decomposeProtoFunction(pt, params.length, tree.srcPos) - def protoFormal(i: Int): Type = - if (protoFormals.length == params.length) protoFormals(i) - else errorType(WrongNumberOfParameters(protoFormals.length), tree.srcPos) + /** Returns the type and whether the parameter is erased */ + def protoFormal(i: Int): (Type, Boolean) = + if (protoFormals.length == params.length) (protoFormals(i), isDefinedErased(i)) + else (errorType(WrongNumberOfParameters(protoFormals.length), tree.srcPos), false) /** Is `formal` a product type which is elementwise compatible with `params`? */ def ptIsCorrectProduct(formal: Type) = @@ -1460,11 +1570,13 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer } var desugared: untpd.Tree = EmptyTree - if protoFormals.length == 1 && params.length != 1 && ptIsCorrectProduct(protoFormals.head) then - val isGenericTuple = - protoFormals.head.derivesFrom(defn.TupleClass) - && !defn.isTupleClass(protoFormals.head.typeSymbol) - desugared = desugar.makeTupledFunction(params, fnBody, isGenericTuple) + if protoFormals.length == 1 && params.length != 1 then + val firstFormal = protoFormals.head.loBound + if ptIsCorrectProduct(firstFormal) then + val isGenericTuple = + firstFormal.derivesFrom(defn.TupleClass) + && !defn.isTupleClass(firstFormal.typeSymbol) + desugared = desugar.makeTupledFunction(params, fnBody, isGenericTuple) else if protoFormals.length > 1 && params.length == 1 then def isParamRef(scrut: untpd.Tree): Boolean = scrut match case untpd.Annotated(scrut1, _) => isParamRef(scrut1) @@ -1486,18 +1598,29 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer for ((param, i) <- params.zipWithIndex) yield if (!param.tpt.isEmpty) param else - val formal = protoFormal(i) + val (formalBounds, isErased) = protoFormal(i) + val formal = formalBounds.loBound + val 
isBottomFromWildcard = (formalBounds ne formal) && formal.isExactlyNothing val knownFormal = isFullyDefined(formal, ForceDegree.failBottom) + // If the expected formal is a TypeBounds wildcard argument with Nothing as lower bound, + // try to prioritize inferring from target. See issue 16405 (tests/run/16405.scala) val paramType = - if knownFormal then formal - else inferredFromTarget(param, formal, calleeType, paramIndex) - .orElse(errorType(AnonymousFunctionMissingParamType(param, tree, formal), param.srcPos)) + // Strip inferred erased annotation, to avoid accidentally inferring erasedness + val formal0 = if !isErased then formal.stripAnnots(_.symbol != defn.ErasedParamAnnot) else formal + if knownFormal && !isBottomFromWildcard then + formal0 + else + inferredFromTarget(param, formal, calleeType, isErased, paramIndex).orElse( + if knownFormal then formal0 + else errorType(AnonymousFunctionMissingParamType(param, tree, formal), param.srcPos) + ) val paramTpt = untpd.TypedSplice( (if knownFormal then InferredTypeTree() else untpd.TypeTree()) .withType(paramType.translateFromRepeated(toArray = false)) .withSpan(param.span.endPos) ) - cpy.ValDef(param)(tpt = paramTpt) + val param0 = cpy.ValDef(param)(tpt = paramTpt) + if isErased then param0.withAddedFlags(Flags.Erased) else param0 desugared = desugar.makeClosure(inferredParams, fnBody, resultTpt, isContextual, tree.span) typed(desugared, pt) @@ -1526,17 +1649,21 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // Replace the underspecified expected type by one based on the closure method type defn.PartialFunctionOf(mt.firstParamTypes.head, mt.resultType) else - report.error(ex"result type of lambda is an underspecified SAM type $pt", tree.srcPos) + report.error(em"result type of lambda is an underspecified SAM type $pt", tree.srcPos) pt TypeTree(targetTpe) case _ => if (mt.isParamDependent) errorTree(tree, - i"""cannot turn method type $mt into closure - |because it has internal parameter 
dependencies""") + em"""cannot turn method type $mt into closure + |because it has internal parameter dependencies""") else if ((tree.tpt `eq` untpd.ContextualEmptyTree) && mt.paramNames.isEmpty) // Note implicitness of function in target type since there are no method parameters that indicate it. - TypeTree(defn.FunctionOf(Nil, mt.resType, isContextual = true, isErased = false)) + TypeTree(defn.FunctionOf(Nil, mt.resType, isContextual = true)) + else if hasCaptureConversionArg(mt.resType) then + errorTree(tree, + em"""cannot turn method type $mt into closure + |because it has capture conversion skolem types""") else EmptyTree } @@ -1565,9 +1692,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer } else { val (protoFormals, _) = decomposeProtoFunction(pt, 1, tree.srcPos) - val checkMode = - if (pt.isRef(defn.PartialFunctionClass)) desugar.MatchCheck.None - else desugar.MatchCheck.Exhaustive + val checkMode = desugar.MatchCheck.Exhaustive typed(desugar.makeCaseLambda(tree.cases, checkMode, protoFormals.length).withSpan(tree.span), pt) } case _ => @@ -1650,7 +1775,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // skip exhaustivity check in later phase // TODO: move the check above to patternMatcher phase - val uncheckedTpe = AnnotatedType(sel.tpe.widen, Annotation(defn.UncheckedAnnot)) + val uncheckedTpe = AnnotatedType(sel.tpe.widen, Annotation(defn.UncheckedAnnot, tree.selector.span)) tpd.cpy.Match(result)( selector = tpd.Typed(sel, tpd.TypeTree(uncheckedTpe)), cases = result.cases @@ -1781,7 +1906,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer var body1 = typedType(cdef.body, pt) if !body1.isType then assert(ctx.reporter.errorsReported) - body1 = TypeTree(errorType("", cdef.srcPos)) + body1 = TypeTree(errorType(em"", cdef.srcPos)) assignType(cpy.CaseDef(cdef)(pat2, EmptyTree, body1), pat2, body1) } caseRest(using ctx.fresh.setFreshGADTBounds.setNewScope) @@ -1885,7 +2010,7 @@ class 
Typer(@constructorOnly nestingLevel: Int = 0) extends Namer Typed(res, TypeTree( AnnotatedType(res.tpe, - Annotation(defn.RequiresCapabilityAnnot, cap)))) + Annotation(defn.RequiresCapabilityAnnot, cap, tree.span)))) else res def typedSeqLiteral(tree: untpd.SeqLiteral, pt: Type)(using Context): SeqLiteral = { @@ -1929,7 +2054,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer .withType( if isFullyDefined(pt, ForceDegree.flipBottom) then pt else if ctx.reporter.errorsReported then UnspecifiedErrorType - else errorType(i"cannot infer type; expected type $pt is not fully defined", tree.srcPos)) + else errorType(em"cannot infer type; expected type $pt is not fully defined", tree.srcPos)) def typedTypeTree(tree: untpd.TypeTree, pt: Type)(using Context): Tree = tree match @@ -1943,13 +2068,13 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // untyped tree is no longer accessed after all // accesses with typedTypeTree are done. case None => - errorTree(tree, "Something's wrong: missing original symbol for type tree") + errorTree(tree, em"Something's wrong: missing original symbol for type tree") } case _ => completeTypeTree(InferredTypeTree(), pt, tree) def typedSingletonTypeTree(tree: untpd.SingletonTypeTree)(using Context): SingletonTypeTree = { - val ref1 = typedExpr(tree.ref) + val ref1 = typedExpr(tree.ref, SingletonTypeProto) checkStable(ref1.tpe, tree.srcPos, "singleton type") assignType(cpy.SingletonTypeTree(tree)(ref1), ref1) } @@ -1984,9 +2109,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer tree.args match case arg :: _ if arg.isTerm => if Feature.dependentEnabled then - return errorTree(tree, i"Not yet implemented: T(...)") + return errorTree(tree, em"Not yet implemented: T(...)") else - return errorTree(tree, dependentStr) + return errorTree(tree, dependentMsg) case _ => val tpt1 = withoutMode(Mode.Pattern) { @@ -2096,6 +2221,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer 
&& checkedArgs(1).tpe.derivesFrom(defn.RuntimeExceptionClass) then report.error(em"throws clause cannot be defined for RuntimeException", checkedArgs(1).srcPos) + else if tycon == defn.IntoType then + // is defined in package scala but this should be hidden from user programs + report.error(em"not found: ", tpt1.srcPos) else if (ctx.isJava) if tycon eq defn.ArrayClass then checkedArgs match { @@ -2122,24 +2250,30 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer def typedTermLambdaTypeTree(tree: untpd.TermLambdaTypeTree)(using Context): Tree = if Feature.dependentEnabled then - errorTree(tree, i"Not yet implemented: (...) =>> ...") + errorTree(tree, em"Not yet implemented: (...) =>> ...") else - errorTree(tree, dependentStr) + errorTree(tree, dependentMsg) def typedMatchTypeTree(tree: untpd.MatchTypeTree, pt: Type)(using Context): Tree = { val bound1 = if (tree.bound.isEmpty && isFullyDefined(pt, ForceDegree.none)) TypeTree(pt) else typed(tree.bound) val sel1 = typed(tree.selector) + val sel1Tpe = sel1.tpe + if sel1Tpe.isLambdaSub then + report.error(MatchTypeScrutineeCannotBeHigherKinded(sel1Tpe), sel1.srcPos) val pt1 = if (bound1.isEmpty) pt else bound1.tpe - val cases1 = tree.cases.mapconserve(typedTypeCase(_, sel1.tpe, pt1)) + val cases1 = tree.cases.mapconserve(typedTypeCase(_, sel1Tpe, pt1)) assignType(cpy.MatchTypeTree(tree)(bound1, sel1, cases1), bound1, sel1, cases1) } - def typedByNameTypeTree(tree: untpd.ByNameTypeTree)(using Context): ByNameTypeTree = { - val result1 = typed(tree.result) - assignType(cpy.ByNameTypeTree(tree)(result1), result1) - } + def typedByNameTypeTree(tree: untpd.ByNameTypeTree)(using Context): ByNameTypeTree = tree.result match + case untpd.CapturesAndResult(refs, tpe) => + typedByNameTypeTree( + cpy.ByNameTypeTree(tree)(untpd.makeRetaining(tpe, refs, tpnme.retainsByName))) + case _ => + val result1 = typed(tree.result) + assignType(cpy.ByNameTypeTree(tree)(result1), result1) def typedTypeBoundsTree(tree: 
untpd.TypeBoundsTree, pt: Type)(using Context): Tree = val TypeBoundsTree(lo, hi, alias) = tree @@ -2148,15 +2282,11 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val alias1 = typed(alias) val lo2 = if (lo1.isEmpty) typed(untpd.TypeTree(defn.NothingType)) else lo1 val hi2 = if (hi1.isEmpty) typed(untpd.TypeTree(defn.AnyType)) else hi1 - if !alias1.isEmpty then - val bounds = TypeBounds(lo2.tpe, hi2.tpe) - if !bounds.contains(alias1.tpe) then - report.error(em"type ${alias1.tpe} outside bounds $bounds", tree.srcPos) assignType(cpy.TypeBoundsTree(tree)(lo2, hi2, alias1), lo2, hi2, alias1) def typedBind(tree: untpd.Bind, pt: Type)(using Context): Tree = { if !isFullyDefined(pt, ForceDegree.all) then - return errorTree(tree, i"expected type of $tree is not fully defined") + return errorTree(tree, em"expected type of $tree is not fully defined") val body1 = typed(tree.body, pt) body1 match { case UnApply(fn, Nil, arg :: Nil) @@ -2222,29 +2352,23 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer /** The context to be used for an annotation of `mdef`. * This should be the context enclosing `mdef`, or if `mdef` defines a parameter * the context enclosing the owner of `mdef`. - * Furthermore, we need to evaluate annotation arguments in an expression context, - * since classes defined in a such arguments should not be entered into the - * enclosing class. + * Furthermore, we need to make sure that annotation trees are evaluated + * with an owner that is not the enclosing class since otherwise locally + * defined symbols would be entered as class members. 
*/ - def annotContext(mdef: untpd.Tree, sym: Symbol)(using Context): Context = { + def annotContext(mdef: untpd.Tree, sym: Symbol)(using Context): Context = def isInner(owner: Symbol) = owner == sym || sym.is(Param) && owner == sym.owner val outer = ctx.outersIterator.dropWhile(c => isInner(c.owner)).next() - var adjusted = outer.property(ExprOwner) match { - case Some(exprOwner) if outer.owner.isClass => outer.exprContext(mdef, exprOwner) - case _ => outer - } + def local: FreshContext = outer.fresh.setOwner(newLocalDummy(sym.owner)) sym.owner.infoOrCompleter match - case completer: Namer#Completer if sym.is(Param) => - val tparams = completer.completerTypeParams(sym) - if tparams.nonEmpty then - // Create a new local context with a dummy owner and a scope containing the - // type parameters of the enclosing method or class. Thus annotations can see - // these type parameters. See i12953.scala for a test case. - val dummyOwner = newLocalDummy(sym.owner) - adjusted = adjusted.fresh.setOwner(dummyOwner).setScope(newScopeWith(tparams*)) + case completer: Namer#Completer + if sym.is(Param) && completer.completerTypeParams(sym).nonEmpty => + // Create a new local context with a dummy owner and a scope containing the + // type parameters of the enclosing method or class. Thus annotations can see + // these type parameters. See i12953.scala for a test case. + local.setScope(newScopeWith(completer.completerTypeParams(sym)*)) case _ => - adjusted - } + if outer.owner.isClass then local else outer def completeAnnotations(mdef: untpd.MemberDef, sym: Symbol)(using Context): Unit = { // necessary to force annotation trees to be computed. 
@@ -2259,7 +2383,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer } def typedAnnotation(annot: untpd.Tree)(using Context): Tree = - checkAnnotArgs(typed(annot, defn.AnnotationClass.typeRef)) + checkAnnotClass(checkAnnotArgs(typed(annot))) def registerNowarn(tree: Tree, mdef: untpd.Tree)(using Context): Unit = val annot = Annotations.Annotation(tree) @@ -2305,11 +2429,17 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer } def typedDefDef(ddef: untpd.DefDef, sym: Symbol)(using Context): Tree = { - if (!sym.info.exists) { // it's a discarded synthetic case class method, drop it - assert(sym.is(Synthetic) && desugar.isRetractableCaseClassMethodName(sym.name)) + def canBeInvalidated(sym: Symbol): Boolean = + sym.is(Synthetic) + && (desugar.isRetractableCaseClassMethodName(sym.name) || + (sym.isConstructor && sym.owner.derivesFrom(defn.JavaRecordClass))) + + if !sym.info.exists then + // it's a discarded method (synthetic case class method or synthetic java record constructor), drop it + assert(canBeInvalidated(sym)) sym.owner.info.decls.openForMutations.unlink(sym) return EmptyTree - } + // TODO: - Remove this when `scala.language.experimental.erasedDefinitions` is no longer experimental. 
// - Modify signature to `erased def erasedValue[T]: T` if sym.eq(defn.Compiletime_erasedValue) then @@ -2336,7 +2466,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer ctx.outer.outersIterator.takeWhile(!_.owner.is(Method)) .filter(ctx => ctx.owner.isClass && ctx.owner.typeParams.nonEmpty) .toList.reverse - .foreach(ctx => rhsCtx.gadt.addToConstraint(ctx.owner.typeParams)) + .foreach(ctx => rhsCtx.gadtState.addToConstraint(ctx.owner.typeParams)) if tparamss.nonEmpty then rhsCtx.setFreshGADTBounds @@ -2345,7 +2475,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // we're typing a polymorphic definition's body, // so we allow constraining all of its type parameters // constructors are an exception as we don't allow constraining type params of classes - rhsCtx.gadt.addToConstraint(tparamSyms) + rhsCtx.gadtState.addToConstraint(tparamSyms) else if !sym.isPrimaryConstructor then linkConstructorParams(sym, tparamSyms, rhsCtx) @@ -2356,8 +2486,12 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer else typedExpr(ddef.rhs, tpt1.tpe.widenExpr)(using rhsCtx)) if sym.isInlineMethod then - if StagingContext.level > 0 then + if StagingLevel.level > 0 then report.error("inline def cannot be within quotes", sym.sourcePos) + if sym.is(Given) + && untpd.stripBlock(untpd.unsplice(ddef.rhs)).isInstanceOf[untpd.Function] + then + report.warning(InlineGivenShouldNotBeFunction(), ddef.rhs.srcPos) val rhsToInline = PrepareInlineable.wrapRHS(ddef, tpt1, rhs1) PrepareInlineable.registerInlineInfo(sym, rhsToInline) @@ -2444,7 +2578,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // error if the same parent was explicitly added in user code. 
if !tree.span.isSourceDerived then return EmptyTree - if !ctx.isAfterTyper then report.error(i"$psym is extended twice", tree.srcPos) + if !ctx.isAfterTyper then report.error(em"$psym is extended twice", tree.srcPos) else seenParents += psym val result = ensureConstrCall(cls, parent, psym)(using superCtx) @@ -2453,6 +2587,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer checkSimpleKinded(parent) // allow missing type parameters if there are implicit arguments to pass // since we can infer type arguments from them + val constr = psym.primaryConstructor + if psym.is(Trait) && constr.exists && !cls.isRefinementClass then + ensureAccessible(constr.termRef, superAccess = true, tree.srcPos) else checkParentCall(result, cls) if cls is Case then @@ -2555,13 +2692,26 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // check value class constraints checkDerivedValueClass(cls, body1) + // check PolyFunction constraints (no erased functions!) + if parents1.exists(_.tpe.classSymbol eq defn.PolyFunctionClass) then + body1.foreach { + case ddef: DefDef => + ddef.paramss.foreach { params => + val erasedParam = params.collectFirst { case vdef: ValDef if vdef.symbol.is(Erased) => vdef } + erasedParam.foreach { p => + report.error(em"Implementation restriction: erased classes are not allowed in a poly function definition", p.srcPos) + } + } + case _ => + } + val effectiveOwner = cls.owner.skipWeakOwner if !cls.isRefinementClass && !cls.isAllOf(PrivateLocal) && effectiveOwner.is(Trait) && !effectiveOwner.derivesFrom(defn.ObjectClass) then - report.error(i"$cls cannot be defined in universal $effectiveOwner", cdef.srcPos) + report.error(em"$cls cannot be defined in universal $effectiveOwner", cdef.srcPos) // Temporarily set the typed class def as root tree so that we have at least some // information in the IDE in case we never reach `SetRootTree`. 
@@ -2595,6 +2745,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer */ def ensureConstrCall(cls: ClassSymbol, parent: Tree, psym: Symbol)(using Context): Tree = if parent.isType && !cls.is(Trait) && !cls.is(JavaDefined) && psym.isClass + // Annotations are represented as traits with constructors, but should + // never be called as such outside of annotation trees. + && !psym.is(JavaAnnotation) && (!psym.is(Trait) || psym.primaryConstructor.info.takesParams && !cls.superClass.isSubClass(psym)) then typed(untpd.New(untpd.TypedSplice(parent), Nil)) @@ -2665,11 +2818,11 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // Package will not exist if a duplicate type has already been entered, see `tests/neg/1708.scala` errorTree(tree, if pkg.exists then PackageNameAlreadyDefined(pkg) - else i"package ${tree.pid.name} does not exist".toMessage) + else em"package ${tree.pid.name} does not exist") end typedPackageDef def typedAnnotated(tree: untpd.Annotated, pt: Type)(using Context): Tree = { - val annot1 = typedExpr(tree.annot, defn.AnnotationClass.typeRef) + val annot1 = checkAnnotClass(typedExpr(tree.annot)) val annotCls = Annotations.annotClass(annot1) if annotCls == defn.NowarnAnnot then registerNowarn(annot1, tree) @@ -2745,8 +2898,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if ((prefix ++ suffix).isEmpty) "simply leave out the trailing ` _`" else s"use `$prefix$suffix` instead" report.errorOrMigrationWarning( - i"""The syntax ` _` is no longer supported; - |you can $remedy""", + em"""The syntax ` _` is no longer supported; + |you can $remedy""", tree.srcPos, from = future) if sourceVersion.isMigrating then @@ -2878,7 +3031,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer else "" val namePos = tree.sourcePos.withSpan(tree.nameSpan) report.errorOrMigrationWarning( - s"`?` is not a valid type name$addendum", namePos, from = `3.0`) + em"`?` is not a valid type name$addendum", namePos, 
from = `3.0`) if tree.isClassDef then typedClassDef(tree, sym.asClass)(using ctx.localContext(tree, sym)) else @@ -2930,13 +3083,14 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case tree: untpd.TypedSplice => typedTypedSplice(tree) case tree: untpd.UnApply => typedUnApply(tree, pt) case tree: untpd.Tuple => typedTuple(tree, pt) - case tree: untpd.DependentTypeTree => completeTypeTree(untpd.TypeTree(), pt, tree) + case tree: untpd.DependentTypeTree => completeTypeTree(untpd.InferredTypeTree(), pt, tree) case tree: untpd.InfixOp => typedInfixOp(tree, pt) case tree: untpd.ParsedTry => typedTry(tree, pt) case tree @ untpd.PostfixOp(qual, Ident(nme.WILDCARD)) => typedAsFunction(tree, pt) case untpd.EmptyTree => tpd.EmptyTree case tree: untpd.Quote => typedQuote(tree, pt) case tree: untpd.Splice => typedSplice(tree, pt) + case tree: untpd.SplicePattern => typedSplicePattern(tree, pt) case tree: untpd.MacroTree => report.error("Unexpected macro", tree.srcPos); tpd.nullLiteral // ill-formed code may reach here case tree: untpd.Hole => typedHole(tree, pt) case _ => typedUnadapted(desugar(tree, pt), pt, locked) @@ -2978,7 +3132,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer tree protected def makeContextualFunction(tree: untpd.Tree, pt: Type)(using Context): Tree = { - val defn.FunctionOf(formals, _, true, _) = pt.dropDependentRefinement: @unchecked + val defn.FunctionOf(formals, _, true) = pt.dropDependentRefinement: @unchecked // The getter of default parameters may reach here. 
// Given the code below @@ -3006,7 +3160,12 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer else formals.map(untpd.TypeTree) } - val ifun = desugar.makeContextualFunction(paramTypes, tree, defn.isErasedFunctionType(pt)) + val erasedParams = pt.dealias match { + case RefinedType(parent, nme.apply, mt: MethodType) => mt.erasedParams + case _ => paramTypes.map(_ => false) + } + + val ifun = desugar.makeContextualFunction(paramTypes, tree, erasedParams) typr.println(i"make contextual function $tree / $pt ---> $ifun") typedFunctionValue(ifun, pt) } @@ -3076,16 +3235,12 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer traverse(xtree :: rest) case stat :: rest => val stat1 = typed(stat)(using ctx.exprContext(stat, exprOwner)) - checkStatementPurity(stat1)(stat, exprOwner) + if !checkInterestingResultInStatement(stat1) then checkStatementPurity(stat1)(stat, exprOwner) buf += stat1 traverse(rest)(using stat1.nullableContext) case nil => (buf.toList, ctx) } - val localCtx = { - val exprOwnerOpt = if (exprOwner == ctx.owner) None else Some(exprOwner) - ctx.withProperty(ExprOwner, exprOwnerOpt) - } def finalize(stat: Tree)(using Context): Tree = stat match { case stat: TypeDef if stat.symbol.is(Module) => val enumContext = enumContexts(stat.symbol.linkedClass) @@ -3098,7 +3253,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case _ => stat } - val (stats0, finalCtx) = traverse(stats)(using localCtx) + val (stats0, finalCtx) = traverse(stats) val stats1 = stats0.mapConserve(finalize) if ctx.owner == exprOwner then checkNoTargetNameConflict(stats1) (stats1, finalCtx) @@ -3312,11 +3467,37 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer def selectionProto = SelectionProto(tree.name, mbrProto, compat, privateOK = inSelect) def tryExtension(using Context): Tree = - findRef(tree.name, WildcardType, ExtensionMethod, EmptyFlags, qual.srcPos) match + val altImports = new mutable.ListBuffer[TermRef]() + 
findRef(tree.name, WildcardType, ExtensionMethod, EmptyFlags, qual.srcPos, altImports) match case ref: TermRef => - extMethodApply(untpd.TypedSplice(tpd.ref(ref).withSpan(tree.nameSpan)), qual, pt) + def tryExtMethod(ref: TermRef)(using Context) = + extMethodApply(untpd.TypedSplice(tpd.ref(ref).withSpan(tree.nameSpan)), qual, pt) + if altImports.isEmpty then + tryExtMethod(ref) + else + // Try all possible imports and collect successes and failures + val successes, failures = new mutable.ListBuffer[(Tree, TyperState)] + for alt <- ref :: altImports.toList do + val nestedCtx = ctx.fresh.setNewTyperState() + val app = tryExtMethod(alt)(using nestedCtx) + (if nestedCtx.reporter.hasErrors then failures else successes) + += ((app, nestedCtx.typerState)) + typr.println(i"multiple extensioin methods, success: ${successes.toList}, failure: ${failures.toList}") + + def pick(alt: (Tree, TyperState)): Tree = + val (app, ts) = alt + ts.commit() + app + + successes.toList match + case Nil => pick(failures.head) + case success :: Nil => pick(success) + case (expansion1, _) :: (expansion2, _) :: _ => + report.error(AmbiguousExtensionMethod(tree, expansion1, expansion2), tree.srcPos) + expansion1 case _ => EmptyTree + end tryExtension def nestedFailure(ex: TypeError) = rememberSearchFailure(qual, @@ -3347,7 +3528,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case SearchSuccess(found, _, _, isExtension) => if isExtension then return found else - checkImplicitConversionUseOK(found) + checkImplicitConversionUseOK(found, selProto) return withoutMode(Mode.ImplicitsEnabled)(typedSelect(tree, pt, found)) case failure: SearchFailure => if failure.isAmbiguous then @@ -3411,7 +3592,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer adapt(tree, pt, ctx.typerState.ownedVars) private def adapt1(tree: Tree, pt: Type, locked: TypeVars)(using Context): Tree = { - assert(pt.exists && !pt.isInstanceOf[ExprType] || ctx.reporter.errorsReported) + 
assert(pt.exists && !pt.isInstanceOf[ExprType] || ctx.reporter.errorsReported, i"tree: $tree, pt: $pt") def methodStr = err.refStr(methPart(tree).tpe) def readapt(tree: Tree)(using Context) = adapt(tree, pt, locked) @@ -3421,42 +3602,59 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer ErrorReporting.missingArgs(tree, mt) tree.withType(mt.resultType) - def adaptOverloaded(ref: TermRef) = { + def adaptOverloaded(ref: TermRef) = + // get all the alternatives val altDenots = val allDenots = ref.denot.alternatives if pt.isExtensionApplyProto then allDenots.filter(_.symbol.is(ExtensionMethod)) else allDenots + typr.println(i"adapt overloaded $ref with alternatives ${altDenots map (_.info)}%\n\n %") + + /** Search for an alternative that does not take parameters. + * If there is one, return it, otherwise emit an error. + */ + def tryParameterless(alts: List[TermRef])(error: => tpd.Tree): Tree = + alts.filter(_.info.isParameterless) match + case alt :: Nil => readaptSimplified(tree.withType(alt)) + case _ => + if altDenots.exists(_.info.paramInfoss == ListOfNil) then + typed(untpd.Apply(untpd.TypedSplice(tree), Nil), pt, locked) + else + error + def altRef(alt: SingleDenotation) = TermRef(ref.prefix, ref.name, alt) val alts = altDenots.map(altRef) - resolveOverloaded(alts, pt) match { + + resolveOverloaded(alts, pt) match case alt :: Nil => readaptSimplified(tree.withType(alt)) case Nil => - // If alternative matches, there are still two ways to recover: + // If no alternative matches, there are still two ways to recover: // 1. If context is an application, try to insert an apply or implicit // 2. If context is not an application, pick a alternative that does // not take parameters. 
- def noMatches = - errorTree(tree, NoMatchingOverload(altDenots, pt)) - def hasEmptyParams(denot: SingleDenotation) = denot.info.paramInfoss == ListOfNil - pt match { + + def errorNoMatch = errorTree(tree, NoMatchingOverload(altDenots, pt)) + + pt match case pt: FunOrPolyProto if pt.applyKind != ApplyKind.Using => // insert apply or convert qualifier, but only for a regular application - tryInsertApplyOrImplicit(tree, pt, locked)(noMatches) + tryInsertApplyOrImplicit(tree, pt, locked)(errorNoMatch) case _ => - alts.filter(_.info.isParameterless) match { - case alt :: Nil => readaptSimplified(tree.withType(alt)) - case _ => - if (altDenots exists (_.info.paramInfoss == ListOfNil)) - typed(untpd.Apply(untpd.TypedSplice(tree), Nil), pt, locked) - else - noMatches - } - } + tryParameterless(alts)(errorNoMatch) + case ambiAlts => - if tree.tpe.isErroneous || pt.isErroneous then tree.withType(UnspecifiedErrorType) - else + // If there are ambiguous alternatives, and: + // 1. the types aren't erroneous + // 2. the expected type is not a function type + // 3. there exist a parameterless alternative + // + // Then, pick the parameterless alternative. + // See tests/pos/i10715-scala and tests/pos/i10715-java. 
+ + /** Constructs an "ambiguous overload" error */ + def errorAmbiguous = val remainingDenots = altDenots.filter(denot => ambiAlts.contains(altRef(denot))) val addendum = if ambiAlts.exists(!_.symbol.exists) then @@ -3465,8 +3663,19 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer |Note: Overloaded definitions introduced by refinements cannot be resolved""" else "" errorTree(tree, AmbiguousOverload(tree, remainingDenots, pt, addendum)) - } - } + end errorAmbiguous + + if tree.tpe.isErroneous || pt.isErroneous then + tree.withType(UnspecifiedErrorType) + else + pt match + case _: FunProto => + errorAmbiguous + case _ => + tryParameterless(alts)(errorAmbiguous) + + end match + end adaptOverloaded def adaptToArgs(wtp: Type, pt: FunProto): Tree = wtp match { case wtp: MethodOrPoly => @@ -3703,7 +3912,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if (!defn.isFunctionType(pt)) pt match { case SAMType(_) if !pt.classSymbol.hasAnnotation(defn.FunctionalInterfaceAnnot) => - report.warning(ex"${tree.symbol} is eta-expanded even though $pt does not have the @FunctionalInterface annotation.", tree.srcPos) + report.warning(em"${tree.symbol} is eta-expanded even though $pt does not have the @FunctionalInterface annotation.", tree.srcPos) case _ => } simplify(typed(etaExpand(tree, wtp, arity), pt), pt, locked) @@ -3726,24 +3935,24 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer true } - if ((implicitFun || caseCompanion) && - !isApplyProto(pt) && - pt != AssignProto && - !ctx.mode.is(Mode.Pattern) && - !ctx.isAfterTyper && - !ctx.isInlineContext) { + if (implicitFun || caseCompanion) + && !isApplyProto(pt) + && pt != SingletonTypeProto + && pt != AssignProto + && !ctx.mode.is(Mode.Pattern) + && !ctx.isAfterTyper + && !ctx.isInlineContext + then typr.println(i"insert apply on implicit $tree") val sel = untpd.Select(untpd.TypedSplice(tree), nme.apply).withAttachment(InsertedApply, ()) try typed(sel, pt, locked) 
finally sel.removeAttachment(InsertedApply) - } - else if (ctx.mode is Mode.Pattern) { + else if ctx.mode is Mode.Pattern then checkEqualityEvidence(tree, pt) tree - } else val meth = methPart(tree).symbol if meth.isAllOf(DeferredInline) && !Inlines.inInlineMethod then - errorTree(tree, i"Deferred inline ${meth.showLocated} cannot be invoked") + errorTree(tree, em"Deferred inline ${meth.showLocated} cannot be invoked") else if Inlines.needsInlining(tree) then tree.tpe <:< wildApprox(pt) val errorCount = ctx.reporter.errorCount @@ -3763,8 +3972,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer } else { report.error( - """Scala 2 macro cannot be used in Dotty. See https://docs.scala-lang.org/scala3/reference/dropped-features/macros.html - |To turn this error into a warning, pass -Xignore-scala2-macros to the compiler""".stripMargin, tree.srcPos.startPos) + em"""Scala 2 macro cannot be used in Dotty. See https://docs.scala-lang.org/scala3/reference/dropped-features/macros.html + |To turn this error into a warning, pass -Xignore-scala2-macros to the compiler""", + tree.srcPos.startPos) tree } else TypeComparer.testSubType(tree.tpe.widenExpr, pt) match @@ -3777,7 +3987,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer adaptToSubType(wtp) case CompareResult.OKwithGADTUsed if pt.isValueType - && !inContext(ctx.fresh.setGadt(GadtConstraint.empty)) { + && !inContext(ctx.fresh.setGadtState(GadtState(GadtConstraint.empty))) { val res = (tree.tpe.widenExpr frozen_<:< pt) if res then // we overshot; a cast is not needed, after all. 
@@ -3842,7 +4052,10 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer else defn.functionArity(ptNorm) else val nparams = wtp.paramInfos.length - if nparams > 0 || pt.eq(AnyFunctionProto) then nparams + if nparams > 1 + || nparams == 1 && !wtp.isVarArgsMethod + || pt.eq(AnyFunctionProto) + then nparams else -1 // no eta expansion in this case adaptNoArgsUnappliedMethod(wtp, funExpected, arity) case _ => @@ -3882,7 +4095,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer return adaptConstant(tree, ConstantType(converted)) case _ => - val captured = captureWildcards(wtp) + val captured = captureWildcardsCompat(wtp, pt) if (captured `ne` wtp) return readapt(tree.cast(captured)) @@ -3892,6 +4105,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // so will take the code path that decides on inlining val tree1 = adapt(tree, WildcardType, locked) checkStatementPurity(tree1)(tree, ctx.owner) + if (!ctx.isAfterTyper && !tree.isInstanceOf[Inlined] && ctx.settings.WvalueDiscard.value && !isThisTypeResult(tree)) { + report.warning(ValueDiscarding(tree.tpe), tree.srcPos) + } return tpd.Block(tree1 :: Nil, Literal(Constant(()))) } @@ -3933,27 +4149,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer else err.typeMismatch(tree, pt, failure) pt match - case pt: SelectionProto => - if ctx.gadt.isNarrowing then - // try GADT approximation if we're trying to select a member - // Member lookup cannot take GADTs into account b/c of cache, so we - // approximate types based on GADT constraints instead. For an example, - // see MemberHealing in gadt-approximation-interaction.scala. 
- gadts.println(i"Trying to heal member selection by GADT-approximating $wtp") - val gadtApprox = Inferencing.approximateGADT(wtp) - gadts.println(i"GADT-approximated $wtp ~~ $gadtApprox") - if pt.isMatchedBy(gadtApprox) then - gadts.println(i"Member selection healed by GADT approximation") - tree.cast(gadtApprox) - else tree - else if tree.tpe.derivesFrom(defn.PairClass) && !defn.isTupleNType(tree.tpe.widenDealias) then - // If this is a generic tuple we need to cast it to make the TupleN/ members accessible. - // This works only for generic tuples of known size up to 22. - defn.tupleTypes(tree.tpe.widenTermRefExpr) match - case Some(elems) if elems.length <= Definitions.MaxTupleArity => - tree.cast(defn.tupleType(elems)) - case _ => tree - else tree // other adaptations for selections are handled in typedSelect + case _: SelectionProto => + tree // adaptations for selections are handled in typedSelect case _ if ctx.mode.is(Mode.ImplicitsEnabled) && tree.tpe.isValueType => if pt.isRef(defn.AnyValClass, skipRefined = false) || pt.isRef(defn.ObjectClass, skipRefined = false) @@ -3963,7 +4160,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case SearchSuccess(found, _, _, isExtension) => if isExtension then found else - checkImplicitConversionUseOK(found) + checkImplicitConversionUseOK(found, pt) withoutMode(Mode.ImplicitsEnabled)(readapt(found)) case failure: SearchFailure => if (pt.isInstanceOf[ProtoType] && !failure.isAmbiguous) then @@ -4151,6 +4348,59 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer typedExpr(cmp, defn.BooleanType) case _ => + private def checkInterestingResultInStatement(t: Tree)(using Context): Boolean = { + def isUninterestingSymbol(sym: Symbol): Boolean = + sym == NoSymbol || + sym.isConstructor || + sym.is(Package) || + sym.isPackageObject || + sym == defn.BoxedUnitClass || + sym == defn.AnyClass || + sym == defn.AnyRefAlias || + sym == defn.AnyValClass + def isUninterestingType(tpe: Type): Boolean 
= + tpe == NoType || + tpe.typeSymbol == defn.UnitClass || + defn.isBottomClass(tpe.typeSymbol) || + tpe =:= defn.UnitType || + tpe.typeSymbol == defn.BoxedUnitClass || + tpe =:= defn.AnyValType || + tpe =:= defn.AnyType || + tpe =:= defn.AnyRefType + def isJavaApplication(t: Tree): Boolean = t match { + case Apply(f, _) => f.symbol.is(JavaDefined) && !defn.ObjectClass.isSubClass(f.symbol.owner) + case _ => false + } + def checkInterestingShapes(t: Tree): Boolean = t match { + case If(_, thenpart, elsepart) => checkInterestingShapes(thenpart) || checkInterestingShapes(elsepart) + case Block(_, res) => checkInterestingShapes(res) + case Match(_, cases) => cases.exists(k => checkInterestingShapes(k.body)) + case _ => checksForInterestingResult(t) + } + def checksForInterestingResult(t: Tree): Boolean = ( + !t.isDef // ignore defs + && !isUninterestingSymbol(t.symbol) // ctors, package, Unit, Any + && !isUninterestingType(t.tpe) // bottom types, Unit, Any + && !isThisTypeResult(t) // buf += x + && !isSuperConstrCall(t) // just a thing + && !isJavaApplication(t) // Java methods are inherently side-effecting + // && !treeInfo.hasExplicitUnit(t) // suppressed by explicit expr: Unit // TODO Should explicit `: Unit` be added as warning suppression? 
+ ) + if ctx.settings.WNonUnitStatement.value && !ctx.isAfterTyper && checkInterestingShapes(t) then + val where = t match { + case Block(_, res) => res + case If(_, thenpart, Literal(Constant(()))) => + thenpart match { + case Block(_, res) => res + case _ => thenpart + } + case _ => t + } + report.warning(UnusedNonUnitValue(where.tpe), t.srcPos) + true + else false + } + private def checkStatementPurity(tree: tpd.Tree)(original: untpd.Tree, exprOwner: Symbol)(using Context): Unit = if !tree.tpe.isErroneous && !ctx.isAfterTyper @@ -4195,11 +4445,12 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case _ => if !config.Feature.scala2ExperimentalMacroEnabled then report.error( - """Scala 2 macro definition needs to be enabled - |by making the implicit value scala.language.experimental.macros visible. - |This can be achieved by adding the import clause 'import scala.language.experimental.macros' - |or by setting the compiler option -language:experimental.macros. - """.stripMargin, call.srcPos) + em"""Scala 2 macro definition needs to be enabled + |by making the implicit value scala.language.experimental.macros visible. + |This can be achieved by adding the import clause 'import scala.language.experimental.macros' + |or by setting the compiler option -language:experimental.macros. + """, + call.srcPos) call match case call: untpd.Ident => typedIdent(call, defn.AnyType) @@ -4214,7 +4465,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer typedTypeApply(call2, defn.AnyType) } case _ => - report.error("Invalid Scala 2 macro " + call.show, call.srcPos) + report.error(em"Invalid Scala 2 macro $call", call.srcPos) EmptyTree else typedExpr(call, defn.AnyType) @@ -4244,7 +4495,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // this is needed for -Ycheck. Without the annotation Ycheck will // skolemize the result type which will lead to different types before // and after checking. See i11955.scala. 
- AnnotatedType(conj, Annotation(defn.UncheckedStableAnnot)) + AnnotatedType(conj, Annotation(defn.UncheckedStableAnnot, tree.symbol.span)) else conj else pt gadts.println(i"insert GADT cast from $tree to $target") diff --git a/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala b/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala index 53646558cf5c..bcfc9288d862 100644 --- a/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala +++ b/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala @@ -164,11 +164,11 @@ class VarianceChecker(using Context) { i"\n${hl("enum case")} ${towner.name} requires explicit declaration of $tvar to resolve this issue.\n$example" else "" - i"${varianceLabel(tvar.flags)} $tvar occurs in ${varianceLabel(required)} position in type ${sym.info} of $sym$enumAddendum" + em"${varianceLabel(tvar.flags)} $tvar occurs in ${varianceLabel(required)} position in type ${sym.info} of $sym$enumAddendum" if (migrateTo3 && (sym.owner.isConstructor || sym.ownersIterator.exists(_.isAllOf(ProtectedLocal)))) report.migrationWarning( - s"According to new variance rules, this is no longer accepted; need to annotate with @uncheckedVariance:\n$msg", + msg.prepend("According to new variance rules, this is no longer accepted; need to annotate with @uncheckedVariance\n"), pos) // patch(Span(pos.end), " @scala.annotation.unchecked.uncheckedVariance") // Patch is disabled until two TODOs are solved: diff --git a/compiler/src/dotty/tools/dotc/util/Chars.scala b/compiler/src/dotty/tools/dotc/util/Chars.scala index 471b68d6247e..cde1a63f5293 100644 --- a/compiler/src/dotty/tools/dotc/util/Chars.scala +++ b/compiler/src/dotty/tools/dotc/util/Chars.scala @@ -1,21 +1,20 @@ package dotty.tools.dotc.util import scala.annotation.switch -import java.lang.{Character => JCharacter} -import java.lang.Character.LETTER_NUMBER -import java.lang.Character.LOWERCASE_LETTER -import java.lang.Character.OTHER_LETTER -import java.lang.Character.TITLECASE_LETTER -import 
java.lang.Character.UPPERCASE_LETTER +import Character.{LETTER_NUMBER, LOWERCASE_LETTER, OTHER_LETTER, TITLECASE_LETTER, UPPERCASE_LETTER} +import Character.{MATH_SYMBOL, OTHER_SYMBOL} +import Character.{isJavaIdentifierPart, isUnicodeIdentifierStart, isUnicodeIdentifierPart} /** Contains constants and classifier methods for characters */ -object Chars { +object Chars: inline val LF = '\u000A' inline val FF = '\u000C' inline val CR = '\u000D' inline val SU = '\u001A' + type CodePoint = Int + /** Convert a character digit to an Int according to given base, * -1 if no success */ @@ -59,17 +58,21 @@ object Chars { '0' <= c && c <= '9' || 'A' <= c && c <= 'Z' || 'a' <= c && c <= 'z' /** Can character start an alphanumeric Scala identifier? */ - def isIdentifierStart(c: Char): Boolean = - (c == '_') || (c == '$') || JCharacter.isUnicodeIdentifierStart(c) + def isIdentifierStart(c: Char): Boolean = (c == '_') || (c == '$') || isUnicodeIdentifierStart(c) + def isIdentifierStart(c: CodePoint): Boolean = (c == '_') || (c == '$') || isUnicodeIdentifierStart(c) /** Can character form part of an alphanumeric Scala identifier? */ - def isIdentifierPart(c: Char): Boolean = - (c == '$') || JCharacter.isUnicodeIdentifierPart(c) + def isIdentifierPart(c: Char): Boolean = (c == '$') || isUnicodeIdentifierPart(c) + def isIdentifierPart(c: CodePoint) = (c == '$') || isUnicodeIdentifierPart(c) /** Is character a math or other symbol in Unicode? */ def isSpecial(c: Char): Boolean = { - val chtp = JCharacter.getType(c) - chtp == JCharacter.MATH_SYMBOL.toInt || chtp == JCharacter.OTHER_SYMBOL.toInt + val chtp = Character.getType(c) + chtp == MATH_SYMBOL.toInt || chtp == OTHER_SYMBOL.toInt + } + def isSpecial(codePoint: CodePoint) = { + val chtp = Character.getType(codePoint) + chtp == MATH_SYMBOL.toInt || chtp == OTHER_SYMBOL.toInt } def isValidJVMChar(c: Char): Boolean = @@ -78,15 +81,26 @@ object Chars { def isValidJVMMethodChar(c: Char): Boolean = !(c == '.' 
|| c == ';' || c =='[' || c == '/' || c == '<' || c == '>') - private final val otherLetters = Set[Char]('\u0024', '\u005F') // '$' and '_' - private final val letterGroups = { - import JCharacter._ - Set[Byte](LOWERCASE_LETTER, UPPERCASE_LETTER, OTHER_LETTER, TITLECASE_LETTER, LETTER_NUMBER) - } - def isScalaLetter(ch: Char): Boolean = letterGroups(JCharacter.getType(ch).toByte) || otherLetters(ch) + def isScalaLetter(c: Char): Boolean = + Character.getType(c: @switch) match { + case LOWERCASE_LETTER | UPPERCASE_LETTER | OTHER_LETTER | TITLECASE_LETTER | LETTER_NUMBER => true + case _ => c == '$' || c == '_' + } + def isScalaLetter(c: CodePoint): Boolean = + Character.getType(c: @switch) match { + case LOWERCASE_LETTER | UPPERCASE_LETTER | OTHER_LETTER | TITLECASE_LETTER | LETTER_NUMBER => true + case _ => c == '$' || c == '_' + } /** Can character form part of a Scala operator name? */ - def isOperatorPart(c : Char) : Boolean = (c: @switch) match { + def isOperatorPart(c: Char): Boolean = (c: @switch) match { + case '~' | '!' | '@' | '#' | '%' | + '^' | '*' | '+' | '-' | '<' | + '>' | '?' | ':' | '=' | '&' | + '|' | '/' | '\\' => true + case c => isSpecial(c) + } + def isOperatorPart(c: CodePoint): Boolean = (c: @switch) match { case '~' | '!' | '@' | '#' | '%' | '^' | '*' | '+' | '-' | '<' | '>' | '?' | ':' | '=' | '&' | @@ -95,5 +109,4 @@ object Chars { } /** Would the character be encoded by `NameTransformer.encode`? 
*/ - def willBeEncoded(c : Char) : Boolean = !JCharacter.isJavaIdentifierPart(c) -} + def willBeEncoded(c: Char): Boolean = !isJavaIdentifierPart(c) diff --git a/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala b/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala index fd6518fcc15c..a21a4af37038 100644 --- a/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala +++ b/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala @@ -42,9 +42,10 @@ abstract class GenericHashMap[Key, Value] else 1 << (32 - Integer.numberOfLeadingZeros(n)) /** Remove all elements from this table and set back to initial configuration */ - def clear(): Unit = + def clear(resetToInitial: Boolean): Unit = used = 0 - allocate(roundToPower(initialCapacity)) + if resetToInitial then allocate(roundToPower(initialCapacity)) + else java.util.Arrays.fill(table, null) /** The number of elements in the set */ def size: Int = used diff --git a/compiler/src/dotty/tools/dotc/util/HashSet.scala b/compiler/src/dotty/tools/dotc/util/HashSet.scala index a524dd39a594..a6e1532c804f 100644 --- a/compiler/src/dotty/tools/dotc/util/HashSet.scala +++ b/compiler/src/dotty/tools/dotc/util/HashSet.scala @@ -44,11 +44,10 @@ class HashSet[T](initialCapacity: Int = 8, capacityMultiple: Int = 2) extends Mu else if Integer.bitCount(n) == 1 then n else 1 << (32 - Integer.numberOfLeadingZeros(n)) - /** Remove all elements from this set and set back to initial configuration */ - def clear(): Unit = { + def clear(resetToInitial: Boolean): Unit = used = 0 - allocate(roundToPower(initialCapacity)) - } + if resetToInitial then allocate(roundToPower(initialCapacity)) + else java.util.Arrays.fill(table, null) /** The number of elements in the set */ def size: Int = used diff --git a/compiler/src/dotty/tools/dotc/util/MutableMap.scala b/compiler/src/dotty/tools/dotc/util/MutableMap.scala index ba912a312aea..283e28e7e04f 100644 --- a/compiler/src/dotty/tools/dotc/util/MutableMap.scala +++ 
b/compiler/src/dotty/tools/dotc/util/MutableMap.scala @@ -13,6 +13,10 @@ abstract class MutableMap[Key, Value] extends ReadOnlyMap[Key, Value]: remove(k) this - def clear(): Unit + /** Remove all bindings from this map. + * @param resetToInitial If true, set back to initial configuration, which includes + * reallocating tables. + */ + def clear(resetToInitial: Boolean = true): Unit def getOrElseUpdate(key: Key, value: => Value): Value diff --git a/compiler/src/dotty/tools/dotc/util/MutableSet.scala b/compiler/src/dotty/tools/dotc/util/MutableSet.scala index 6e3ae7628eb6..9529262fa5ec 100644 --- a/compiler/src/dotty/tools/dotc/util/MutableSet.scala +++ b/compiler/src/dotty/tools/dotc/util/MutableSet.scala @@ -15,7 +15,11 @@ abstract class MutableSet[T] extends ReadOnlySet[T]: /** Remove element `x` from the set */ def -=(x: T): Unit - def clear(): Unit + /** Remove all elements from this set. + * @param resetToInitial If true, set back to initial configuration, which includes + * reallocating tables. + */ + def clear(resetToInitial: Boolean = true): Unit def ++= (xs: IterableOnce[T]): Unit = xs.iterator.foreach(this += _) diff --git a/compiler/src/dotty/tools/dotc/util/Signatures.scala b/compiler/src/dotty/tools/dotc/util/Signatures.scala index ddf89e7dd04d..5513a1f803c6 100644 --- a/compiler/src/dotty/tools/dotc/util/Signatures.scala +++ b/compiler/src/dotty/tools/dotc/util/Signatures.scala @@ -54,7 +54,7 @@ object Signatures { * Extract (current parameter index, function index, functions) method call for given position. * * @param path The path to the function application - * @param span The position of the cursor + * @param pos The position of the cursor * * @return A triple containing the index of the parameter being edited, the index of functeon * being called, the list of overloads of this function). 
diff --git a/compiler/src/dotty/tools/dotc/util/Spans.scala b/compiler/src/dotty/tools/dotc/util/Spans.scala index baf2cfa121b0..e1487408f36b 100644 --- a/compiler/src/dotty/tools/dotc/util/Spans.scala +++ b/compiler/src/dotty/tools/dotc/util/Spans.scala @@ -86,7 +86,6 @@ object Spans { || containsInner(this, that.end) || containsInner(that, this.start) || containsInner(that, this.end) - || this.start == that.start && this.end == that.end // exact match in one point ) } @@ -182,6 +181,7 @@ object Spans { assert(isSpan) if (this == NoCoord) NoSpan else Span(-1 - encoding) } + override def toString = if isSpan then s"$toSpan" else s"Coord(idx=$toIndex)" } /** An index coordinate */ diff --git a/compiler/src/dotty/tools/dotc/util/StackTraceOps.scala b/compiler/src/dotty/tools/dotc/util/StackTraceOps.scala index 7fa54606c572..f991005f0c43 100644 --- a/compiler/src/dotty/tools/dotc/util/StackTraceOps.scala +++ b/compiler/src/dotty/tools/dotc/util/StackTraceOps.scala @@ -29,7 +29,6 @@ object StackTraceOps: * If a stack trace is truncated, it will be followed by a line of the form * `... 3 elided`, by analogy to the lines `... 3 more` which indicate * shared stack trace segments. 
- * @param e the exception * @param p the predicate to select the prefix */ def formatStackTracePrefix(p: StackTraceElement => Boolean): String = diff --git a/compiler/src/dotty/tools/dotc/util/Stats.scala b/compiler/src/dotty/tools/dotc/util/Stats.scala index f04957f26400..e9b72015b202 100644 --- a/compiler/src/dotty/tools/dotc/util/Stats.scala +++ b/compiler/src/dotty/tools/dotc/util/Stats.scala @@ -55,15 +55,14 @@ import collection.mutable } def maybeMonitored[T](op: => T)(using Context): T = - if (ctx.settings.YdetailedStats.value && hits.nonEmpty) { + if ctx.settings.YdetailedStats.value then monitored = true try op - finally { - aggregate() - println() - println(hits.toList.sortBy(_._2).map{ case (x, y) => s"$x -> $y" } mkString "\n") - hits.clear() - } - } + finally + if hits.nonEmpty then + aggregate() + println() + println(hits.toList.sortBy(_._2).map{ case (x, y) => s"$x -> $y" } mkString "\n") + hits.clear() else op } diff --git a/compiler/src/dotty/tools/dotc/util/WeakHashSet.scala b/compiler/src/dotty/tools/dotc/util/WeakHashSet.scala index 3c23b181a041..975826a87a37 100644 --- a/compiler/src/dotty/tools/dotc/util/WeakHashSet.scala +++ b/compiler/src/dotty/tools/dotc/util/WeakHashSet.scala @@ -204,7 +204,7 @@ abstract class WeakHashSet[A <: AnyRef](initialCapacity: Int = 8, loadFactor: Do linkedListLoop(null, table(bucket)) } - def clear(): Unit = { + def clear(resetToInitial: Boolean): Unit = { table = new Array[Entry[A] | Null](table.size) threshold = computeThreshold count = 0 diff --git a/compiler/src/dotty/tools/dotc/util/concurrent.scala b/compiler/src/dotty/tools/dotc/util/concurrent.scala new file mode 100644 index 000000000000..2710aae6c9b0 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/util/concurrent.scala @@ -0,0 +1,62 @@ +package dotty.tools.dotc.util +import scala.util.{Try, Failure, Success} +import scala.collection.mutable.ArrayBuffer + +object concurrent: + + class NoCompletion extends RuntimeException + + class Future[T](exec: 
Executor[T]): + private var result: Option[Try[T]] = None + def force: Try[T] = synchronized { + while result.isEmpty && exec.isAlive do wait(1000 /*ms*/) + result.getOrElse(Failure(NoCompletion())) + } + def complete(r: Try[T]): Unit = synchronized { + result = Some(r) + notifyAll() + } + end Future + + class Executor[T] extends Thread: + private type WorkItem = (Future[T], () => T) + + private var allScheduled = false + private val pending = new ArrayBuffer[WorkItem] + + def schedule(op: () => T): Future[T] = synchronized { + assert(!allScheduled) + val f = Future[T](this) + pending += ((f, op)) + notifyAll() + f + } + + def close(): Unit = synchronized { + allScheduled = true + notifyAll() + } + + private def nextPending(): Option[WorkItem] = synchronized { + while pending.isEmpty && !allScheduled do wait(1000 /*ms*/) + if pending.isEmpty then None + else + val item = pending.head + pending.dropInPlace(1) + Some(item) + } + + override def run(): Unit = + while + nextPending() match + case Some((f, op)) => + f.complete(Try(op())) + true + case None => + false + do () + end Executor +end concurrent + + + diff --git a/compiler/src/dotty/tools/dotc/util/lrutest.sc b/compiler/src/dotty/tools/dotc/util/lrutest.sc index 6e6328b248e3..9c811a65a70a 100644 --- a/compiler/src/dotty/tools/dotc/util/lrutest.sc +++ b/compiler/src/dotty/tools/dotc/util/lrutest.sc @@ -15,12 +15,12 @@ object lrutest { cache.last //> res4: Int = 6 cache lookup "hi" //> res5: String = x cache.indices.take(10).toList //> res6: List[Int] = List(7, 0, 1, 2, 3, 4, 5, 6, 7, 0) - + for (i <- 1 to 10) { if (cache.lookup(i.toString) == null) cache.enter(i.toString, i.toString) } - + cache.indices.take(10).toList //> res7: List[Int] = List(5, 6, 7, 0, 1, 2, 3, 4, 5, 6) cache //> res8: dotty.tools.dotc.util.LRUCache[String,String] = LRUCache(10 -> 10, 9 - //| > 9, 8 -> 8, 7 -> 7, 6 -> 6, 5 -> 5, 4 -> 4, 3 -> 3) @@ -35,6 +35,6 @@ object lrutest { //| > 10, 7 -> 7, 9 -> 9, 8 -> 8, 6 -> 6, 4 -> 4, 3 -> 3) 
cache.lookup("11") //> res16: String = null cache.enter("11", "!!") - cache //> res17: dotty.tools.dotc.util.LRUCache[String,String] = LRUCache(11 -> !!, 5 + cache //> res17: dotty.tools.dotc.util.LRUCache[String,String] = LRUCache(11 -> !!, 5 //| -> 5, 10 -> 10, 7 -> 7, 9 -> 9, 8 -> 8, 6 -> 6, 4 -> 4) } \ No newline at end of file diff --git a/compiler/src/dotty/tools/dotc/util/optional.scala b/compiler/src/dotty/tools/dotc/util/optional.scala new file mode 100644 index 000000000000..cb62315d3c98 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/util/optional.scala @@ -0,0 +1,19 @@ +package dotty.tools.dotc.util + +import scala.util.boundary + +/** Return type that indicates that the method returns a T or aborts to the enclosing boundary with a `None` */ +type optional[T] = boundary.Label[None.type] ?=> T + +/** A prompt for `Option`, which establishes a boundary which `_.?` on `Option` can return */ +object optional: + inline def apply[T](inline body: optional[T]): Option[T] = + boundary(Some(body)) + + extension [T](r: Option[T]) + inline def ? 
(using label: boundary.Label[None.type]): T = r match + case Some(x) => x + case None => boundary.break(None) + + inline def break()(using label: boundary.Label[None.type]): Nothing = + boundary.break(None) diff --git a/compiler/src/dotty/tools/io/AbstractFile.scala b/compiler/src/dotty/tools/io/AbstractFile.scala index 29bc764dcd7b..f34fe6f40b9c 100644 --- a/compiler/src/dotty/tools/io/AbstractFile.scala +++ b/compiler/src/dotty/tools/io/AbstractFile.scala @@ -260,8 +260,10 @@ abstract class AbstractFile extends Iterable[AbstractFile] { // a race condition in creating the entry after the failed lookup may throw val path = jpath.resolve(name) - if (isDir) Files.createDirectory(path) - else Files.createFile(path) + try + if (isDir) Files.createDirectory(path) + else Files.createFile(path) + catch case _: FileAlreadyExistsException => () new PlainFile(new File(path)) case lookup => lookup } diff --git a/compiler/src/dotty/tools/io/ClassPath.scala b/compiler/src/dotty/tools/io/ClassPath.scala index 754c2bae3597..b45de57f9850 100644 --- a/compiler/src/dotty/tools/io/ClassPath.scala +++ b/compiler/src/dotty/tools/io/ClassPath.scala @@ -9,8 +9,7 @@ package io import scala.language.unsafeNulls -import java.net.MalformedURLException -import java.net.URL +import java.net.{MalformedURLException, URI, URISyntaxException, URL} import java.util.regex.PatternSyntaxException import File.pathSeparator @@ -182,8 +181,8 @@ object ClassPath { } def specToURL(spec: String): Option[URL] = - try Some(new URL(spec)) - catch { case _: MalformedURLException => None } + try Some(new URI(spec).toURL) + catch case _: MalformedURLException | _: URISyntaxException => None def manifests: List[java.net.URL] = { import scala.jdk.CollectionConverters.EnumerationHasAsScala diff --git a/compiler/src/dotty/tools/io/JDK9Reflectors.java b/compiler/src/dotty/tools/io/JDK9Reflectors.java index 1b0ce5deabab..9816cc03f92a 100644 --- a/compiler/src/dotty/tools/io/JDK9Reflectors.java +++ 
b/compiler/src/dotty/tools/io/JDK9Reflectors.java @@ -32,7 +32,7 @@ public final class JDK9Reflectors { } // Classes from java.lang.Runtime are not available in JDK 8 so using them explicitly would prevent this file from compiling with JDK 8 - // but these methods are not called in runtime when using this version of JDK + // but these methods are not called in runtime when using this version of JDK public static /*java.lang.Runtime.Version*/ Object runtimeVersionParse(String string) { try { diff --git a/compiler/src/dotty/tools/package.scala b/compiler/src/dotty/tools/package.scala index 57a58151acc7..f90355b1fa8e 100644 --- a/compiler/src/dotty/tools/package.scala +++ b/compiler/src/dotty/tools/package.scala @@ -1,10 +1,6 @@ package dotty package object tools { - // Ensure this object is already classloaded, since it's only actually used - // when handling stack overflows and every operation (including class loading) - // risks failing. - dotty.tools.dotc.core.handleRecursive val ListOfNil: List[Nil.type] = Nil :: Nil @@ -18,7 +14,7 @@ package object tools { * Flow-typing under explicit nulls will automatically insert many necessary * occurrences of uncheckedNN. */ - inline def uncheckedNN: T = x.asInstanceOf[T] + transparent inline def uncheckedNN: T = x.asInstanceOf[T] inline def toOption: Option[T] = if x == null then None else Some(x.asInstanceOf[T]) @@ -49,4 +45,9 @@ package object tools { val e = if msg == null then AssertionError() else AssertionError("assertion failed: " + msg) e.setStackTrace(Array()) throw e -} + + // Ensure this object is already classloaded, since it's only actually used + // when handling stack overflows and every operation (including class loading) + // risks failing. 
+ dotty.tools.dotc.core.handleRecursive + } diff --git a/compiler/src/dotty/tools/repl/AbstractFileClassLoader.scala b/compiler/src/dotty/tools/repl/AbstractFileClassLoader.scala index 89fd290f7286..7a457a1d7546 100644 --- a/compiler/src/dotty/tools/repl/AbstractFileClassLoader.scala +++ b/compiler/src/dotty/tools/repl/AbstractFileClassLoader.scala @@ -23,6 +23,9 @@ import java.util.Collections class AbstractFileClassLoader(val root: AbstractFile, parent: ClassLoader) extends ClassLoader(parent): private def findAbstractFile(name: String) = root.lookupPath(name.split('/').toIndexedSeq, directory = false) + // on JDK 20 the URL constructor we're using is deprecated, + // but the recommended replacement, URL.of, doesn't exist on JDK 8 + @annotation.nowarn("cat=deprecation") override protected def findResource(name: String) = findAbstractFile(name) match case null => null diff --git a/compiler/src/dotty/tools/repl/JLineTerminal.scala b/compiler/src/dotty/tools/repl/JLineTerminal.scala index 9da12ae955d1..8e048d786ae1 100644 --- a/compiler/src/dotty/tools/repl/JLineTerminal.scala +++ b/compiler/src/dotty/tools/repl/JLineTerminal.scala @@ -16,7 +16,7 @@ import org.jline.reader.impl.history.DefaultHistory import org.jline.terminal.TerminalBuilder import org.jline.utils.AttributedString -final class JLineTerminal extends java.io.Closeable { +class JLineTerminal extends java.io.Closeable { // import java.util.logging.{Logger, Level} // Logger.getLogger("org.jline").setLevel(Level.FINEST) @@ -30,7 +30,8 @@ final class JLineTerminal extends java.io.Closeable { private def blue(str: String)(using Context) = if (ctx.settings.color.value != "never") Console.BLUE + str + Console.RESET else str - private def prompt(using Context) = blue("\nscala> ") + protected def promptStr = "scala" + private def prompt(using Context) = blue(s"\n$promptStr> ") private def newLinePrompt(using Context) = blue(" | ") /** Blockingly read line from `System.in` diff --git 
a/compiler/src/dotty/tools/repl/Rendering.scala b/compiler/src/dotty/tools/repl/Rendering.scala index 64e7ab72d3dd..c647ef302bb9 100644 --- a/compiler/src/dotty/tools/repl/Rendering.scala +++ b/compiler/src/dotty/tools/repl/Rendering.scala @@ -3,18 +3,14 @@ package repl import scala.language.unsafeNulls -import java.lang.{ ClassLoader, ExceptionInInitializerError } -import java.lang.reflect.InvocationTargetException - -import dotc.core.Contexts._ -import dotc.core.Denotations.Denotation -import dotc.core.Flags -import dotc.core.Flags._ -import dotc.core.Symbols.{Symbol, defn} -import dotc.core.StdNames.{nme, str} -import dotc.printing.ReplPrinter -import dotc.reporting.Diagnostic -import dotc.transform.ValueClasses +import dotc.*, core.* +import Contexts.*, Denotations.*, Flags.*, NameOps.*, StdNames.*, Symbols.* +import printing.ReplPrinter +import reporting.Diagnostic +import transform.ValueClasses +import util.StackTraceOps.* + +import scala.util.control.NonFatal /** This rendering object uses `ClassLoader`s to accomplish crossing the 4th * wall (i.e. 
fetching back values from the compiled class files put into a @@ -28,10 +24,10 @@ private[repl] class Rendering(parentClassLoader: Option[ClassLoader] = None): import Rendering._ - private var myClassLoader: AbstractFileClassLoader = _ + var myClassLoader: AbstractFileClassLoader = _ /** (value, maxElements, maxCharacters) => String */ - private var myReplStringOf: (Object, Int, Int) => String = _ + var myReplStringOf: (Object, Int, Int) => String = _ /** Class loader used to load compiled code */ private[repl] def classLoader()(using Context) = @@ -131,8 +127,7 @@ private[repl] class Rendering(parentClassLoader: Option[ClassLoader] = None): */ private def rewrapValueClass(sym: Symbol, value: Object)(using Context): Option[Object] = if ValueClasses.isDerivedValueClass(sym) then - val valueClassName = sym.flatName.encode.toString - val valueClass = Class.forName(valueClassName, true, classLoader()) + val valueClass = Class.forName(sym.binaryClassName, true, classLoader()) valueClass.getConstructors.headOption.map(_.newInstance(value)) else Some(value) @@ -148,7 +143,7 @@ private[repl] class Rendering(parentClassLoader: Option[ClassLoader] = None): infoDiagnostic(d.symbol.showUser, d) /** Render value definition result */ - def renderVal(d: Denotation)(using Context): Either[InvocationTargetException, Option[Diagnostic]] = + def renderVal(d: Denotation)(using Context): Either[ReflectiveOperationException, Option[Diagnostic]] = val dcl = d.symbol.showUser def msg(s: String) = infoDiagnostic(s, d) try @@ -156,12 +151,11 @@ private[repl] class Rendering(parentClassLoader: Option[ClassLoader] = None): if d.symbol.is(Flags.Lazy) then Some(msg(dcl)) else valueOf(d.symbol).map(value => msg(s"$dcl = $value")) ) - catch case e: InvocationTargetException => Left(e) + catch case e: ReflectiveOperationException => Left(e) end renderVal /** Force module initialization in the absence of members. 
*/ def forceModule(sym: Symbol)(using Context): Seq[Diagnostic] = - import scala.util.control.NonFatal def load() = val objectName = sym.fullName.encode.toString Class.forName(objectName, true, classLoader()) @@ -169,14 +163,11 @@ private[repl] class Rendering(parentClassLoader: Option[ClassLoader] = None): try load() catch case e: ExceptionInInitializerError => List(renderError(e, sym.denot)) - case NonFatal(e) => List(renderError(InvocationTargetException(e), sym.denot)) + case NonFatal(e) => List(renderError(e, sym.denot)) /** Render the stack trace of the underlying exception. */ - def renderError(ite: InvocationTargetException | ExceptionInInitializerError, d: Denotation)(using Context): Diagnostic = - import dotty.tools.dotc.util.StackTraceOps._ - val cause = ite.getCause match - case e: ExceptionInInitializerError => e.getCause - case e => e + def renderError(thr: Throwable, d: Denotation)(using Context): Diagnostic = + val cause = rootCause(thr) // detect //at repl$.rs$line$2$.(rs$line$2:1) //at repl$.rs$line$2.res1(rs$line$2) @@ -190,7 +181,6 @@ private[repl] class Rendering(parentClassLoader: Option[ClassLoader] = None): private def infoDiagnostic(msg: String, d: Denotation)(using Context): Diagnostic = new Diagnostic.Info(msg, d.symbol.sourcePos) - object Rendering: final val REPL_WRAPPER_NAME_PREFIX = str.REPL_SESSION_LINE @@ -200,3 +190,12 @@ object Rendering: val text = printer.dclText(s) text.mkString(ctx.settings.pageWidth.value, ctx.settings.printLines.value) } + + def rootCause(x: Throwable): Throwable = x match + case _: ExceptionInInitializerError | + _: java.lang.reflect.InvocationTargetException | + _: java.lang.reflect.UndeclaredThrowableException | + _: java.util.concurrent.ExecutionException + if x.getCause != null => + rootCause(x.getCause) + case _ => x diff --git a/compiler/src/dotty/tools/repl/ReplCompiler.scala b/compiler/src/dotty/tools/repl/ReplCompiler.scala index 8db288f50aca..764695e8479b 100644 --- 
a/compiler/src/dotty/tools/repl/ReplCompiler.scala +++ b/compiler/src/dotty/tools/repl/ReplCompiler.scala @@ -62,8 +62,8 @@ class ReplCompiler extends Compiler: } val rootCtx = super.rootContext.fresh - .setOwner(defn.EmptyPackageClass) .withRootImports + .fresh.setOwner(defn.EmptyPackageClass): Context (state.validObjectIndexes).foldLeft(rootCtx)((ctx, id) => importPreviousRun(id)(using ctx)) } diff --git a/compiler/src/dotty/tools/repl/ReplDriver.scala b/compiler/src/dotty/tools/repl/ReplDriver.scala index 4fab4b119a08..905f4f06de08 100644 --- a/compiler/src/dotty/tools/repl/ReplDriver.scala +++ b/compiler/src/dotty/tools/repl/ReplDriver.scala @@ -37,6 +37,7 @@ import org.jline.reader._ import scala.annotation.tailrec import scala.collection.mutable import scala.jdk.CollectionConverters._ +import scala.util.control.NonFatal import scala.util.Using /** The state of the REPL contains necessary bindings instead of having to have @@ -118,7 +119,7 @@ class ReplDriver(settings: Array[String], private var rootCtx: Context = _ private var shouldStart: Boolean = _ private var compiler: ReplCompiler = _ - private var rendering: Rendering = _ + protected var rendering: Rendering = _ // initialize the REPL session as part of the constructor so that once `run` // is called, we're in business @@ -138,7 +139,7 @@ class ReplDriver(settings: Array[String], * observable outside of the CLI, for this reason, most helper methods are * `protected final` to facilitate testing. 
*/ - final def runUntilQuit(using initialState: State = initialState)(): State = { + def runUntilQuit(using initialState: State = initialState)(): State = { val terminal = new JLineTerminal out.println( @@ -176,24 +177,44 @@ class ReplDriver(settings: Array[String], interpret(ParseResult.complete(input)) } - private def runBody(body: => State): State = rendering.classLoader()(using rootCtx).asContext(withRedirectedOutput(body)) + final def runQuietly(input: String)(using State): State = runBody { + val parsed = ParseResult(input) + interpret(parsed, quiet = true) + } + + protected def runBody(body: => State): State = rendering.classLoader()(using rootCtx).asContext(withRedirectedOutput(body)) // TODO: i5069 final def bind(name: String, value: Any)(using state: State): State = state + /** + * Controls whether the `System.out` and `System.err` streams are set to the provided constructor parameter instance + * of [[java.io.PrintStream]] during the execution of the repl. On by default. + * + * Disabling this can be beneficial when executing a repl instance inside a concurrent environment, for example a + * thread pool (such as the Scala compile server in the Scala Plugin for IntelliJ IDEA). + * + * In such environments, indepently executing `System.setOut` and `System.setErr` without any synchronization can + * lead to unpredictable results when restoring the original streams (dependent on the order of execution), leaving + * the Java process in an inconsistent state. 
+ */ + protected def redirectOutput: Boolean = true + // redirecting the output allows us to test `println` in scripted tests private def withRedirectedOutput(op: => State): State = { - val savedOut = System.out - val savedErr = System.err - try { - System.setOut(out) - System.setErr(out) - op - } - finally { - System.setOut(savedOut) - System.setErr(savedErr) - } + if redirectOutput then + val savedOut = System.out + val savedErr = System.err + try { + System.setOut(out) + System.setErr(out) + op + } + finally { + System.setOut(savedOut) + System.setErr(savedErr) + } + else op } private def newRun(state: State, reporter: StoreReporter = newStoreReporter) = { @@ -236,16 +257,16 @@ class ReplDriver(settings: Array[String], unit.tpdTree = tree given Context = state.context.fresh.setCompilationUnit(unit) val srcPos = SourcePosition(file, Span(cursor)) - val (_, completions) = Completion.completions(srcPos) + val completions = try Completion.completions(srcPos)._2 catch case NonFatal(_) => Nil completions.map(_.label).distinct.map(makeCandidate) } .getOrElse(Nil) end completions - private def interpret(res: ParseResult)(using state: State): State = { + protected def interpret(res: ParseResult, quiet: Boolean = false)(using state: State): State = { res match { case parsed: Parsed if parsed.trees.nonEmpty => - compile(parsed, state) + compile(parsed, state, quiet) case SyntaxErrors(_, errs, _) => displayErrors(errs) @@ -263,7 +284,7 @@ class ReplDriver(settings: Array[String], } /** Compile `parsed` trees and evolve `state` in accordance */ - private def compile(parsed: Parsed, istate: State): State = { + private def compile(parsed: Parsed, istate: State, quiet: Boolean = false): State = { def extractNewestWrapper(tree: untpd.Tree): Name = tree match { case PackageDef(_, (obj: untpd.ModuleDef) :: Nil) => obj.name.moduleClassName case _ => nme.NO_NAME @@ -314,9 +335,11 @@ class ReplDriver(settings: Array[String], given Ordering[Diagnostic] = Ordering[(Int, Int, Int)].on(d 
=> (d.pos.line, -d.level, d.pos.column)) - (definitions ++ warnings) - .sorted - .foreach(printDiagnostic) + if (!quiet) { + (definitions ++ warnings) + .sorted + .foreach(printDiagnostic) + } updatedState } diff --git a/compiler/src/dotty/tools/runner/ScalaClassLoader.scala b/compiler/src/dotty/tools/runner/ScalaClassLoader.scala index 3c8c51d8d6b2..9ec0199abcbb 100644 --- a/compiler/src/dotty/tools/runner/ScalaClassLoader.scala +++ b/compiler/src/dotty/tools/runner/ScalaClassLoader.scala @@ -67,7 +67,7 @@ object ScalaClassLoader { @sharable private[this] val bootClassLoader: ClassLoader = if scala.util.Properties.isJavaAtLeast("9") then try - ClassLoader.getSystemClassLoader.getParent + ClassLoader.getSystemClassLoader.getParent catch case _: Throwable => null else null diff --git a/compiler/src/scala/quoted/runtime/impl/ExprImpl.scala b/compiler/src/scala/quoted/runtime/impl/ExprImpl.scala index b33ba14b9e70..5fac91124187 100644 --- a/compiler/src/scala/quoted/runtime/impl/ExprImpl.scala +++ b/compiler/src/scala/quoted/runtime/impl/ExprImpl.scala @@ -20,6 +20,4 @@ final class ExprImpl(val tree: tpd.Tree, val scope: Scope) extends Expr[Any] { } override def hashCode(): Int = tree.hashCode() - - override def toString: String = "'{ ... }" } diff --git a/compiler/src/scala/quoted/runtime/impl/QuoteMatcher.scala b/compiler/src/scala/quoted/runtime/impl/QuoteMatcher.scala index d85d92de5455..bfa4c1c6d1f2 100644 --- a/compiler/src/scala/quoted/runtime/impl/QuoteMatcher.scala +++ b/compiler/src/scala/quoted/runtime/impl/QuoteMatcher.scala @@ -1,7 +1,6 @@ package scala.quoted package runtime.impl - import dotty.tools.dotc.ast.tpd import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.core.Flags.* @@ -9,6 +8,7 @@ import dotty.tools.dotc.core.Names.* import dotty.tools.dotc.core.Types.* import dotty.tools.dotc.core.StdNames.nme import dotty.tools.dotc.core.Symbols.* +import dotty.tools.dotc.util.optional /** Matches a quoted tree against a quoted pattern tree. 
* A quoted pattern tree may have type and term holes in addition to normal terms. @@ -103,12 +103,13 @@ import dotty.tools.dotc.core.Symbols.* object QuoteMatcher { import tpd.* - // TODO improve performance - // TODO use flag from Context. Maybe -debug or add -debug-macros private inline val debug = false - import Matching._ + /** Sequence of matched expressions. + * These expressions are part of the scrutinee and will be bound to the quote pattern term splices. + */ + type MatchingExprs = Seq[MatchResult] /** A map relating equivalent symbols from the scrutinee and the pattern * For example in @@ -121,32 +122,34 @@ object QuoteMatcher { private def withEnv[T](env: Env)(body: Env ?=> T): T = body(using env) - def treeMatch(scrutineeTerm: Tree, patternTerm: Tree)(using Context): Option[Tuple] = + def treeMatch(scrutineeTree: Tree, patternTree: Tree)(using Context): Option[MatchingExprs] = given Env = Map.empty - scrutineeTerm =?= patternTerm + optional: + scrutineeTree =?= patternTree /** Check that all trees match with `mtch` and concatenate the results with &&& */ - private def matchLists[T](l1: List[T], l2: List[T])(mtch: (T, T) => Matching): Matching = (l1, l2) match { + private def matchLists[T](l1: List[T], l2: List[T])(mtch: (T, T) => MatchingExprs): optional[MatchingExprs] = (l1, l2) match { case (x :: xs, y :: ys) => mtch(x, y) &&& matchLists(xs, ys)(mtch) case (Nil, Nil) => matched case _ => notMatched } extension (scrutinees: List[Tree]) - private def =?= (patterns: List[Tree])(using Env, Context): Matching = + private def =?= (patterns: List[Tree])(using Env, Context): optional[MatchingExprs] = matchLists(scrutinees, patterns)(_ =?= _) extension (scrutinee0: Tree) /** Check that the trees match and return the contents from the pattern holes. - * Return None if the trees do not match otherwise return Some of a tuple containing all the contents in the holes. + * Return a sequence containing all the contents in the holes. 
+ * If it does not match, continues to the `optional` with `None`. * * @param scrutinee The tree being matched * @param pattern The pattern tree that the scrutinee should match. Contains `patternHole` holes. * @param `summon[Env]` Set of tuples containing pairs of symbols (s, p) where s defines a symbol in `scrutinee` which corresponds to symbol p in `pattern`. - * @return `None` if it did not match or `Some(tup: Tuple)` if it matched where `tup` contains the contents of the holes. + * @return The sequence with the contents of the holes of the matched expression. */ - private def =?= (pattern0: Tree)(using Env, Context): Matching = + private def =?= (pattern0: Tree)(using Env, Context): optional[MatchingExprs] = /* Match block flattening */ // TODO move to cases /** Normalize the tree */ @@ -203,31 +206,12 @@ object QuoteMatcher { // Matches an open term and wraps it into a lambda that provides the free variables case Apply(TypeApply(Ident(_), List(TypeTree())), SeqLiteral(args, _) :: Nil) if pattern.symbol.eq(defn.QuotedRuntimePatterns_higherOrderHole) => - def hoasClosure = { - val names: List[TermName] = args.map { - case Block(List(DefDef(nme.ANON_FUN, _, _, Apply(Ident(name), _))), _) => name.asTermName - case arg => arg.symbol.name.asTermName - } - val argTypes = args.map(x => x.tpe.widenTermRefExpr) - val methTpe = MethodType(names)(_ => argTypes, _ => pattern.tpe) - val meth = newAnonFun(ctx.owner, methTpe) - def bodyFn(lambdaArgss: List[List[Tree]]): Tree = { - val argsMap = args.map(_.symbol).zip(lambdaArgss.head).toMap - val body = new TreeMap { - override def transform(tree: Tree)(using Context): Tree = - tree match - case tree: Ident => summon[Env].get(tree.symbol).flatMap(argsMap.get).getOrElse(tree) - case tree => super.transform(tree) - }.transform(scrutinee) - TreeOps(body).changeNonLocalOwners(meth) - } - Closure(meth, bodyFn) - } + val env = summon[Env] val capturedArgs = args.map(_.symbol) - val captureEnv = summon[Env].filter((k, v) => 
!capturedArgs.contains(v)) + val captureEnv = env.filter((k, v) => !capturedArgs.contains(v)) withEnv(captureEnv) { scrutinee match - case ClosedPatternTerm(scrutinee) => matched(hoasClosure) + case ClosedPatternTerm(scrutinee) => matchedOpen(scrutinee, pattern.tpe, args, env) case _ => notMatched } @@ -317,7 +301,7 @@ object QuoteMatcher { /* Match new */ case New(tpt1) => pattern match - case New(tpt2) if tpt1.tpe.typeSymbol == tpt2.tpe.typeSymbol => matched + case New(tpt2) if tpt1.tpe.dealias.typeSymbol == tpt2.tpe.dealias.typeSymbol => matched case _ => notMatched /* Match this */ @@ -431,7 +415,6 @@ object QuoteMatcher { case _ => scrutinee val pattern = patternTree.symbol - devirtualizedScrutinee == pattern || summon[Env].get(devirtualizedScrutinee).contains(pattern) || devirtualizedScrutinee.allOverriddenSymbols.contains(pattern) @@ -452,32 +435,67 @@ object QuoteMatcher { accumulator.apply(Set.empty, term) } - /** Result of matching a part of an expression */ - private type Matching = Option[Tuple] - - private object Matching { - - def notMatched: Matching = None - - val matched: Matching = Some(Tuple()) - - def matched(tree: Tree)(using Context): Matching = - Some(Tuple1(new ExprImpl(tree, SpliceScope.getCurrent))) - - extension (self: Matching) - def asOptionOfTuple: Option[Tuple] = self - - /** Concatenates the contents of two successful matchings or return a `notMatched` */ - def &&& (that: => Matching): Matching = self match { - case Some(x) => - that match { - case Some(y) => Some(x ++ y) - case _ => None - } - case _ => None - } - end extension - - } + enum MatchResult: + /** Closed pattern extracted value + * @param tree Scrutinee sub-tree that matched + */ + case ClosedTree(tree: Tree) + /** HOAS pattern extracted value + * + * @param tree Scrutinee sub-tree that matched + * @param patternTpe Type of the pattern hole (from the pattern) + * @param args HOAS arguments (from the pattern) + * @param env Mapping between scrutinee and pattern variables 
+ */ + case OpenTree(tree: Tree, patternTpe: Type, args: List[Tree], env: Env) + + /** Return the expression that was extracted from a hole. + * + * If it was a closed expression it returns that expression. Otherwise, + * if it is a HOAS pattern, the surrounding lambda is generated using + * `mapTypeHoles` to create the signature of the lambda. + * + * This expression is assumed to be a valid expression in the given splice scope. + */ + def toExpr(mapTypeHoles: TypeMap, spliceScope: Scope)(using Context): Expr[Any] = this match + case MatchResult.ClosedTree(tree) => + new ExprImpl(tree, spliceScope) + case MatchResult.OpenTree(tree, patternTpe, args, env) => + val names: List[TermName] = args.map { + case Block(List(DefDef(nme.ANON_FUN, _, _, Apply(Ident(name), _))), _) => name.asTermName + case arg => arg.symbol.name.asTermName + } + val paramTypes = args.map(x => mapTypeHoles(x.tpe.widenTermRefExpr)) + val methTpe = MethodType(names)(_ => paramTypes, _ => mapTypeHoles(patternTpe)) + val meth = newAnonFun(ctx.owner, methTpe) + def bodyFn(lambdaArgss: List[List[Tree]]): Tree = { + val argsMap = args.view.map(_.symbol).zip(lambdaArgss.head).toMap + val body = new TreeMap { + override def transform(tree: Tree)(using Context): Tree = + tree match + case tree: Ident => env.get(tree.symbol).flatMap(argsMap.get).getOrElse(tree) + case tree => super.transform(tree) + }.transform(tree) + TreeOps(body).changeNonLocalOwners(meth) + } + val hoasClosure = Closure(meth, bodyFn) + new ExprImpl(hoasClosure, spliceScope) + + private inline def notMatched: optional[MatchingExprs] = + optional.break() + + private inline def matched: MatchingExprs = + Seq.empty + + private inline def matched(tree: Tree)(using Context): MatchingExprs = + Seq(MatchResult.ClosedTree(tree)) + + private def matchedOpen(tree: Tree, patternTpe: Type, args: List[Tree], env: Env)(using Context): MatchingExprs = + Seq(MatchResult.OpenTree(tree, patternTpe, args, env)) + + extension (self: MatchingExprs) + /** 
Concatenates the contents of two successful matchings */ + def &&& (that: MatchingExprs): MatchingExprs = self ++ that + end extension } diff --git a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala index f8e439baeb0e..db4e3e6c6a05 100644 --- a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala +++ b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala @@ -8,15 +8,15 @@ import dotty.tools.dotc.ast.tpd import dotty.tools.dotc.ast.untpd import dotty.tools.dotc.core.Annotations import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Types -import dotty.tools.dotc.core.Flags._ +import dotty.tools.dotc.core.Decorators._ import dotty.tools.dotc.core.NameKinds +import dotty.tools.dotc.core.NameOps._ import dotty.tools.dotc.core.StdNames._ -import dotty.tools.dotc.quoted.reflect._ -import dotty.tools.dotc.core.Decorators._ +import dotty.tools.dotc.core.Types import dotty.tools.dotc.NoCompilationUnit - -import dotty.tools.dotc.quoted.{MacroExpansion, PickledQuotes} +import dotty.tools.dotc.quoted.MacroExpansion +import dotty.tools.dotc.quoted.PickledQuotes +import dotty.tools.dotc.quoted.reflect._ import scala.quoted.runtime.{QuoteUnpickler, QuoteMatching} import scala.quoted.runtime.impl.printers._ @@ -242,6 +242,14 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def unapply(cdef: ClassDef): (String, DefDef, List[Tree /* Term | TypeTree */], Option[ValDef], List[Statement]) = val rhs = cdef.rhs.asInstanceOf[tpd.Template] (cdef.name.toString, cdef.constructor, cdef.parents, cdef.self, rhs.body) + + def module(module: Symbol, parents: List[Tree /* Term | TypeTree */], body: List[Statement]): (ValDef, ClassDef) = { + val cls = module.moduleClass + val clsDef = ClassDef(cls, parents, body) + val newCls = Apply(Select(New(TypeIdent(cls)), cls.primaryConstructor), Nil) + val modVal = ValDef(module, Some(newCls)) + (modVal, clsDef) + } end ClassDef given 
ClassDefMethods: ClassDefMethods with @@ -267,12 +275,13 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler object DefDef extends DefDefModule: def apply(symbol: Symbol, rhsFn: List[List[Tree]] => Option[Term]): DefDef = - assert(symbol.isTerm, s"expected a term symbol but received $symbol") + xCheckMacroAssert(symbol.isTerm, s"expected a term symbol but received $symbol") + xCheckMacroAssert(symbol.flags.is(Flags.Method), "expected a symbol with `Method` flag set") withDefaultPos(tpd.DefDef(symbol.asTerm, prefss => - xCheckMacroedOwners(xCheckMacroValidExpr(rhsFn(prefss)), symbol).getOrElse(tpd.EmptyTree) + xCheckedMacroOwners(xCheckMacroValidExpr(rhsFn(prefss)), symbol).getOrElse(tpd.EmptyTree) )) def copy(original: Tree)(name: String, paramss: List[ParamClause], tpt: TypeTree, rhs: Option[Term]): DefDef = - tpd.cpy.DefDef(original)(name.toTermName, paramss, tpt, xCheckMacroedOwners(rhs, original.symbol).getOrElse(tpd.EmptyTree)) + tpd.cpy.DefDef(original)(name.toTermName, paramss, tpt, xCheckedMacroOwners(rhs, original.symbol).getOrElse(tpd.EmptyTree)) def unapply(ddef: DefDef): (String, List[ParamClause], TypeTree, Option[Term]) = (ddef.name.toString, ddef.paramss, ddef.tpt, optional(ddef.rhs)) end DefDef @@ -298,9 +307,10 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler object ValDef extends ValDefModule: def apply(symbol: Symbol, rhs: Option[Term]): ValDef = - tpd.ValDef(symbol.asTerm, xCheckMacroedOwners(xCheckMacroValidExpr(rhs), symbol).getOrElse(tpd.EmptyTree)) + xCheckMacroAssert(!symbol.flags.is(Flags.Method), "expected a symbol without `Method` flag set") + withDefaultPos(tpd.ValDef(symbol.asTerm, xCheckedMacroOwners(xCheckMacroValidExpr(rhs), symbol).getOrElse(tpd.EmptyTree))) def copy(original: Tree)(name: String, tpt: TypeTree, rhs: Option[Term]): ValDef = - tpd.cpy.ValDef(original)(name.toTermName, tpt, xCheckMacroedOwners(xCheckMacroValidExpr(rhs), 
original.symbol).getOrElse(tpd.EmptyTree)) + tpd.cpy.ValDef(original)(name.toTermName, tpt, xCheckedMacroOwners(xCheckMacroValidExpr(rhs), original.symbol).getOrElse(tpd.EmptyTree)) def unapply(vdef: ValDef): (String, TypeTree, Option[Term]) = (vdef.name.toString, vdef.tpt, optional(vdef.rhs)) @@ -362,16 +372,15 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler object Term extends TermModule: def betaReduce(tree: Term): Option[Term] = tree match - case app @ tpd.Apply(tpd.Select(fn, nme.apply), args) if dotc.core.Symbols.defn.isFunctionType(fn.tpe) => - val app1 = dotc.transform.BetaReduce(app, fn, args) - if app1 eq app then None - else Some(app1.withSpan(tree.span)) case tpd.Block(Nil, expr) => for e <- betaReduce(expr) yield tpd.cpy.Block(tree)(Nil, e) case tpd.Inlined(_, Nil, expr) => betaReduce(expr) case _ => - None + val tree1 = dotc.transform.BetaReduce(tree) + if tree1 eq tree then None + else Some(tree1.withSpan(tree.span)) + end Term given TermMethods: TermMethods with @@ -390,7 +399,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def etaExpand(owner: Symbol): Term = self.tpe.widen match { case mtpe: Types.MethodType if !mtpe.isParamDependent => val closureResType = mtpe.resType match { - case t: Types.MethodType => t.toFunctionType(isJava = self.symbol.is(JavaDefined)) + case t: Types.MethodType => t.toFunctionType(isJava = self.symbol.is(dotc.core.Flags.JavaDefined)) case t => t } val closureTpe = Types.MethodType(mtpe.paramNames, mtpe.paramInfos, closureResType) @@ -601,11 +610,13 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler end extension end NamedArgMethods - type Apply = tpd.Apply + type Apply = tpd.Apply | tpd.Quote | tpd.Splice object ApplyTypeTest extends TypeTest[Tree, Apply]: def unapply(x: Tree): Option[Apply & x.type] = x match case x: (tpd.Apply & x.type) => Some(x) + case x: (tpd.Quote & x.type) => Some(x) // TODO expose Quote AST 
in Quotes + case x: (tpd.Splice & x.type) => Some(x) // TODO expose Splice AST in Quotes case _ => None end ApplyTypeTest @@ -622,8 +633,23 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler given ApplyMethods: ApplyMethods with extension (self: Apply) - def fun: Term = self.fun - def args: List[Term] = self.args + def fun: Term = self match + case self: tpd.Apply => self.fun + case self: tpd.Quote => // TODO expose Quote AST in Quotes + import dotty.tools.dotc.ast.tpd.TreeOps + tpd.ref(dotc.core.Symbols.defn.QuotedRuntime_exprQuote) + .appliedToType(self.bodyType) + .withSpan(self.span) + case self: tpd.Splice => // TODO expose Splice AST in Quotes + import dotty.tools.dotc.ast.tpd.TreeOps + tpd.ref(dotc.core.Symbols.defn.QuotedRuntime_exprSplice) + .appliedToType(self.tpe) + .withSpan(self.span) + + def args: List[Term] = self match + case self: tpd.Apply => self.args + case self: tpd.Quote => List(self.body) // TODO expose Quote AST in Quotes + case self: tpd.Splice => List(self.expr) // TODO expose Splice AST in Quotes end extension end ApplyMethods @@ -803,7 +829,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler object Lambda extends LambdaModule: def apply(owner: Symbol, tpe: MethodType, rhsFn: (Symbol, List[Tree]) => Tree): Block = val meth = dotc.core.Symbols.newAnonFun(owner, tpe) - withDefaultPos(tpd.Closure(meth, tss => xCheckMacroedOwners(xCheckMacroValidExpr(rhsFn(meth, tss.head.map(withDefaultPos))), meth))) + withDefaultPos(tpd.Closure(meth, tss => xCheckedMacroOwners(xCheckMacroValidExpr(rhsFn(meth, tss.head.map(withDefaultPos))), meth))) def unapply(tree: Block): Option[(List[ValDef], Term)] = tree match { case Block((ddef @ DefDef(_, tpd.ValDefs(params) :: Nil, _, Some(body))) :: Nil, Closure(meth, _)) @@ -1474,7 +1500,8 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler object Bind extends BindModule: def apply(sym: Symbol, pattern: Tree): Bind = 
- tpd.Bind(sym, pattern) + xCheckMacroAssert(sym.flags.is(Flags.Case), "expected a symbol with `Case` flag set") + withDefaultPos(tpd.Bind(sym, pattern)) def copy(original: Tree)(name: String, pattern: Tree): Bind = withDefaultPos(tpd.cpy.Bind(original)(name.toTermName, pattern)) def unapply(pattern: Bind): (String, Tree) = @@ -1573,8 +1600,12 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler self.nonEmpty && self.head.symbol.is(dotc.core.Flags.Implicit) def isGiven: Boolean = self.nonEmpty && self.head.symbol.is(dotc.core.Flags.Given) - def isErased: Boolean = - self.nonEmpty && self.head.symbol.is(dotc.core.Flags.Erased) + def isErased: Boolean = false + + def erasedArgs: List[Boolean] = + self.map(_.symbol.is(dotc.core.Flags.Erased)) + def hasErasedArgs: Boolean = + self.exists(_.symbol.is(dotc.core.Flags.Erased)) end TermParamClauseMethods type TypeParamClause = List[tpd.TypeDef] @@ -2131,9 +2162,12 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler given MethodTypeMethods: MethodTypeMethods with extension (self: MethodType) - def isErased: Boolean = self.isErasedMethod + def isErased: Boolean = false def isImplicit: Boolean = self.isImplicitMethod def param(idx: Int): TypeRepr = self.newParamRef(idx) + + def erasedParams: List[Boolean] = self.erasedParams + def hasErasedParams: Boolean = self.hasErasedParams end extension end MethodTypeMethods @@ -2159,11 +2193,11 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler end extension end PolyTypeMethods - type TypeLambda = dotc.core.Types.TypeLambda + type TypeLambda = dotc.core.Types.HKTypeLambda object TypeLambdaTypeTest extends TypeTest[TypeRepr, TypeLambda]: def unapply(x: TypeRepr): Option[TypeLambda & x.type] = x match - case tpe: (Types.TypeLambda & x.type) => Some(tpe) + case tpe: (Types.HKTypeLambda & x.type) => Some(tpe) case _ => None end TypeLambdaTypeTest @@ -2395,7 +2429,13 @@ class QuotesImpl private 
(using val ctx: Context) extends Quotes, QuoteUnpickler object Implicits extends ImplicitsModule: def search(tpe: TypeRepr): ImplicitSearchResult = - ctx.typer.inferImplicitArg(tpe, Position.ofMacroExpansion.span) + import tpd.TreeOps + val implicitTree = ctx.typer.inferImplicitArg(tpe, Position.ofMacroExpansion.span) + // Make sure that we do not have any uninstantiated type variables. + // See tests/pos-macros/i16636. + // See tests/pos-macros/exprSummonWithTypeVar with -Xcheck-macros. + dotc.typer.Inferencing.fullyDefinedType(implicitTree.tpe, "", implicitTree) + implicitTree end Implicits type ImplicitSearchResult = Tree @@ -2481,15 +2521,47 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler for sym <- decls(cls) do cls.enter(sym) cls + def newModule(owner: Symbol, name: String, modFlags: Flags, clsFlags: Flags, parents: List[TypeRepr], decls: Symbol => List[Symbol], privateWithin: Symbol): Symbol = + assert(parents.nonEmpty && !parents.head.typeSymbol.is(dotc.core.Flags.Trait), "First parent must be a class") + assert(!privateWithin.exists || privateWithin.isType, "privateWithin must be a type symbol or `Symbol.noSymbol`") + val mod = dotc.core.Symbols.newNormalizedModuleSymbol( + owner, + name.toTermName, + modFlags | dotc.core.Flags.ModuleValCreationFlags, + clsFlags | dotc.core.Flags.ModuleClassCreationFlags, + parents, + dotc.core.Scopes.newScope, + privateWithin) + val cls = mod.moduleClass.asClass + cls.enter(dotc.core.Symbols.newConstructor(cls, dotc.core.Flags.Synthetic, Nil, Nil)) + for sym <- decls(cls) do cls.enter(sym) + mod + def newMethod(owner: Symbol, name: String, tpe: TypeRepr): Symbol = newMethod(owner, name, tpe, Flags.EmptyFlags, noSymbol) def newMethod(owner: Symbol, name: String, tpe: TypeRepr, flags: Flags, privateWithin: Symbol): Symbol = - dotc.core.Symbols.newSymbol(owner, name.toTermName, flags | dotc.core.Flags.Method, tpe, privateWithin) + xCheckMacroAssert(!privateWithin.exists || 
privateWithin.isType, "privateWithin must be a type symbol or `Symbol.noSymbol`") + val privateWithin1 = if privateWithin.isTerm then Symbol.noSymbol else privateWithin + checkValidFlags(flags.toTermFlags, Flags.validMethodFlags) + dotc.core.Symbols.newSymbol(owner, name.toTermName, flags | dotc.core.Flags.Method, tpe, privateWithin1) def newVal(owner: Symbol, name: String, tpe: TypeRepr, flags: Flags, privateWithin: Symbol): Symbol = - dotc.core.Symbols.newSymbol(owner, name.toTermName, flags, tpe, privateWithin) + xCheckMacroAssert(!privateWithin.exists || privateWithin.isType, "privateWithin must be a type symbol or `Symbol.noSymbol`") + val privateWithin1 = if privateWithin.isTerm then Symbol.noSymbol else privateWithin + checkValidFlags(flags.toTermFlags, Flags.validValFlags) + dotc.core.Symbols.newSymbol(owner, name.toTermName, flags, tpe, privateWithin1) def newBind(owner: Symbol, name: String, flags: Flags, tpe: TypeRepr): Symbol = - dotc.core.Symbols.newSymbol(owner, name.toTermName, flags | Case, tpe) + checkValidFlags(flags.toTermFlags, Flags.validBindFlags) + dotc.core.Symbols.newSymbol(owner, name.toTermName, flags | dotc.core.Flags.Case, tpe) def noSymbol: Symbol = dotc.core.Symbols.NoSymbol + + private inline def checkValidFlags(inline flags: Flags, inline valid: Flags): Unit = + xCheckMacroAssert( + flags <= valid, + s"Received invalid flags. Expected flags ${flags.show} to only contain a subset of ${valid.show}." 
+ ) + + def freshName(prefix: String): String = + NameKinds.MacroNames.fresh(prefix.toTermName).toString end Symbol given SymbolMethods: SymbolMethods with @@ -2513,6 +2585,8 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def name: String = self.denot.name.toString def fullName: String = self.denot.fullName.toString + def info: TypeRepr = self.denot.info + def pos: Option[Position] = if self.exists then Some(self.sourcePos) else None @@ -2558,7 +2632,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler self.isTerm && !self.is(dotc.core.Flags.Method) && !self.is(dotc.core.Flags.Case/*, FIXME add this check and fix sourcecode butNot = Enum | Module*/) def isDefDef: Boolean = self.is(dotc.core.Flags.Method) def isBind: Boolean = - self.is(dotc.core.Flags.Case, butNot = Enum | Module) && !self.isClass + self.is(dotc.core.Flags.Case, butNot = dotc.core.Flags.Enum | dotc.core.Flags.Module) && !self.isClass def isNoSymbol: Boolean = self == Symbol.noSymbol def exists: Boolean = self != Symbol.noSymbol @@ -2619,13 +2693,15 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler case sym if sym.isType => sym.asType }.toList - def memberType(name: String): Symbol = typeMember(name) + def memberType(name: String): Symbol = + self.typeRef.decls.find(sym => sym.name == name.toTypeName) def typeMember(name: String): Symbol = - self.unforcedDecls.find(sym => sym.name == name.toTypeName) + lookupPrefix.member(name.toTypeName).symbol - def memberTypes: List[Symbol] = typeMembers + def memberTypes: List[Symbol] = + self.typeRef.decls.filter(_.isType) def typeMembers: List[Symbol] = - self.unforcedDecls.filter(_.isType) + lookupPrefix.typeMembers.map(_.symbol).toList def declarations: List[Symbol] = self.typeRef.info.decls.toList @@ -2654,7 +2730,9 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def show(using printer: Printer[Symbol]): String = 
printer.show(self) - def asQuotes: Nested = new QuotesImpl(using ctx.withOwner(self)) + def asQuotes: Nested = + assert(self.ownersIterator.contains(ctx.owner), s"$self is not owned by ${ctx.owner}") + new QuotesImpl(using ctx.withOwner(self)) end extension @@ -2729,7 +2807,11 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def SomeModule: Symbol = dotc.core.Symbols.defn.SomeClass.companionModule def ProductClass: Symbol = dotc.core.Symbols.defn.ProductClass def FunctionClass(arity: Int, isImplicit: Boolean = false, isErased: Boolean = false): Symbol = - dotc.core.Symbols.defn.FunctionSymbol(arity, isImplicit, isErased) + if arity < 0 then throw IllegalArgumentException(s"arity: $arity") + if isErased then + throw new Exception("Erased function classes are not supported. Use a refined `scala.runtime.ErasedFunction`") + else dotc.core.Symbols.defn.FunctionSymbol(arity, isImplicit) + def ErasedFunctionClass = dotc.core.Symbols.defn.ErasedFunctionClass def TupleClass(arity: Int): Symbol = dotc.core.Symbols.defn.TupleType(arity).nn.classSymbol.asClass def isTupleClass(sym: Symbol): Boolean = @@ -2744,6 +2826,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler object Flags extends FlagsModule: def Abstract: Flags = dotc.core.Flags.Abstract + def AbsOverride: Flags = dotc.core.Flags.AbsOverride def Artifact: Flags = dotc.core.Flags.Artifact def Case: Flags = dotc.core.Flags.Case def CaseAccessor: Flags = dotc.core.Flags.CaseAccessor @@ -2765,6 +2848,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def Invisible: Flags = dotc.core.Flags.Invisible def JavaDefined: Flags = dotc.core.Flags.JavaDefined def JavaStatic: Flags = dotc.core.Flags.JavaStatic + def JavaAnnotation: Flags = dotc.core.Flags.JavaAnnotation def Lazy: Flags = dotc.core.Flags.Lazy def Local: Flags = dotc.core.Flags.Local def Macro: Flags = dotc.core.Flags.Macro @@ -2784,10 +2868,17 @@ class QuotesImpl 
private (using val ctx: Context) extends Quotes, QuoteUnpickler def Scala2x: Flags = dotc.core.Flags.Scala2x def Sealed: Flags = dotc.core.Flags.Sealed def StableRealizable: Flags = dotc.core.Flags.StableRealizable - def Static: Flags = dotc.core.Flags.JavaStatic + @deprecated("Use JavaStatic instead", "3.3.0") def Static: Flags = dotc.core.Flags.JavaStatic def Synthetic: Flags = dotc.core.Flags.Synthetic def Trait: Flags = dotc.core.Flags.Trait def Transparent: Flags = dotc.core.Flags.Transparent + + // Keep: aligned with Quotes's `newMethod` doc + private[QuotesImpl] def validMethodFlags: Flags = Private | Protected | Override | Deferred | Final | Method | Implicit | Given | Local | AbsOverride | JavaStatic // Flags that could be allowed: Synthetic | ExtensionMethod | Exported | Erased | Infix | Invisible + // Keep: aligned with Quotes's `newVal` doc + private[QuotesImpl] def validValFlags: Flags = Private | Protected | Override | Deferred | Final | Param | Implicit | Lazy | Mutable | Local | ParamAccessor | Module | Package | Case | CaseAccessor | Given | Enum | AbsOverride | JavaStatic // Flags that could be added: Synthetic | Erased | Invisible + // Keep: aligned with Quotes's `newBind` doc + private[QuotesImpl] def validBindFlags: Flags = Case // Flags that could be allowed: Implicit | Given | Erased end Flags given FlagsMethods: FlagsMethods with @@ -2908,7 +2999,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler /** Checks that all definitions in this tree have the expected owner. * Nested definitions are ignored and assumed to be correct by construction. 
*/ - private def xCheckMacroedOwners(tree: Option[Tree], owner: Symbol): tree.type = + private def xCheckedMacroOwners(tree: Option[Tree], owner: Symbol): tree.type = if xCheckMacro then tree match case Some(tree) => @@ -2919,7 +3010,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler /** Checks that all definitions in this tree have the expected owner. * Nested definitions are ignored and assumed to be correct by construction. */ - private def xCheckMacroedOwners(tree: Tree, owner: Symbol): tree.type = + private def xCheckedMacroOwners(tree: Tree, owner: Symbol): tree.type = if xCheckMacro then xCheckMacroOwners(tree, owner) tree @@ -2933,12 +3024,16 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler t match case t: tpd.DefTree => val defOwner = t.symbol.owner - assert(defOwner == owner, + assert(defOwner == owner, { + val ownerName = owner.fullName + val defOwnerName = defOwner.fullName + val duplicateSymbolHint = + if ownerName == defOwnerName then "These are two different symbols instances with the same name. The symbol should have been instantiated only once.\n" + else "" s"""Tree had an unexpected owner for ${t.symbol} |Expected: $owner (${owner.fullName}) |But was: $defOwner (${defOwner.fullName}) - | - | + |$duplicateSymbolHint |The code of the definition of ${t.symbol} is |${Printer.TreeCode.show(t)} | @@ -2952,7 +3047,8 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler | |Tip: The owner of a tree can be changed using method `Tree.changeOwner`. |Tip: The default owner of definitions created in quotes can be changed using method `Symbol.asQuotes`. 
- |""".stripMargin) + |""".stripMargin + }) case _ => traverseChildren(t) }.traverse(tree) @@ -2990,6 +3086,9 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler "Reference to a method must be eta-expanded before it is used as an expression: " + term.show) term + private inline def xCheckMacroAssert(inline cond: Boolean, inline msg: String): Unit = + assert(!xCheckMacro || cond, msg) + object Printer extends PrinterModule: lazy val TreeCode: Printer[Tree] = new Printer[Tree]: @@ -3026,7 +3125,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler lazy val ConstantCode: Printer[Constant] = new Printer[Constant]: def show(const: Constant): String = - const.show(using ctx.fresh.setSetting(ctx.settings.color, "never")) + const.show(using ctx.withoutColors) lazy val ConstantStructure: Printer[Constant] = new Printer[Constant]: def show(const: Constant): String = @@ -3053,14 +3152,14 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler new TypeImpl(tree, SpliceScope.getCurrent).asInstanceOf[scala.quoted.Type[T]] object ExprMatch extends ExprMatchModule: - def unapply[TypeBindings <: Tuple, Tup <: Tuple](scrutinee: scala.quoted.Expr[Any])(using pattern: scala.quoted.Expr[Any]): Option[Tup] = + def unapply[TypeBindings, Tup <: Tuple](scrutinee: scala.quoted.Expr[Any])(using pattern: scala.quoted.Expr[Any]): Option[Tup] = val scrutineeTree = reflect.asTerm(scrutinee) val patternTree = reflect.asTerm(pattern) treeMatch(scrutineeTree, patternTree).asInstanceOf[Option[Tup]] end ExprMatch object TypeMatch extends TypeMatchModule: - def unapply[TypeBindings <: Tuple, Tup <: Tuple](scrutinee: scala.quoted.Type[?])(using pattern: scala.quoted.Type[?]): Option[Tup] = + def unapply[TypeBindings, Tup <: Tuple](scrutinee: scala.quoted.Type[?])(using pattern: scala.quoted.Type[?]): Option[Tup] = val scrutineeTree = reflect.TypeTree.of(using scrutinee) val patternTree = 
reflect.TypeTree.of(using pattern) treeMatch(scrutineeTree, patternTree).asInstanceOf[Option[Tup]] @@ -3090,23 +3189,30 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler if typeHoles.isEmpty then ctx else val ctx1 = ctx.fresh.setFreshGADTBounds.addMode(dotc.core.Mode.GadtConstraintInference) - ctx1.gadt.addToConstraint(typeHoles) + ctx1.gadtState.addToConstraint(typeHoles) ctx1 - val matchings = QuoteMatcher.treeMatch(scrutinee, pat1)(using ctx1) - - if typeHoles.isEmpty then matchings - else { - // After matching and doing all subtype checks, we have to approximate all the type bindings - // that we have found, seal them in a quoted.Type and add them to the result - def typeHoleApproximation(sym: Symbol) = - val fromAboveAnnot = sym.hasAnnotation(dotc.core.Symbols.defn.QuotedRuntimePatterns_fromAboveAnnot) - val fullBounds = ctx1.gadt.fullBounds(sym) - val tp = if fromAboveAnnot then fullBounds.hi else fullBounds.lo - reflect.TypeReprMethods.asType(tp) - matchings.map { tup => - Tuple.fromIArray(typeHoles.map(typeHoleApproximation).toArray.asInstanceOf[IArray[Object]]) ++ tup + // After matching and doing all subtype checks, we have to approximate all the type bindings + // that we have found, seal them in a quoted.Type and add them to the result + def typeHoleApproximation(sym: Symbol) = + val fromAboveAnnot = sym.hasAnnotation(dotc.core.Symbols.defn.QuotedRuntimePatterns_fromAboveAnnot) + val fullBounds = ctx1.gadt.fullBounds(sym) + if fromAboveAnnot then fullBounds.hi else fullBounds.lo + + QuoteMatcher.treeMatch(scrutinee, pat1)(using ctx1).map { matchings => + import QuoteMatcher.MatchResult.* + lazy val spliceScope = SpliceScope.getCurrent + val typeHoleApproximations = typeHoles.map(typeHoleApproximation) + val typeHoleMapping = Map(typeHoles.zip(typeHoleApproximations)*) + val typeHoleMap = new Types.TypeMap { + def apply(tp: Types.Type): Types.Type = tp match + case Types.TypeRef(Types.NoPrefix, _) => 
typeHoleMapping.getOrElse(tp.typeSymbol, tp) + case _ => mapOver(tp) } + val matchedExprs = matchings.map(_.toExpr(typeHoleMap, spliceScope)) + val matchedTypes = typeHoleApproximations.map(reflect.TypeReprMethods.asType) + val results = matchedTypes ++ matchedExprs + Tuple.fromIArray(IArray.unsafeFromArray(results.toArray)) } } diff --git a/compiler/src/scala/quoted/runtime/impl/TypeImpl.scala b/compiler/src/scala/quoted/runtime/impl/TypeImpl.scala index 36da30e112c8..d4cea83efde8 100644 --- a/compiler/src/scala/quoted/runtime/impl/TypeImpl.scala +++ b/compiler/src/scala/quoted/runtime/impl/TypeImpl.scala @@ -14,6 +14,4 @@ final class TypeImpl(val typeTree: tpd.Tree, val scope: Scope) extends Type[?] { } override def hashCode(): Int = typeTree.hashCode() - - override def toString: String = "Type.of[...]" } diff --git a/compiler/src/scala/quoted/runtime/impl/printers/Extractors.scala b/compiler/src/scala/quoted/runtime/impl/printers/Extractors.scala index 0bea8f0ab643..c229338ad228 100644 --- a/compiler/src/scala/quoted/runtime/impl/printers/Extractors.scala +++ b/compiler/src/scala/quoted/runtime/impl/printers/Extractors.scala @@ -57,7 +57,6 @@ object Extractors { if (flags.is(Flags.Scala2x)) flagList += "Flags.Scala2x" if (flags.is(Flags.Sealed)) flagList += "Flags.Sealed" if (flags.is(Flags.StableRealizable)) flagList += "Flags.StableRealizable" - if (flags.is(Flags.Static)) flagList += "Flags.javaStatic" if (flags.is(Flags.Synthetic)) flagList += "Flags.Synthetic" if (flags.is(Flags.Trait)) flagList += "Flags.Trait" if (flags.is(Flags.Transparent)) flagList += "Flags.Transparent" diff --git a/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala b/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala index 5d61902fbedd..a6a773adc9ba 100644 --- a/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala +++ b/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala @@ -57,7 +57,6 @@ object SourceCode { if 
(flags.is(Flags.Scala2x)) flagList += "scala2x" if (flags.is(Flags.Sealed)) flagList += "sealed" if (flags.is(Flags.StableRealizable)) flagList += "stableRealizable" - if (flags.is(Flags.Static)) flagList += "javaStatic" if (flags.is(Flags.Synthetic)) flagList += "synthetic" if (flags.is(Flags.Trait)) flagList += "trait" if (flags.is(Flags.Transparent)) flagList += "transparent" @@ -1346,18 +1345,22 @@ object SourceCode { } private def printBoundsTree(bounds: TypeBoundsTree)(using elideThis: Option[Symbol]): this.type = { - bounds.low match { - case Inferred() => - case low => - this += " >: " - printTypeTree(low) - } - bounds.hi match { - case Inferred() => this - case hi => - this += " <: " - printTypeTree(hi) - } + if bounds.low.tpe == bounds.hi.tpe then + this += " = " + printTypeTree(bounds.low) + else + bounds.low match { + case Inferred() => + case low => + this += " >: " + printTypeTree(low) + } + bounds.hi match { + case Inferred() => this + case hi => + this += " <: " + printTypeTree(hi) + } } private def printBounds(bounds: TypeBounds)(using elideThis: Option[Symbol]): this.type = { diff --git a/compiler/test-coursier/dotty/tools/coursier/CoursierScalaTests.scala b/compiler/test-coursier/dotty/tools/coursier/CoursierScalaTests.scala index 944bf1957d43..8f9a9bd69a50 100644 --- a/compiler/test-coursier/dotty/tools/coursier/CoursierScalaTests.scala +++ b/compiler/test-coursier/dotty/tools/coursier/CoursierScalaTests.scala @@ -170,12 +170,16 @@ object CoursierScalaTests: /** Get coursier script */ @BeforeClass def setup(): Unit = - val ver = execCmd("uname")._2.head.replace('L', 'l').replace('D', 'd') + val launcherLocation = "https://github.com/coursier/launchers/raw/master" + val launcherName = execCmd("uname")._2.head.toLowerCase match + case "linux" => "cs-x86_64-pc-linux" + case "darwin" => "cs-x86_64-apple-darwin" + case other => fail(s"Unsupported OS for coursier launcher: $other") def runAndCheckCmd(cmd: String, options: String*): Unit = val (code, 
out) = execCmd(cmd, options*) if code != 0 then fail(s"Failed to run $cmd ${options.mkString(" ")}, exit code: $code, output: ${out.mkString("\n")}") - runAndCheckCmd("curl", s"-fLo cs https://git.io/coursier-cli-$ver") + runAndCheckCmd("curl", s"-fLo cs $launcherLocation/$launcherName") runAndCheckCmd("chmod", "+x cs") diff --git a/compiler/test-resources/repl-macros/i15104a b/compiler/test-resources/repl-macros/i15104a new file mode 100644 index 000000000000..92e82928b509 --- /dev/null +++ b/compiler/test-resources/repl-macros/i15104a @@ -0,0 +1,7 @@ +scala> import scala.quoted._ +scala> object Foo { def macroImpl(using Quotes) = Expr(1) } +// defined object Foo +scala> inline def foo = ${ Foo.macroImpl } +def foo: Int +scala> foo +val res0: Int = 1 diff --git a/compiler/test-resources/repl-macros/i15104b b/compiler/test-resources/repl-macros/i15104b new file mode 100644 index 000000000000..ebbdb2402076 --- /dev/null +++ b/compiler/test-resources/repl-macros/i15104b @@ -0,0 +1,5 @@ +scala> import scala.quoted._ +scala> object Foo { def macroImpl(using Quotes) = Expr(1); inline def foo = ${ Foo.macroImpl } } +// defined object Foo +scala> Foo.foo +val res0: Int = 1 diff --git a/compiler/test-resources/repl-macros/i15104c b/compiler/test-resources/repl-macros/i15104c new file mode 100644 index 000000000000..482b9487c9d9 --- /dev/null +++ b/compiler/test-resources/repl-macros/i15104c @@ -0,0 +1,7 @@ +scala> import scala.quoted._ +scala> def macroImpl(using Quotes) = Expr(1) +def macroImpl(using x$1: quoted.Quotes): quoted.Expr[Int] +scala> inline def foo = ${ macroImpl } +def foo: Int +scala> foo +val res0: Int = 1 diff --git a/compiler/test-resources/repl-macros/i5551 b/compiler/test-resources/repl-macros/i5551 index fb039ed19dd6..984551438b51 100644 --- a/compiler/test-resources/repl-macros/i5551 +++ b/compiler/test-resources/repl-macros/i5551 @@ -1,8 +1,7 @@ scala> import scala.quoted._ scala> def assertImpl(expr: Expr[Boolean])(using q: Quotes) = '{ if !($expr) 
then throw new AssertionError("failed assertion")} def assertImpl - (expr: quoted.Expr[Boolean]) - (using q: quoted.Quotes): quoted.Expr[Unit] + (expr: quoted.Expr[Boolean])(using q: quoted.Quotes): scala.quoted.Expr[Unit] scala> inline def assert(expr: => Boolean): Unit = ${ assertImpl('{expr}) } def assert(expr: => Boolean): Unit diff --git a/compiler/test-resources/repl/i10355 b/compiler/test-resources/repl/i10355 index bfe3af835c87..294b9d7f1101 100644 --- a/compiler/test-resources/repl/i10355 +++ b/compiler/test-resources/repl/i10355 @@ -1,5 +1,7 @@ scala> import scala.quoted._ scala> def foo(expr: Expr[Any])(using Quotes) = expr match { case '{ $x: t } => '{ $x: Any } } -def foo(expr: quoted.Expr[Any])(using x$2: quoted.Quotes): quoted.Expr[Any] +def foo + (expr: quoted.Expr[Any])(using x$2: quoted.Quotes): scala.quoted.Expr[Any] scala> def bar(expr: Expr[Any])(using Quotes) = expr match { case '{ $x: t } => '{ val a: t = ??? ; ???} } -def bar(expr: quoted.Expr[Any])(using x$2: quoted.Quotes): quoted.Expr[Nothing] +def bar + (expr: quoted.Expr[Any])(using x$2: quoted.Quotes): scala.quoted.Expr[Nothing] diff --git a/compiler/test-resources/repl/i1370 b/compiler/test-resources/repl/i1370 index 6582e03b6539..4bd92b4d5f83 100644 --- a/compiler/test-resources/repl/i1370 +++ b/compiler/test-resources/repl/i1370 @@ -1,5 +1,5 @@ scala> object Lives { class Private { def foo1: Any = new Private.C1; def foo2: Any = new Private.C2 }; object Private { class C1 private {}; private class C2 {} } } --- Error: ---------------------------------------------------------------------- +-- [E173] Reference Error: ----------------------------------------------------- 1 | object Lives { class Private { def foo1: Any = new Private.C1; def foo2: Any = new Private.C2 }; object Private { class C1 private {}; private class C2 {} } } | ^^^^^^^^^^ |constructor C1 cannot be accessed as a member of Lives.Private.C1 from class Private. 
diff --git a/compiler/test-resources/repl/i15493 b/compiler/test-resources/repl/i15493 index f543f5c1d0f7..670cf8ebcbd2 100644 --- a/compiler/test-resources/repl/i15493 +++ b/compiler/test-resources/repl/i15493 @@ -142,3 +142,8 @@ val res33: Outer.Foo = Outer$Foo@17 scala> res33.toString val res34: String = Outer$Foo@17 +scala> Vector.unapplySeq(Vector(2)) +val res35: scala.collection.SeqFactory.UnapplySeqWrapper[Int] = scala.collection.SeqFactory$UnapplySeqWrapper@df507bfd + +scala> new scala.concurrent.duration.DurationInt(5) +val res36: scala.concurrent.duration.package.DurationInt = scala.concurrent.duration.package$DurationInt@5 diff --git a/compiler/test-resources/repl/i17231 b/compiler/test-resources/repl/i17231 new file mode 100644 index 000000000000..07a509fea917 --- /dev/null +++ b/compiler/test-resources/repl/i17231 @@ -0,0 +1,2 @@ +scala> summon[ValueOf["a"]] +val res0: ValueOf["a"] = scala.ValueOf@61 diff --git a/compiler/test-resources/repl/i4184 b/compiler/test-resources/repl/i4184 index 2c4eb7d12a6f..06b2c81ece21 100644 --- a/compiler/test-resources/repl/i4184 +++ b/compiler/test-resources/repl/i4184 @@ -5,8 +5,11 @@ scala> object bar { class Foo } scala> implicit def eqFoo: CanEqual[foo.Foo, foo.Foo] = CanEqual.derived def eqFoo: CanEqual[foo.Foo, foo.Foo] scala> object Bar { new foo.Foo == new bar.Foo } --- Error: ---------------------------------------------------------------------- +-- [E172] Type Error: ---------------------------------------------------------- 1 | object Bar { new foo.Foo == new bar.Foo } | ^^^^^^^^^^^^^^^^^^^^^^^^^^ - | Values of types foo.Foo and bar.Foo cannot be compared with == or != -1 error found + | Values of types foo.Foo and bar.Foo² cannot be compared with == or != + | + | where: Foo is a class in object foo + | Foo² is a class in object bar +1 error found \ No newline at end of file diff --git a/compiler/test-resources/repl/i7644 b/compiler/test-resources/repl/i7644 index 8ceaf8b00804..786823073470 100644 --- 
a/compiler/test-resources/repl/i7644 +++ b/compiler/test-resources/repl/i7644 @@ -5,11 +5,7 @@ scala> class T extends CanEqual | Cannot extend sealed trait CanEqual in a different source file | | longer explanation available when compiling with `-explain` --- [E056] Syntax Error: -------------------------------------------------------- -1 | class T extends CanEqual - | ^^^^^^^^ - | Missing type parameter for CanEqual -2 errors found +1 error found scala> class T extends CanEqual -- [E112] Syntax Error: -------------------------------------------------------- 1 | class T extends CanEqual @@ -17,8 +13,5 @@ scala> class T extends CanEqual | Cannot extend sealed trait CanEqual in a different source file | | longer explanation available when compiling with `-explain` --- [E056] Syntax Error: -------------------------------------------------------- -1 | class T extends CanEqual - | ^^^^^^^^ - | Missing type parameter for CanEqual -2 errors found +1 error found + diff --git a/compiler/test-resources/type-printer/source-compatible b/compiler/test-resources/type-printer/source-compatible new file mode 100644 index 000000000000..d0773a11a795 --- /dev/null +++ b/compiler/test-resources/type-printer/source-compatible @@ -0,0 +1,17 @@ +scala> case class Bag() extends reflect.Selectable +// defined case class Bag +scala> val m = new Bag { val f = 23; def g = 47; def h(i: Int): Int = i; var i = 101; type N = Int; val l = List(42); def p[T](t: T) = t.toString() } +val m: + Bag{ + val f: Int; def g: Int; def h(i: Int): Int; val i: Int; + def i_=(x$1: Int): Unit; type N = Int; val l: List[Int]; + def p[T](t: T): String + } = Bag() +scala> type t = Bag { val f: Int; def g: Int; def h(i: Int): Int; val i: Int; def i_=(x$1: Int): Unit; type N = Int; val l: List[Int]; val s: String @unchecked } +// defined alias type t + = + Bag{ + val f: Int; def g: Int; def h(i: Int): Int; val i: Int; + def i_=(x$1: Int): Unit; type N = Int; val l: List[Int]; + val s: String @unchecked + } diff --git 
a/compiler/test/dotc/comptest.scala b/compiler/test/dotc/comptest.scala index bd0d800e641c..fb53f561a94d 100644 --- a/compiler/test/dotc/comptest.scala +++ b/compiler/test/dotc/comptest.scala @@ -12,6 +12,7 @@ object comptest extends ParallelTesting { def isInteractive = true def testFilter = Nil def updateCheckFiles: Boolean = false + def failedTests = None val posDir = "./tests/pos/" val negDir = "./tests/neg/" diff --git a/compiler/test/dotc/pos-test-pickling.blacklist b/compiler/test/dotc/pos-test-pickling.blacklist index a7d8778d4c61..9888916a86c9 100644 --- a/compiler/test/dotc/pos-test-pickling.blacklist +++ b/compiler/test/dotc/pos-test-pickling.blacklist @@ -19,6 +19,9 @@ i12299a.scala i13871.scala i15181.scala i15922.scala +t5031_2.scala +i16997.scala +i7414.scala # Tree is huge and blows stack for printing Text i7034.scala @@ -45,6 +48,7 @@ i9999.scala i6505.scala i15158.scala i15155.scala +i15827.scala # Opaque type i5720.scala @@ -84,7 +88,6 @@ boxmap-paper.scala # Function types print differnt after unpickling since test mispredicts Feature.preFundsEnabled caps-universal.scala - # GADT cast applied to singleton type difference i4176-gadt.scala @@ -92,3 +95,6 @@ i4176-gadt.scala i13974a.scala java-inherited-type1 + +# recursion limit exceeded +i7445b.scala diff --git a/compiler/test/dotc/run-lazy-vals-tests.allowlist b/compiler/test/dotc/run-lazy-vals-tests.allowlist index 98973dc2893d..361795bcc5fd 100644 --- a/compiler/test/dotc/run-lazy-vals-tests.allowlist +++ b/compiler/test/dotc/run-lazy-vals-tests.allowlist @@ -38,7 +38,6 @@ null-lazy-val.scala patmatch-classtag.scala priorityQueue.scala serialization-new-legacy.scala -serialization-new.scala singletons.scala statics.scala stream_flatmap_odds.scala diff --git a/compiler/test/dotty/Properties.scala b/compiler/test/dotty/Properties.scala index f4e0ed5f615f..cc47303d5468 100644 --- a/compiler/test/dotty/Properties.scala +++ b/compiler/test/dotty/Properties.scala @@ -13,6 +13,10 @@ object Properties 
{ prop == null || prop == "TRUE" } + /** If property is unset or FALSE we consider it `false` */ + private def propIsTrue(name: String): Boolean = + sys.props.getOrElse(name, "FALSE") == "TRUE" + /** Are we running on the CI? */ val isRunByCI: Boolean = sys.env.isDefinedAt("DOTTY_CI_RUN") || sys.env.isDefinedAt("DRONE") // TODO remove this when we drop Drone @@ -30,9 +34,11 @@ object Properties { */ val testsFilter: List[String] = sys.props.get("dotty.tests.filter").fold(Nil)(_.split(',').toList) + /** Run only failed tests */ + val rerunFailed: Boolean = propIsTrue("dotty.tests.rerunFailed") + /** Tests should override the checkfiles with the current output */ - val testsUpdateCheckfile: Boolean = - sys.props.getOrElse("dotty.tests.updateCheckfiles", "FALSE") == "TRUE" + val testsUpdateCheckfile: Boolean = propIsTrue("dotty.tests.updateCheckfiles") /** When set, the run tests are only compiled - not run, a warning will be * issued @@ -85,6 +91,9 @@ object Properties { /** jline-reader jar */ def jlineReader: String = sys.props("dotty.tests.classes.jlineReader") + /** scalajs-javalib jar */ + def scalaJSJavalib: String = sys.props("dotty.tests.classes.scalaJSJavalib") + /** scalajs-library jar */ def scalaJSLibrary: String = sys.props("dotty.tests.classes.scalaJSLibrary") } diff --git a/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala b/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala index 2c618ea91e96..ac4ba3ee0e75 100644 --- a/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala +++ b/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala @@ -597,7 +597,7 @@ class DottyBytecodeTests extends DottyBytecodeTest { val clsIn = dir.lookupName("Test.class", directory = false).input val clsNode = loadClassNode(clsIn) val method = getMethod(clsNode, "test") - assertEquals(88, instructionsFromMethod(method).size) + assertEquals(23, instructionsFromMethod(method).size) } } @@ -1622,7 +1622,6 @@ class DottyBytecodeTests extends 
DottyBytecodeTest { val instructions = instructionsFromMethod(method).filter(_.isInstanceOf[LineNumber]) val expected = List( - LineNumber(2, Label(0)), LineNumber(3, Label(0)), LineNumber(4, Label(5)), // case y => LineNumber(5, Label(9)), @@ -1664,7 +1663,6 @@ class DottyBytecodeTests extends DottyBytecodeTest { val instructions = instructionsFromMethod(method).filter(_.isInstanceOf[LineNumber]) val expected = List( - LineNumber(2, Label(0)), LineNumber(3, Label(0)), LineNumber(4, Label(5)), // case a if a == 3 => LineNumber(5, Label(15)), diff --git a/compiler/test/dotty/tools/backend/jvm/InlineBytecodeTests.scala b/compiler/test/dotty/tools/backend/jvm/InlineBytecodeTests.scala index ea9009de1d9e..6173842e9ad1 100644 --- a/compiler/test/dotty/tools/backend/jvm/InlineBytecodeTests.scala +++ b/compiler/test/dotty/tools/backend/jvm/InlineBytecodeTests.scala @@ -163,28 +163,27 @@ class InlineBytecodeTests extends DottyBytecodeTest { val expected = List( Label(0), - LineNumber(6, Label(0)), LineNumber(3, Label(0)), VarOp(ALOAD, 0), Ldc(LDC, "tracking"), Invoke(INVOKEVIRTUAL, "Foo", "foo", "(Ljava/lang/String;)V", false), - Label(6), - LineNumber(8, Label(6)), + Label(5), + LineNumber(8, Label(5)), VarOp(ALOAD, 0), Ldc(LDC, "abc"), Invoke(INVOKEVIRTUAL, "Foo", "foo", "(Ljava/lang/String;)V", false), - Label(11), - LineNumber(3, Label(11)), + Label(10), + LineNumber(3, Label(10)), VarOp(ALOAD, 0), Ldc(LDC, "tracking"), Invoke(INVOKEVIRTUAL, "Foo", "foo", "(Ljava/lang/String;)V", false), - Label(16), - LineNumber(10, Label(16)), + Label(15), + LineNumber(10, Label(15)), VarOp(ALOAD, 0), Ldc(LDC, "inner"), Invoke(INVOKEVIRTUAL, "Foo", "foo", "(Ljava/lang/String;)V", false), Op(RETURN), - Label(22) + Label(21) ) assert(instructions == expected, "`track` was not properly inlined in `main`\n" + diffInstructions(instructions, expected)) @@ -228,23 +227,22 @@ class InlineBytecodeTests extends DottyBytecodeTest { val expected = List( Label(0), - LineNumber(12, Label(0)), 
LineNumber(7, Label(0)), VarOp(ALOAD, 0), Ldc(LDC, "tracking"), Invoke(INVOKEVIRTUAL, "Foo", "foo", "(Ljava/lang/String;)V", false), - Label(6), - LineNumber(3, Label(6)), + Label(5), + LineNumber(3, Label(5)), VarOp(ALOAD, 0), Ldc(LDC, "tracking2"), Invoke(INVOKEVIRTUAL, "Foo", "foo", "(Ljava/lang/String;)V", false), - Label(11), - LineNumber(14, Label(11)), + Label(10), + LineNumber(14, Label(10)), VarOp(ALOAD, 0), Ldc(LDC, "abc"), Invoke(INVOKEVIRTUAL, "Foo", "foo", "(Ljava/lang/String;)V", false), Op(RETURN), - Label(17) + Label(16) ) assert(instructions == expected, "`track` was not properly inlined in `main`\n" + diffInstructions(instructions, expected)) @@ -288,23 +286,22 @@ class InlineBytecodeTests extends DottyBytecodeTest { val expected = List( Label(0), - LineNumber(12, Label(0)), LineNumber(3, Label(0)), VarOp(ALOAD, 0), Ldc(LDC, "tracking2"), Invoke(INVOKEVIRTUAL, "Foo", "foo", "(Ljava/lang/String;)V", false), - Label(6), - LineNumber(8, Label(6)), + Label(5), + LineNumber(8, Label(5)), VarOp(ALOAD, 0), Ldc(LDC, "fgh"), Invoke(INVOKEVIRTUAL, "Foo", "foo", "(Ljava/lang/String;)V", false), - Label(11), - LineNumber(14, Label(11)), + Label(10), + LineNumber(14, Label(10)), VarOp(ALOAD, 0), Ldc(LDC, "abc"), Invoke(INVOKEVIRTUAL, "Foo", "foo", "(Ljava/lang/String;)V", false), Op(RETURN), - Label(17) + Label(16) ) assert(instructions == expected, "`track` was not properly inlined in `main`\n" + diffInstructions(instructions, expected)) @@ -349,23 +346,22 @@ class InlineBytecodeTests extends DottyBytecodeTest { val expected = List( Label(0), - LineNumber(13, Label(0)), LineNumber(3, Label(0)), VarOp(ALOAD, 0), Ldc(LDC, "tracking2"), Invoke(INVOKEVIRTUAL, "Foo", "foo", "(Ljava/lang/String;)V", false), - Label(6), - LineNumber(3, Label(6)), + Label(5), + LineNumber(3, Label(5)), VarOp(ALOAD, 0), Ldc(LDC, "tracking2"), Invoke(INVOKEVIRTUAL, "Foo", "foo", "(Ljava/lang/String;)V", false), - Label(11), - LineNumber(15, Label(11)), + Label(10), + LineNumber(15, 
Label(10)), VarOp(ALOAD, 0), Ldc(LDC, "abc"), Invoke(INVOKEVIRTUAL, "Foo", "foo", "(Ljava/lang/String;)V", false), Op(RETURN), - Label(17) + Label(16) ) assert(instructions == expected, "`track` was not properly inlined in `main`\n" + diffInstructions(instructions, expected)) @@ -582,6 +578,63 @@ class InlineBytecodeTests extends DottyBytecodeTest { } } + @Test def beta_reduce_polymorphic_function = { + val source = """class Test: + | def test = + | ([Z] => (arg: Z) => { val a: Z = arg; a }).apply[Int](2) + """.stripMargin + + checkBCode(source) { dir => + val clsIn = dir.lookupName("Test.class", directory = false).input + val clsNode = loadClassNode(clsIn) + + val fun = getMethod(clsNode, "test") + val instructions = instructionsFromMethod(fun) + val expected = + List( + Op(ICONST_2), + VarOp(ISTORE, 1), + VarOp(ILOAD, 1), + Op(IRETURN) + ) + + assert(instructions == expected, + "`i was not properly beta-reduced in `test`\n" + diffInstructions(instructions, expected)) + + } + } + + @Test def beta_reduce_function_of_opaque_types = { + val source = """object foo: + | opaque type T = Int + | inline def apply(inline op: T => T): T = op(2) + | + |class Test: + | def test = foo { n => n } + """.stripMargin + + checkBCode(source) { dir => + val clsIn = dir.lookupName("Test.class", directory = false).input + val clsNode = loadClassNode(clsIn) + + val fun = getMethod(clsNode, "test") + val instructions = instructionsFromMethod(fun) + val expected = + List( + Field(GETSTATIC, "foo$", "MODULE$", "Lfoo$;"), + VarOp(ASTORE, 1), + VarOp(ALOAD, 1), + VarOp(ASTORE, 2), + Op(ICONST_2), + Op(IRETURN), + ) + + assert(instructions == expected, + "`i was not properly beta-reduced in `test`\n" + diffInstructions(instructions, expected)) + + } + } + @Test def i9456 = { val source = """class Foo { | def test: Int = inline2(inline1(2.+)) @@ -600,13 +653,7 @@ class InlineBytecodeTests extends DottyBytecodeTest { val instructions = instructionsFromMethod(fun) val expected = // TODO room for 
constant folding List( - Op(ICONST_1), - VarOp(ISTORE, 1), - Op(ICONST_2), - VarOp(ILOAD, 1), - Op(IADD), - Op(ICONST_3), - Op(IADD), + IntOp(BIPUSH, 6), Op(IRETURN), ) assert(instructions == expected, diff --git a/compiler/test/dotty/tools/backend/jvm/LabelBytecodeTests.scala b/compiler/test/dotty/tools/backend/jvm/LabelBytecodeTests.scala new file mode 100644 index 000000000000..aea567b87f91 --- /dev/null +++ b/compiler/test/dotty/tools/backend/jvm/LabelBytecodeTests.scala @@ -0,0 +1,166 @@ +package dotty.tools.backend.jvm + +import scala.language.unsafeNulls + +import org.junit.Assert._ +import org.junit.Test + +import scala.tools.asm +import asm._ +import asm.tree._ + +import scala.tools.asm.Opcodes +import scala.jdk.CollectionConverters._ +import Opcodes._ + +class LabelBytecodeTests extends DottyBytecodeTest { + import ASMConverters._ + + @Test def localLabelBreak = { + testLabelBytecodeEquals( + """val local = boundary.Label[Long]() + |try break(5L)(using local) + |catch case ex: boundary.Break[Long] @unchecked => + | if ex.label eq local then ex.value + | else throw ex + """.stripMargin, + "Long", + Ldc(LDC, 5), + Op(LRETURN) + ) + } + + @Test def simpleBoundaryBreak = { + testLabelBytecodeEquals( + """boundary: l ?=> + | break(2)(using l) + """.stripMargin, + "Int", + Op(ICONST_2), + Op(IRETURN) + ) + + testLabelBytecodeEquals( + """boundary: + | break(3) + """.stripMargin, + "Int", + Op(ICONST_3), + Op(IRETURN) + ) + + testLabelBytecodeEquals( + """boundary: + | break() + """.stripMargin, + "Unit", + Op(RETURN) + ) + } + + @Test def labelExtraction = { + // Test extra Inlined around the label + testLabelBytecodeEquals( + """boundary: + | break(2)(using summon[boundary.Label[Int]]) + """.stripMargin, + "Int", + Op(ICONST_2), + Op(IRETURN) + ) + + // Test extra Block around the label + testLabelBytecodeEquals( + """boundary: l ?=> + | break(2)(using { l }) + """.stripMargin, + "Int", + Op(ICONST_2), + Op(IRETURN) + ) + } + + @Test def boundaryLocalBreak = { 
+ testLabelBytecodeExpect( + """val x: Boolean = true + |boundary[Unit]: + | var i = 0 + | while true do + | i += 1 + | if i > 10 then break() + """.stripMargin, + "Unit", + !throws(_) + ) + } + + @Test def boundaryNonLocalBreak = { + testLabelBytecodeExpect( + """boundary[Unit]: + | nonLocalBreak() + """.stripMargin, + "Unit", + throws + ) + + testLabelBytecodeExpect( + """boundary[Unit]: + | def f() = break() + | f() + """.stripMargin, + "Unit", + throws + ) + } + + @Test def boundaryLocalAndNonLocalBreak = { + testLabelBytecodeExpect( + """boundary[Unit]: l ?=> + | break() + | nonLocalBreak() + """.stripMargin, + "Unit", + throws + ) + } + + private def throws(instructions: List[Instruction]): Boolean = + instructions.exists { + case Op(ATHROW) => true + case _ => false + } + + private def testLabelBytecodeEquals(code: String, tpe: String, expected: Instruction*): Unit = + checkLabelBytecodeInstructions(code, tpe) { instructions => + val expectedList = expected.toList + assert(instructions == expectedList, + "`test` was not properly generated\n" + diffInstructions(instructions, expectedList)) + } + + private def testLabelBytecodeExpect(code: String, tpe: String, expected: List[Instruction] => Boolean): Unit = + checkLabelBytecodeInstructions(code, tpe) { instructions => + assert(expected(instructions), + "`test` was not properly generated\n" + instructions) + } + + private def checkLabelBytecodeInstructions(code: String, tpe: String)(checkOutput: List[Instruction] => Unit): Unit = { + val source = + s"""import scala.util.boundary, boundary.break + |class Test: + | def test: $tpe = { + | ${code.linesIterator.toList.mkString("", "\n ", "")} + | } + | def nonLocalBreak[T](value: T)(using boundary.Label[T]): Nothing = break(value) + | def nonLocalBreak()(using boundary.Label[Unit]): Nothing = break(()) + """.stripMargin + + checkBCode(source) { dir => + val clsIn = dir.lookupName("Test.class", directory = false).input + val clsNode = loadClassNode(clsIn) + val 
method = getMethod(clsNode, "test") + + checkOutput(instructionsFromMethod(method)) + } + } + +} diff --git a/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala b/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala index cce23cb5c9a6..2a665c478932 100644 --- a/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala +++ b/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala @@ -10,6 +10,7 @@ import org.junit.Assume._ import org.junit.experimental.categories.Category import scala.concurrent.duration._ +import reporting.TestReporter import vulpix._ import java.nio.file._ @@ -35,6 +36,12 @@ class BootstrappedOnlyCompilationTests { ).checkCompile() } + @Test def posWithCompilerCC: Unit = + implicit val testGroup: TestGroup = TestGroup("compilePosWithCompilerCC") + aggregateTests( + compileDir("tests/pos-with-compiler-cc/dotc", withCompilerOptions.and("-language:experimental.captureChecking")) + ).checkCompile() + @Test def posWithCompiler: Unit = { implicit val testGroup: TestGroup = TestGroup("compilePosWithCompiler") aggregateTests( @@ -103,6 +110,8 @@ class BootstrappedOnlyCompilationTests { aggregateTests( compileFilesInDir("tests/neg-macros", defaultOptions.and("-Xcheck-macros")), compileFile("tests/pos-macros/i9570.scala", defaultOptions.and("-Xfatal-warnings")), + compileFile("tests/pos-macros/macro-deprecation.scala", defaultOptions.and("-Xfatal-warnings", "-deprecation")), + compileFile("tests/pos-macros/macro-experimental.scala", defaultOptions.and("-Yno-experimental")), ).checkExpectedErrors() } @@ -123,6 +132,8 @@ class BootstrappedOnlyCompilationTests { compileFilesInDir("tests/run-custom-args/Yretain-trees", defaultOptions and "-Yretain-trees"), compileFilesInDir("tests/run-custom-args/Yread-comments", defaultOptions and "-Yread-docs"), compileFilesInDir("tests/run-custom-args/run-macros-erased", defaultOptions.and("-language:experimental.erasedDefinitions").and("-Xcheck-macros")), + 
compileDir("tests/run-custom-args/Xmacro-settings/simple", defaultOptions.and("-Xmacro-settings:one,two,three")), + compileDir("tests/run-custom-args/Xmacro-settings/compileTimeEnv", defaultOptions.and("-Xmacro-settings:a,b=1,c.b.a=x.y.z=1,myLogger.level=INFO")), ) }.checkRuns() @@ -214,6 +225,7 @@ object BootstrappedOnlyCompilationTests extends ParallelTesting { def isInteractive = SummaryReport.isInteractive def testFilter = Properties.testsFilter def updateCheckFiles: Boolean = Properties.testsUpdateCheckfile + def failedTests = TestReporter.lastRunFailedTests implicit val summaryReport: SummaryReporting = new SummaryReport @AfterClass def tearDown(): Unit = { diff --git a/compiler/test/dotty/tools/dotc/CompilationTests.scala b/compiler/test/dotty/tools/dotc/CompilationTests.scala index 261e6af21927..4e86a3b83383 100644 --- a/compiler/test/dotty/tools/dotc/CompilationTests.scala +++ b/compiler/test/dotty/tools/dotc/CompilationTests.scala @@ -16,6 +16,7 @@ import scala.jdk.CollectionConverters._ import scala.util.matching.Regex import scala.concurrent.duration._ import TestSources.sources +import reporting.TestReporter import vulpix._ class CompilationTests { @@ -28,7 +29,7 @@ class CompilationTests { @Test def pos: Unit = { implicit val testGroup: TestGroup = TestGroup("compilePos") - aggregateTests( + var tests = List( compileFile("tests/pos/nullarify.scala", defaultOptions.and("-Ycheck:nullarify")), compileFile("tests/pos-special/utf8encoded.scala", explicitUTF8), compileFile("tests/pos-special/utf16encoded.scala", explicitUTF16), @@ -43,10 +44,11 @@ class CompilationTests { compileFilesInDir("tests/pos-custom-args/captures", defaultOptions.and("-language:experimental.captureChecking")), compileFilesInDir("tests/pos-custom-args/erased", defaultOptions.and("-language:experimental.erasedDefinitions")), compileFilesInDir("tests/pos", defaultOptions.and("-Ysafe-init")), - // Run tests for experimental lightweight lazy vals - compileFilesInDir("tests/pos", 
defaultOptions.and("-Ysafe-init", "-Ylightweight-lazy-vals"), FileFilter.include(TestSources.posLazyValsAllowlist)), + // Run tests for legacy lazy vals + compileFilesInDir("tests/pos", defaultOptions.and("-Ysafe-init", "-Ylegacy-lazy-vals", "-Ycheck-constraint-deps"), FileFilter.include(TestSources.posLazyValsAllowlist)), compileFilesInDir("tests/pos-deep-subtype", allowDeepSubtypes), compileFilesInDir("tests/pos-custom-args/no-experimental", defaultOptions.and("-Yno-experimental")), + compileFilesInDir("tests/pos-custom-args/strict", defaultOptions.and("-source", "future", "-deprecation", "-Xfatal-warnings")), compileDir("tests/pos-special/java-param-names", defaultOptions.withJavacOnlyOptions("-parameters")), compileFile( // succeeds despite -Xfatal-warnings because of -nowarn @@ -54,8 +56,6 @@ class CompilationTests { defaultOptions.and("-nowarn", "-Xfatal-warnings") ), compileFile("tests/pos-special/typeclass-scaling.scala", defaultOptions.and("-Xmax-inlines", "40")), - compileFile("tests/pos-special/i7296.scala", defaultOptions.and("-source", "future", "-deprecation", "-Xfatal-warnings")), - compileDir("tests/pos-special/adhoc-extension", defaultOptions.and("-source", "future", "-feature", "-Xfatal-warnings")), compileFile("tests/pos-special/i7575.scala", defaultOptions.andLanguageFeature("dynamics")), compileFile("tests/pos-special/kind-projector.scala", defaultOptions.and("-Ykind-projector")), compileFile("tests/pos-special/kind-projector-underscores.scala", defaultOptions.and("-Ykind-projector:underscores")), @@ -64,10 +64,14 @@ class CompilationTests { compileFile("tests/pos-custom-args/i9267.scala", defaultOptions.and("-Ystop-after:erasure")), compileFile("tests/pos-special/extend-java-enum.scala", defaultOptions.and("-source", "3.0-migration")), compileFile("tests/pos-custom-args/help.scala", defaultOptions.and("-help", "-V", "-W", "-X", "-Y")), - compileFile("tests/pos-custom-args/i10383.scala", defaultOptions.and("-source", "future", "-deprecation", 
"-Xfatal-warnings")), compileFile("tests/pos-custom-args/i13044.scala", defaultOptions.and("-Xmax-inlines:33")), - compileFile("tests/pos-custom-args/jdk-8-app.scala", defaultOptions.and("-release:8")), - ).checkCompile() + compileFile("tests/pos-custom-args/jdk-8-app.scala", defaultOptions.and("-release:8")) + ) + + if scala.util.Properties.isJavaAtLeast("16") then + tests ::= compileFilesInDir("tests/pos-java16+", defaultOptions.and("-Ysafe-init")) + + aggregateTests(tests*).checkCompile() } @Test def rewrites: Unit = { @@ -82,6 +86,8 @@ class CompilationTests { compileFile("tests/rewrites/i9632.scala", defaultOptions.and("-indent", "-rewrite")), compileFile("tests/rewrites/i11895.scala", defaultOptions.and("-indent", "-rewrite")), compileFile("tests/rewrites/i12340.scala", unindentOptions.and("-rewrite")), + compileFile("tests/rewrites/i17187.scala", unindentOptions.and("-rewrite")), + compileFile("tests/rewrites/i17399.scala", unindentOptions.and("-rewrite")), ).checkRewrites() } @@ -141,29 +147,22 @@ class CompilationTests { compileFilesInDir("tests/neg-custom-args/erased", defaultOptions.and("-language:experimental.erasedDefinitions")), compileFilesInDir("tests/neg-custom-args/allow-double-bindings", allowDoubleBindings), compileFilesInDir("tests/neg-custom-args/allow-deep-subtypes", allowDeepSubtypes), + compileFilesInDir("tests/neg-custom-args/feature", defaultOptions.and("-Xfatal-warnings", "-feature")), compileFilesInDir("tests/neg-custom-args/no-experimental", defaultOptions.and("-Yno-experimental")), compileFilesInDir("tests/neg-custom-args/captures", defaultOptions.and("-language:experimental.captureChecking")), - compileDir("tests/neg-custom-args/impl-conv", defaultOptions.and("-Xfatal-warnings", "-feature")), - compileDir("tests/neg-custom-args/i13946", defaultOptions.and("-Xfatal-warnings", "-feature")), + compileFilesInDir("tests/neg-custom-args/explain", defaultOptions.and("-explain")), 
compileFile("tests/neg-custom-args/avoid-warn-deprecation.scala", defaultOptions.and("-Xfatal-warnings", "-feature")), - compileFile("tests/neg-custom-args/implicit-conversions.scala", defaultOptions.and("-Xfatal-warnings", "-feature")), - compileFile("tests/neg-custom-args/implicit-conversions-old.scala", defaultOptions.and("-Xfatal-warnings", "-feature")), compileFile("tests/neg-custom-args/i3246.scala", scala2CompatMode), compileFile("tests/neg-custom-args/overrideClass.scala", scala2CompatMode), compileFile("tests/neg-custom-args/ovlazy.scala", scala2CompatMode.and("-Xfatal-warnings")), compileFile("tests/neg-custom-args/newline-braces.scala", scala2CompatMode.and("-Xfatal-warnings")), compileFile("tests/neg-custom-args/autoTuplingTest.scala", defaultOptions.andLanguageFeature("noAutoTupling")), - compileFile("tests/neg-custom-args/nopredef.scala", defaultOptions.and("-Yno-predef")), - compileFile("tests/neg-custom-args/noimports.scala", defaultOptions.and("-Yno-imports")), - compileFile("tests/neg-custom-args/noimports2.scala", defaultOptions.and("-Yno-imports")), compileFile("tests/neg-custom-args/i1650.scala", allowDeepSubtypes), compileFile("tests/neg-custom-args/i3882.scala", allowDeepSubtypes), compileFile("tests/neg-custom-args/i4372.scala", allowDeepSubtypes), compileFile("tests/neg-custom-args/i1754.scala", allowDeepSubtypes), compileFile("tests/neg-custom-args/i12650.scala", allowDeepSubtypes), compileFile("tests/neg-custom-args/i9517.scala", defaultOptions.and("-Xprint-types")), - compileFile("tests/neg-custom-args/i11637.scala", defaultOptions.and("-explain")), - compileFile("tests/neg-custom-args/i15575.scala", defaultOptions.and("-explain")), compileFile("tests/neg-custom-args/interop-polytypes.scala", allowDeepSubtypes.and("-Yexplicit-nulls")), compileFile("tests/neg-custom-args/conditionalWarnings.scala", allowDeepSubtypes.and("-deprecation").and("-Xfatal-warnings")), compileFilesInDir("tests/neg-custom-args/isInstanceOf", allowDeepSubtypes and 
"-Xfatal-warnings"), @@ -188,11 +187,10 @@ class CompilationTests { compileFile("tests/neg-custom-args/matchable.scala", defaultOptions.and("-Xfatal-warnings", "-source", "future")), compileFile("tests/neg-custom-args/i7314.scala", defaultOptions.and("-Xfatal-warnings", "-source", "future")), compileFile("tests/neg-custom-args/capt-wf.scala", defaultOptions.and("-language:experimental.captureChecking", "-Xfatal-warnings")), - compileFile("tests/neg-custom-args/feature-shadowing.scala", defaultOptions.and("-Xfatal-warnings", "-feature")), - compileDir("tests/neg-custom-args/hidden-type-errors", defaultOptions.and("-explain")), compileFile("tests/neg-custom-args/i13026.scala", defaultOptions.and("-print-lines")), compileFile("tests/neg-custom-args/i13838.scala", defaultOptions.and("-Ximplicit-search-limit", "1000")), compileFile("tests/neg-custom-args/jdk-9-app.scala", defaultOptions.and("-release:8")), + compileFile("tests/neg-custom-args/i10994.scala", defaultOptions.and("-source", "future")), ).checkExpectedErrors() } @@ -213,13 +211,11 @@ class CompilationTests { compileFile("tests/run-custom-args/defaults-serizaliable-no-forwarders.scala", defaultOptions and "-Xmixin-force-forwarders:false"), compileFilesInDir("tests/run-custom-args/erased", defaultOptions.and("-language:experimental.erasedDefinitions")), compileFilesInDir("tests/run-custom-args/fatal-warnings", defaultOptions.and("-Xfatal-warnings")), - compileDir("tests/run-custom-args/Xmacro-settings/simple", defaultOptions.and("-Xmacro-settings:one,two,three")), - compileDir("tests/run-custom-args/Xmacro-settings/compileTimeEnv", defaultOptions.and("-Xmacro-settings:a,b=1,c.b.a=x.y.z=1,myLogger.level=INFO")), compileFilesInDir("tests/run-custom-args/captures", allowDeepSubtypes.and("-language:experimental.captureChecking")), compileFilesInDir("tests/run-deep-subtype", allowDeepSubtypes), - compileFilesInDir("tests/run", defaultOptions.and("-Ysafe-init"), FileFilter.exclude("serialization-new.scala")), - // 
Run tests for experimental lightweight lazy vals and stable lazy vals. - compileFilesInDir("tests/run", defaultOptions.and("-Ysafe-init", "-Ylightweight-lazy-vals"), FileFilter.include(TestSources.runLazyValsAllowlist)), + compileFilesInDir("tests/run", defaultOptions.and("-Ysafe-init")), + // Run tests for legacy lazy vals. + compileFilesInDir("tests/run", defaultOptions.and("-Ysafe-init", "-Ylegacy-lazy-vals", "-Ycheck-constraint-deps"), FileFilter.include(TestSources.runLazyValsAllowlist)), ).checkRuns() } @@ -241,7 +237,8 @@ class CompilationTests { ).checkCompile() } - @Test def recheck: Unit = + //@Test disabled in favor of posWithCompilerCC to save time. + def recheck: Unit = given TestGroup = TestGroup("recheck") aggregateTests( compileFilesInDir("tests/new", recheckOptions), @@ -317,6 +314,7 @@ object CompilationTests extends ParallelTesting { def isInteractive = SummaryReport.isInteractive def testFilter = Properties.testsFilter def updateCheckFiles: Boolean = Properties.testsUpdateCheckfile + def failedTests = TestReporter.lastRunFailedTests implicit val summaryReport: SummaryReporting = new SummaryReport @AfterClass def tearDown(): Unit = { diff --git a/compiler/test/dotty/tools/dotc/FromTastyTests.scala b/compiler/test/dotty/tools/dotc/FromTastyTests.scala index 2684a47b870c..1d46cbbce95c 100644 --- a/compiler/test/dotty/tools/dotc/FromTastyTests.scala +++ b/compiler/test/dotty/tools/dotc/FromTastyTests.scala @@ -5,6 +5,7 @@ package dotc import scala.language.unsafeNulls import org.junit.{AfterClass, Test} +import reporting.TestReporter import vulpix._ import java.io.{File => JFile} @@ -48,6 +49,7 @@ object FromTastyTests extends ParallelTesting { def isInteractive = SummaryReport.isInteractive def testFilter = Properties.testsFilter def updateCheckFiles: Boolean = Properties.testsUpdateCheckfile + def failedTests = TestReporter.lastRunFailedTests implicit val summaryReport: SummaryReporting = new SummaryReport @AfterClass def tearDown(): Unit = { diff 
--git a/compiler/test/dotty/tools/dotc/IdempotencyTests.scala b/compiler/test/dotty/tools/dotc/IdempotencyTests.scala index 84b3f1f8a48f..b515ebb05f96 100644 --- a/compiler/test/dotty/tools/dotc/IdempotencyTests.scala +++ b/compiler/test/dotty/tools/dotc/IdempotencyTests.scala @@ -12,6 +12,7 @@ import org.junit.{AfterClass, Test} import org.junit.experimental.categories.Category import scala.concurrent.duration._ +import reporting.TestReporter import vulpix._ @@ -76,6 +77,7 @@ object IdempotencyTests extends ParallelTesting { def isInteractive = SummaryReport.isInteractive def testFilter = Properties.testsFilter def updateCheckFiles: Boolean = Properties.testsUpdateCheckfile + def failedTests = TestReporter.lastRunFailedTests implicit val summaryReport: SummaryReporting = new SummaryReport @AfterClass def tearDown(): Unit = { diff --git a/compiler/test/dotty/tools/dotc/SettingsTests.scala b/compiler/test/dotty/tools/dotc/SettingsTests.scala index e3076f055d51..8c571a321548 100644 --- a/compiler/test/dotty/tools/dotc/SettingsTests.scala +++ b/compiler/test/dotty/tools/dotc/SettingsTests.scala @@ -179,6 +179,25 @@ class SettingsTests { assertEquals(100, foo.value) } + @Test def `Set BooleanSettings correctly`: Unit = + object Settings extends SettingGroup: + val foo = BooleanSetting("-foo", "foo", false) + val bar = BooleanSetting("-bar", "bar", true) + val baz = BooleanSetting("-baz", "baz", false) + val qux = BooleanSetting("-qux", "qux", false) + import Settings._ + + val args = List("-foo:true", "-bar:false", "-baz", "-qux:true", "-qux:false") + val summary = processArguments(args, processAll = true) + assertTrue(s"Setting args errors:\n ${summary.errors.take(5).mkString("\n ")}", summary.errors.isEmpty) + withProcessedArgs(summary) { + assertEquals(true, foo.value) + assertEquals(false, bar.value) + assertEquals(true, baz.value) + assertEquals(false, qux.value) + assertEquals(List("Flag -qux set repeatedly"), summary.warnings) + } + private def 
withProcessedArgs(summary: ArgsSummary)(f: SettingsState ?=> Unit) = f(using summary.sstate) extension [T](setting: Setting[T]) diff --git a/compiler/test/dotty/tools/dotc/StringFormatterTest.scala b/compiler/test/dotty/tools/dotc/StringFormatterTest.scala index e745fa515443..4dfc08cc7e9b 100644 --- a/compiler/test/dotty/tools/dotc/StringFormatterTest.scala +++ b/compiler/test/dotty/tools/dotc/StringFormatterTest.scala @@ -39,51 +39,6 @@ class StringFormatterTest extends AbstractStringFormatterTest: assertEquals("flags=private final ", store.string) end StringFormatterTest -class EmStringFormatterTest extends AbstractStringFormatterTest: - @Test def seq = check("[Any, String]", em"${Seq(defn.AnyType, defn.StringType)}") - @Test def seqSeq = check("Any; String", em"${Seq(defn.AnyType, defn.StringType)}%; %") - @Test def ellipsis = assert(em"$Big".contains("...")) - @Test def err = check("type Err", em"$Err") - @Test def ambig = check("Foo vs Foo", em"$Foo vs $Foo") - @Test def cstrd = check("Foo; Bar", em"$mkCstrd%; %") - @Test def seqErr = check("[class Any, type Err]", em"${Seq(defn.AnyClass, Err)}") - @Test def seqSeqErr = check("class Any; type Err", em"${Seq(defn.AnyClass, Err)}%; %") - @Test def tupleErr = check("(1,type Err)", em"${(1, Err)}") - @Test def tupleAmb = check("(Foo,Foo)", em"${(Foo, Foo)}") - @Test def tupleFlags = check("(Foo,abstract)", em"${(Foo, Abstract)}") - @Test def seqOfTupleFlags = check("[(Foo,abstract)]", em"${Seq((Foo, Abstract))}") -end EmStringFormatterTest - -class ExStringFormatterTest extends AbstractStringFormatterTest: - @Test def seq = check("[Any, String]", ex"${Seq(defn.AnyType, defn.StringType)}") - @Test def seqSeq = check("Any; String", ex"${Seq(defn.AnyType, defn.StringType)}%; %") - @Test def ellipsis = assert(ex"$Big".contains("...")) - @Test def err = check("type Err", ex"$Err") - @Test def ambig = check("""Foo vs Foo² - | - |where: Foo is a type - | Foo² is a type - |""".stripMargin, ex"$Foo vs $Foo") - @Test def 
cstrd = check("""Foo; Bar - | - |where: Bar is a type variable with constraint <: String - | Foo is a type variable with constraint <: Int - |""".stripMargin, ex"$mkCstrd%; %") - @Test def seqErr = check("[class Any, type Err]", ex"${Seq(defn.AnyClass, Err)}") - @Test def seqSeqErr = check("class Any; type Err", ex"${Seq(defn.AnyClass, Err)}%; %") - @Test def tupleErr = check("(1,type Err)", ex"${(1, Err)}") - @Test def tupleAmb = check("""(Foo,Foo²) - | - |where: Foo is a type - | Foo² is a type - |""".stripMargin, ex"${(Foo, Foo)}") - @Test def seqOfTup3Amb = check("""[(Foo,Foo²,type Err)] - | - |where: Foo is a type - | Foo² is a type - |""".stripMargin, ex"${Seq((Foo, Foo, Err))}") -end ExStringFormatterTest - abstract class AbstractStringFormatterTest extends DottyTest: override def initializeCtx(fc: FreshContext) = super.initializeCtx(fc.setSetting(fc.settings.color, "never")) diff --git a/compiler/test/dotty/tools/dotc/TastyBootstrapTests.scala b/compiler/test/dotty/tools/dotc/TastyBootstrapTests.scala index 9e71b10b206d..50e07f388dc4 100644 --- a/compiler/test/dotty/tools/dotc/TastyBootstrapTests.scala +++ b/compiler/test/dotty/tools/dotc/TastyBootstrapTests.scala @@ -17,6 +17,7 @@ import scala.util.matching.Regex import scala.concurrent.duration._ import TestSources.sources import vulpix._ +import reporting.TestReporter class TastyBootstrapTests { import ParallelTesting._ @@ -114,6 +115,7 @@ object TastyBootstrapTests extends ParallelTesting { def isInteractive = SummaryReport.isInteractive def testFilter = Properties.testsFilter def updateCheckFiles: Boolean = Properties.testsUpdateCheckfile + def failedTests = TestReporter.lastRunFailedTests implicit val summaryReport: SummaryReporting = new SummaryReport @AfterClass def tearDown(): Unit = { diff --git a/compiler/test/dotty/tools/dotc/TupleShowTests.scala b/compiler/test/dotty/tools/dotc/TupleShowTests.scala new file mode 100644 index 000000000000..2d76c480b001 --- /dev/null +++ 
b/compiler/test/dotty/tools/dotc/TupleShowTests.scala @@ -0,0 +1,96 @@ +package dotty.tools +package dotc + +import core.*, Decorators.*, Symbols.* +import printing.Texts.* + +import java.lang.System.{ lineSeparator => EOL } +import org.junit.Test + +class TupleShowTests extends DottyTest: + def IntType = defn.IntType + def LongType = defn.LongType + def ShortType = defn.ShortType + def Types_10 = List.fill(5)(IntType) ::: List.fill(5)(LongType) + def Types_20 = Types_10 ::: Types_10 + + val tup0 = defn.tupleType(Nil) + val tup1 = defn.tupleType(IntType :: Nil) + val tup2 = defn.tupleType(IntType :: LongType :: Nil) + val tup3 = defn.tupleType(IntType :: LongType :: ShortType :: Nil) + val tup21 = defn.tupleType(Types_20 ::: IntType :: Nil) + val tup22 = defn.tupleType(Types_20 ::: IntType :: LongType :: Nil) + val tup23 = defn.tupleType(Types_20 ::: IntType :: LongType :: ShortType :: Nil) + val tup24 = defn.tupleType(Types_20 ::: IntType :: LongType :: ShortType :: ShortType :: Nil) + + @Test def tup0_show = chkEq("EmptyTuple.type", i"$tup0") + @Test def tup1_show = chkEq("Tuple1[Int]", i"$tup1") + @Test def tup2_show = chkEq("(Int, Long)", i"$tup2") + @Test def tup3_show = chkEq("(Int, Long, Short)", i"$tup3") + @Test def tup21_show = chkEq(res21, i"$tup21") + @Test def tup22_show = chkEq(res22, i"$tup22") + @Test def tup23_show = chkEq(res23, i"$tup23") + @Test def tup24_show = chkEq(res24, i"$tup24") + + @Test def tup3_text = + val obt = tup3.toText(ctx.printer) + val exp = Fluid(List( + Str(")"), + Str("Short"), + Closed(List(Str(", "), Str("Long"))), + Closed(List(Str(", "), Str("Int"))), + Str("("), + )) + chkEq(exp, obt) + + @Test def tup3_layout10 = + val obt = tup3.toText(ctx.printer).layout(10) + val exp = Fluid(List( + Str(" Short)"), + Str(" Long, "), + Str("(Int, "), + )) + chkEq(exp, obt) + + @Test def tup3_show10 = chkEq("(Int,\n Long,\n Short)".normEOL, tup3.toText(ctx.printer).mkString(10, false)) + + val res21 = """|(Int, Int, Int, Int, Int, 
Long, Long, Long, Long, Long, Int, Int, Int, Int, + | Int, Long, Long, Long, Long, Long, Int)""".stripMargin.normEOL + + val res22 = """|(Int, Int, Int, Int, Int, Long, Long, Long, Long, Long, Int, Int, Int, Int, + | Int, Long, Long, Long, Long, Long, Int, Long)""".stripMargin.normEOL + + val res23 = """|(Int, Int, Int, Int, Int, Long, Long, Long, Long, Long, Int, Int, Int, Int, + | Int, Long, Long, Long, Long, Long, Int, Long, Short)""".stripMargin.normEOL + + val res24 = """|(Int, Int, Int, Int, Int, Long, Long, Long, Long, Long, Int, Int, Int, Int, + | Int, Long, Long, Long, Long, Long, Int, Long, Short, Short)""".stripMargin.normEOL + + def chkEq[A](expected: A, obtained: A) = assert(expected == obtained, diff(s"$expected", s"$obtained")) + + /** On Windows the string literal in this test source file will be read with `\n` (b/c of "-encoding UTF8") + * but the compiler will correctly emit \r\n as the line separator. + * So we align the expected result to faithfully compare test results. */ + extension (str: String) def normEOL = if EOL == "\n" then str else str.replace("\n", EOL).nn + + def diff(exp: String, obt: String) = + val min = math.min(exp.length, obt.length) + val pre = + var i = 0 + while i < min && exp(i) == obt(i) do i += 1 + exp.take(i) + val suf = + val max = min - pre.length - 1 + var i = 0 + while i <= max && exp(exp.length - 1 - i) == obt(obt.length - 1 - i) do i += 1 + exp.drop(exp.length - 1) + + import scala.io.AnsiColor.* + val ellip = BLACK + BOLD + "..." 
+ RESET + val compactPre = if pre.length <= 20 then pre else ellip + pre.drop(pre.length - 20) + val compactSuf = if suf.length <= 20 then suf else suf.take(20) + ellip + def extractDiff(s: String) = s.slice(pre.length, s.length - suf.length) + s"""|Comparison Failure: + | expected: $compactPre${CYAN }${extractDiff(exp)}$RESET$compactSuf + | obtained: $compactPre$MAGENTA${extractDiff(obt)}$RESET$compactSuf + |""".stripMargin diff --git a/compiler/test/dotty/tools/dotc/core/ConstraintsTest.scala b/compiler/test/dotty/tools/dotc/core/ConstraintsTest.scala index 5ab162b9f05c..9ae3fda8c6b9 100644 --- a/compiler/test/dotty/tools/dotc/core/ConstraintsTest.scala +++ b/compiler/test/dotty/tools/dotc/core/ConstraintsTest.scala @@ -53,3 +53,41 @@ class ConstraintsTest: i"Merging constraints `?S <: ?T` and `Int <: ?S` should result in `Int <:< ?T`: ${ctx.typerState.constraint}") } end mergeBoundsTransitivity + + @Test def validBoundsInit: Unit = inCompilerContext( + TestConfiguration.basicClasspath, + scalaSources = "trait A { def foo[S >: T <: T | Int, T <: String]: Any }") { + val tvars = constrained(requiredClass("A").typeRef.select("foo".toTermName).info.asInstanceOf[TypeLambda], EmptyTree, alwaysAddTypeVars = true)._2 + val List(s, t) = tvars.tpes + + val TypeBounds(lo, hi) = ctx.typerState.constraint.entry(t.asInstanceOf[TypeVar].origin): @unchecked + assert(lo =:= defn.NothingType, i"Unexpected lower bound $lo for $t: ${ctx.typerState.constraint}") + assert(hi =:= defn.StringType, i"Unexpected upper bound $hi for $t: ${ctx.typerState.constraint}") // used to be Any + } + + @Test def validBoundsUnify: Unit = inCompilerContext( + TestConfiguration.basicClasspath, + scalaSources = "trait A { def foo[S >: T <: T | Int, T <: String | Int]: Any }") { + val tvars = constrained(requiredClass("A").typeRef.select("foo".toTermName).info.asInstanceOf[TypeLambda], EmptyTree, alwaysAddTypeVars = true)._2 + val List(s, t) = tvars.tpes + + s <:< t + + val TypeBounds(lo, hi) = 
ctx.typerState.constraint.entry(t.asInstanceOf[TypeVar].origin): @unchecked + assert(lo =:= defn.NothingType, i"Unexpected lower bound $lo for $t: ${ctx.typerState.constraint}") + assert(hi =:= (defn.StringType | defn.IntType), i"Unexpected upper bound $hi for $t: ${ctx.typerState.constraint}") + } + + @Test def validBoundsReplace: Unit = inCompilerContext( + TestConfiguration.basicClasspath, + scalaSources = "trait X; trait A { def foo[S <: U | X, T, U]: Any }") { + val tvarTrees = constrained(requiredClass("A").typeRef.select("foo".toTermName).info.asInstanceOf[TypeLambda], EmptyTree, alwaysAddTypeVars = true)._2 + val tvars @ List(s, t, u) = tvarTrees.tpes.asInstanceOf[List[TypeVar]] + s =:= t + t =:= u + + for tvar <- tvars do + val entry = ctx.typerState.constraint.entry(tvar.origin) + assert(!ctx.typerState.constraint.occursAtToplevel(tvar.origin, entry), + i"cyclic bound for ${tvar.origin}: ${entry} in ${ctx.typerState.constraint}") + } diff --git a/compiler/test/dotty/tools/dotc/core/ShowDecoratorTest.scala b/compiler/test/dotty/tools/dotc/core/ShowDecoratorTest.scala new file mode 100644 index 000000000000..acc9d1914bf6 --- /dev/null +++ b/compiler/test/dotty/tools/dotc/core/ShowDecoratorTest.scala @@ -0,0 +1,21 @@ +package dotty.tools +package dotc +package core + +import Contexts.*, Decorators.*, Denotations.*, SymDenotations.*, Symbols.*, Types.* +import printing.Formatting.Show + +import org.junit.Test +import org.junit.Assert.* + +class ShowDecoratorTest extends DottyTest: + import ShowDecoratorTest.* + + @Test def t1 = assertEquals("... 
(cannot display due to FooException boom) ...", Foo().tryToShow) +end ShowDecoratorTest + +object ShowDecoratorTest: + import printing.*, Texts.* + class FooException extends Exception("boom") + case class Foo() extends Showable: + def toText(printer: Printer): Text = throw new FooException diff --git a/compiler/test/dotty/tools/dotc/coverage/CoverageTests.scala b/compiler/test/dotty/tools/dotc/coverage/CoverageTests.scala index 5d9458fe95c9..77e172f61167 100644 --- a/compiler/test/dotty/tools/dotc/coverage/CoverageTests.scala +++ b/compiler/test/dotty/tools/dotc/coverage/CoverageTests.scala @@ -4,13 +4,13 @@ import org.junit.Test import org.junit.AfterClass import org.junit.Assert.* import org.junit.experimental.categories.Category - import dotty.{BootstrappedOnlyTests, Properties} import dotty.tools.vulpix.* import dotty.tools.vulpix.TestConfiguration.* import dotty.tools.dotc.Main +import dotty.tools.dotc.reporting.TestReporter -import java.nio.file.{Files, FileSystems, Path, Paths, StandardCopyOption} +import java.nio.file.{FileSystems, Files, Path, Paths, StandardCopyOption} import scala.jdk.CollectionConverters.* import scala.util.Properties.userDir import scala.language.unsafeNulls @@ -85,6 +85,7 @@ object CoverageTests extends ParallelTesting: def testFilter = Properties.testsFilter def isInteractive = SummaryReport.isInteractive def updateCheckFiles = Properties.testsUpdateCheckfile + def failedTests = TestReporter.lastRunFailedTests given summaryReport: SummaryReporting = SummaryReport() @AfterClass def tearDown(): Unit = diff --git a/compiler/test/dotty/tools/dotc/parsing/DeSugarTest.scala b/compiler/test/dotty/tools/dotc/parsing/DeSugarTest.scala index a54880326704..bb2797c5d034 100644 --- a/compiler/test/dotty/tools/dotc/parsing/DeSugarTest.scala +++ b/compiler/test/dotty/tools/dotc/parsing/DeSugarTest.scala @@ -59,8 +59,8 @@ class DeSugarTest extends ParserTest { cpy.DefDef(tree1)(name, transformParamss(paramss), transform(tpt, Type), 
transform(tree1.rhs)) case tree1 @ TypeDef(name, rhs) => cpy.TypeDef(tree1)(name, transform(rhs, Type)) - case impl @ Template(constr, parents, self, _) => - cpy.Template(tree1)(transformSub(constr), transform(parents), Nil, transformSub(self), transform(impl.body, Expr)) + case impl @ Template(constr, _, self, _) => + cpy.Template(tree1)(transformSub(constr), transform(impl.parentsOrDerived), Nil, transformSub(self), transform(impl.body, Expr)) case Thicket(trees) => Thicket(flatten(trees mapConserve super.transform)) case tree1 => diff --git a/compiler/test/dotty/tools/dotc/printing/PrintingTest.scala b/compiler/test/dotty/tools/dotc/printing/PrintingTest.scala index 639b04089abc..2c970e93f573 100644 --- a/compiler/test/dotty/tools/dotc/printing/PrintingTest.scala +++ b/compiler/test/dotty/tools/dotc/printing/PrintingTest.scala @@ -21,6 +21,7 @@ import scala.io.Source import org.junit.Test import scala.util.Using import java.io.File + class PrintingTest { def options(phase: String, flags: List[String]) = @@ -45,7 +46,7 @@ class PrintingTest { } val actualLines = byteStream.toString(StandardCharsets.UTF_8.name).linesIterator - FileDiff.checkAndDump(path.toString, actualLines.toIndexedSeq, checkFilePath) + FileDiff.checkAndDumpOrUpdate(path.toString, actualLines.toIndexedSeq, checkFilePath) } def testIn(testsDir: String, phase: String) = diff --git a/compiler/test/dotty/tools/dotc/printing/SyntaxHighlightingTests.scala b/compiler/test/dotty/tools/dotc/printing/SyntaxHighlightingTests.scala index 2f35ccb35434..2e4b7bf1bb3f 100644 --- a/compiler/test/dotty/tools/dotc/printing/SyntaxHighlightingTests.scala +++ b/compiler/test/dotty/tools/dotc/printing/SyntaxHighlightingTests.scala @@ -12,8 +12,7 @@ class SyntaxHighlightingTests extends DottyTest { import SyntaxHighlighting._ private def test(source: String, expected: String): Unit = { - val testCtx = ctx.fresh.setSetting(ctx.settings.color, "always") - val highlighted = SyntaxHighlighting.highlight(source)(using 
testCtx) + val highlighted = SyntaxHighlighting.highlight(source)(using ctx.withColors) .replace(NoColor, ">") .replace(CommentColor, " JFile, FileOutputStream, StringWriter } +import java.io.{BufferedReader, FileInputStream, FileOutputStream, FileReader, PrintStream, PrintWriter, StringReader, StringWriter, File as JFile} import java.text.SimpleDateFormat import java.util.Date -import core.Decorators._ +import core.Decorators.* import scala.collection.mutable - +import scala.jdk.CollectionConverters.* import util.SourcePosition -import core.Contexts._ -import Diagnostic._ -import interfaces.Diagnostic.{ ERROR, WARNING } +import core.Contexts.* +import Diagnostic.* +import dotty.Properties +import interfaces.Diagnostic.{ERROR, WARNING} + +import scala.io.Codec class TestReporter protected (outWriter: PrintWriter, filePrintln: String => Unit, logLevel: Int) extends Reporter with UniqueMessagePositions with HideNonSensicalMessages with MessageRendering { @@ -84,17 +86,23 @@ extends Reporter with UniqueMessagePositions with HideNonSensicalMessages with M } object TestReporter { + private val testLogsDirName: String = "testlogs" + private val failedTestsFileName: String = "last-failed.log" + private val failedTestsFile: JFile = new JFile(s"$testLogsDirName/$failedTestsFileName") + private var outFile: JFile = _ private var logWriter: PrintWriter = _ + private var failedTestsWriter: PrintWriter = _ private def initLog() = if (logWriter eq null) { val date = new Date val df0 = new SimpleDateFormat("yyyy-MM-dd") val df1 = new SimpleDateFormat("yyyy-MM-dd-'T'HH-mm-ss") - val folder = s"testlogs/tests-${df0.format(date)}" + val folder = s"$testLogsDirName/tests-${df0.format(date)}" new JFile(folder).mkdirs() outFile = new JFile(s"$folder/tests-${df1.format(date)}.log") logWriter = new PrintWriter(new FileOutputStream(outFile, true)) + failedTestsWriter = new PrintWriter(new FileOutputStream(failedTestsFile, false)) } def logPrintln(str: String) = { @@ -144,4 +152,16 @@ 
object TestReporter { } rep } + + def lastRunFailedTests: Option[List[String]] = + Option.when( + Properties.rerunFailed && + failedTestsFile.exists() && + failedTestsFile.isFile + )(java.nio.file.Files.readAllLines(failedTestsFile.toPath).asScala.toList) + + def writeFailedTests(tests: List[String]): Unit = + initLog() + tests.foreach(failed => failedTestsWriter.println(failed)) + failedTestsWriter.flush() } diff --git a/compiler/test/dotty/tools/dotc/reporting/UserDefinedErrorMessages.scala b/compiler/test/dotty/tools/dotc/reporting/UserDefinedErrorMessages.scala index 4d73b0d88b55..807d3a19f8f3 100644 --- a/compiler/test/dotty/tools/dotc/reporting/UserDefinedErrorMessages.scala +++ b/compiler/test/dotty/tools/dotc/reporting/UserDefinedErrorMessages.scala @@ -26,9 +26,9 @@ class UserDefinedErrorMessages extends ErrorMessagesTest { given Context = itcx assertMessageCount(1, messages) - val (m: NoExplanation) :: Nil = messages: @unchecked + val (m: TypeMsg) :: Nil = messages: @unchecked - assertEquals(m.msg, "Could not prove Int =!= Int") + assertEquals(m.message, "Could not prove Int =!= Int") } @Test def userDefinedImplicitAmbiguous2 = @@ -50,9 +50,9 @@ class UserDefinedErrorMessages extends ErrorMessagesTest { given Context = itcx assertMessageCount(1, messages) - val (m: NoExplanation) :: Nil = messages: @unchecked + val (m: TypeMsg) :: Nil = messages: @unchecked - assertEquals(m.msg, "Could not prove Int =!= Int") + assertEquals(m.message, "Could not prove Int =!= Int") } @Test def userDefinedImplicitAmbiguous3 = @@ -75,9 +75,9 @@ class UserDefinedErrorMessages extends ErrorMessagesTest { given Context = itcx assertMessageCount(1, messages) - val (m: NoExplanation) :: Nil = messages: @unchecked + val (m: TypeMsg) :: Nil = messages: @unchecked - assertEquals(m.msg, "Could not prove Int =!= Int") + assertEquals(m.message, "Could not prove Int =!= Int") } @Test def userDefinedImplicitAmbiguous4 = @@ -97,9 +97,9 @@ class UserDefinedErrorMessages extends 
ErrorMessagesTest { given Context = itcx assertMessageCount(1, messages) - val (m: NoExplanation) :: Nil = messages: @unchecked + val (m: TypeMsg) :: Nil = messages: @unchecked - assertEquals(m.msg, "msg A=Any") + assertEquals(m.message, "msg A=Any") } @Test def userDefinedImplicitAmbiguous5 = @@ -119,8 +119,8 @@ class UserDefinedErrorMessages extends ErrorMessagesTest { given Context = itcx assertMessageCount(1, messages) - val (m: NoExplanation) :: Nil = messages: @unchecked + val (m: TypeMsg) :: Nil = messages: @unchecked - assertEquals(m.msg, "msg A=Any") + assertEquals(m.message, "msg A=Any") } } diff --git a/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala b/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala index eb6ab8e8fb5f..1e7d7ef2c708 100644 --- a/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala +++ b/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala @@ -20,7 +20,7 @@ class PatmatExhaustivityTest { val testsDir = "tests/patmat" // pagewidth/color: for a stable diff as the defaults are based on the terminal (e.g size) // stop-after: patmatexhaust-huge.scala crash compiler (but also hides other warnings..) 
- val options = List("-pagewidth", "80", "-color:never", "-Ystop-after:explicitSelf", "-classpath", TestConfiguration.basicClasspath) + val options = List("-pagewidth", "80", "-color:never", "-Ystop-after:explicitSelf", "-Ycheck-constraint-deps", "-classpath", TestConfiguration.basicClasspath) private def compile(files: List[JPath]): Seq[String] = { val opts = toolArgsFor(files).get(ToolName.Scalac).getOrElse(Nil) diff --git a/compiler/test/dotty/tools/dotc/transform/TypeTestsCastsTest.scala b/compiler/test/dotty/tools/dotc/transform/TypeTestsCastsTest.scala index 0db7a6072579..9f6f155a2ac2 100644 --- a/compiler/test/dotty/tools/dotc/transform/TypeTestsCastsTest.scala +++ b/compiler/test/dotty/tools/dotc/transform/TypeTestsCastsTest.scala @@ -6,6 +6,8 @@ import core.* import Contexts.*, Decorators.*, Denotations.*, SymDenotations.*, Symbols.*, Types.* import Annotations.* +import dotty.tools.dotc.util.Spans.Span + import org.junit.Test import org.junit.Assert.* @@ -15,7 +17,7 @@ class TypeTestsCastsTest extends DottyTest: @Test def orL = checkFound(List(StringType, LongType), OrType(LongType, StringType, false)) @Test def orR = checkFound(List(LongType, StringType), OrType(StringType, LongType, false)) - @Test def annot = checkFound(List(StringType, LongType), AnnotatedType(OrType(LongType, StringType, false), Annotation(defn.UncheckedAnnot))) + @Test def annot = checkFound(List(StringType, LongType), AnnotatedType(OrType(LongType, StringType, false), Annotation(defn.UncheckedAnnot, Span(0)))) @Test def andL = checkFound(List(StringType), AndType(StringType, AnyType)) @Test def andR = checkFound(List(StringType), AndType(AnyType, StringType)) diff --git a/compiler/test/dotty/tools/dotc/transform/patmat/SpaceEngineTest.scala b/compiler/test/dotty/tools/dotc/transform/patmat/SpaceEngineTest.scala new file mode 100644 index 000000000000..c13ef0532348 --- /dev/null +++ b/compiler/test/dotty/tools/dotc/transform/patmat/SpaceEngineTest.scala @@ -0,0 +1,64 @@ +package 
dotty.tools +package dotc +package transform +package patmat + +import core.*, Annotations.*, Contexts.*, Decorators.*, Flags.*, Names.*, StdNames.*, Symbols.*, Types.* +import ast.*, tpd.* + +import vulpix.TestConfiguration, TestConfiguration.basicClasspath + +import org.junit, junit.Test, junit.Assert.* + +class SpaceEngineTest: + import SpaceEngine.* + + @Test def isSubspaceTest1: Unit = inCompilerContext(basicClasspath) { + // Testing the property of `isSubspace` that: + // isSubspace(a, b) <=> simplify(simplify(a) - simplify(a)) == Empty + // Previously there were no simplify calls, + // and this is a counter-example, + // for which you need either to simplify(b) or simplify the minus result. + + val tp = defn.ConsType.appliedTo(defn.AnyType) + val unappTp = requiredMethod("scala.collection.immutable.::.unapply").termRef + val params = List(Empty, Typ(tp)) + + val a = Prod(tp, unappTp, params) + val b = Empty + + val res1 = isSubspace(a, b) + + val a2 = simplify(a) + val b2 = simplify(b) + val rem1 = minus(a2, b2) + val rem2 = simplify(rem1) + val res2 = rem2 == Empty + + assertEquals( + i"""|isSubspace: + | + |isSubspace(a, b) = $res1 + | + |Should be equivalent to: + |simplify(simplify(a) - simplify(b)) == Empty + |simplify(a2 - b2) == Empty + |simplify(rem1) == Empty + |rem2 == Empty + | + |a = ${show(a)} + |b = ${show(b)} + |a2 = ${show(a2)} + |b2 = ${show(b2)} + |rem1 = ${show(rem1)} + |rem2 = ${show(rem2)} + | + |a = ${a.toString} + |b = ${b.toString} + |a2 = ${a2.toString} + |b2 = ${b2.toString} + |rem1 = ${rem1.toString} + |rem2 = ${rem2.toString} + | + |""".stripMargin, res1, res2) + } diff --git a/compiler/test/dotty/tools/dotc/typer/InstantiateModel.scala b/compiler/test/dotty/tools/dotc/typer/InstantiateModel.scala new file mode 100644 index 000000000000..b08062913dac --- /dev/null +++ b/compiler/test/dotty/tools/dotc/typer/InstantiateModel.scala @@ -0,0 +1,57 @@ +package dotty.tools +package dotc +package typer + +// Modelling the decision in 
IsFullyDefined +object InstantiateModel: + enum LB { case NN; case LL; case L1 }; import LB.* + enum UB { case AA; case UU; case U1 }; import UB.* + enum Var { case V; case NotV }; import Var.* + enum MSe { case M; case NotM }; import MSe.* + enum Bot { case Fail; case Ok; case Flip }; import Bot.* + enum Act { case Min; case Max; case ToMax; case Skip; case False }; import Act.* + + // NN/AA = Nothing/Any + // LL/UU = the original bounds, on the type parameter + // L1/U1 = the constrained bounds, on the type variable + // V = variance >= 0 ("non-contravariant") + // MSe = minimisedSelected + // Bot = IfBottom + // ToMax = delayed maximisation, via addition to toMaximize + // Skip = minimisedSelected "hold off instantiating" + // False = return false + + // there are 9 combinations: + // # | LB | UB | d | // d = direction + // --+----+----+---+ + // 1 | L1 | AA | - | L1 <: T + // 2 | L1 | UU | - | L1 <: T <: UU + // 3 | LL | U1 | + | LL <: T <: U1 + // 4 | NN | U1 | + | T <: U1 + // 5 | L1 | U1 | 0 | L1 <: T <: U1 + // 6 | LL | UU | 0 | LL <: T <: UU + // 7 | LL | AA | 0 | LL <: T + // 8 | NN | UU | 0 | T <: UU + // 9 | NN | AA | 0 | T + + def decide(lb: LB, ub: UB, v: Var, bot: Bot, m: MSe): Act = (lb, ub) match + case (L1, AA) => Min + case (L1, UU) => Min + case (LL, U1) => Max + case (NN, U1) => Max + + case (L1, U1) => if m==M || v==V then Min else ToMax + case (LL, UU) => if m==M || v==V then Min else ToMax + case (LL, AA) => if m==M || v==V then Min else ToMax + + case (NN, UU) => bot match + case _ if m==M => Max + //case Ok if v==V => Min // removed, i14218 fix + case Fail if v==V => False + case _ => ToMax + + case (NN, AA) => bot match + case _ if m==M => Skip + case Ok if v==V => Min + case Fail if v==V => False + case _ => ToMax diff --git a/compiler/test/dotty/tools/repl/ReplCompilerTests.scala b/compiler/test/dotty/tools/repl/ReplCompilerTests.scala index 866647476888..ecdfeb512e1b 100644 --- a/compiler/test/dotty/tools/repl/ReplCompilerTests.scala 
+++ b/compiler/test/dotty/tools/repl/ReplCompilerTests.scala @@ -347,26 +347,11 @@ class ReplCompilerTests extends ReplTest: assertEquals("java.lang.AssertionError: assertion failed", all.head) } - @Test def i14491 = - initially { - run("import language.experimental.fewerBraces") - } andThen { - run("""|val x = Seq(7,8,9).apply: - | 1 - |""".stripMargin) - assertEquals("val x: Int = 8", storedOutput().trim) - } - initially { - run("""|import language.experimental.fewerBraces - |import language.experimental.fewerBraces as _ - |""".stripMargin) - } andThen { - run("""|val x = Seq(7,8,9).apply: - | 1 - |""".stripMargin) - assert("expected error if fewerBraces is unimported", - lines().exists(_.contains("missing arguments for method apply"))) - } + @Test def `i13097 expect lambda after colon` = contextually: + assert(ParseResult.isIncomplete("val x = List(42).foreach:")) + + @Test def `i13097 expect template after colon` = contextually: + assert(ParseResult.isIncomplete("class C:")) object ReplCompilerTests: diff --git a/compiler/test/dotty/tools/repl/ScriptedTests.scala b/compiler/test/dotty/tools/repl/ScriptedTests.scala index 5c3a32cd40f8..dc809228e86b 100644 --- a/compiler/test/dotty/tools/repl/ScriptedTests.scala +++ b/compiler/test/dotty/tools/repl/ScriptedTests.scala @@ -3,12 +3,16 @@ package tools package repl import org.junit.Test +import org.junit.experimental.categories.Category /** Runs all tests contained in `compiler/test-resources/repl/` */ class ScriptedTests extends ReplTest { @Test def replTests = scripts("/repl").foreach(testFile) + @Category(Array(classOf[BootstrappedOnlyTests])) + @Test def replMacrosTests = scripts("/repl-macros").foreach(testFile) + @Test def typePrinterTests = scripts("/type-printer").foreach(testFile) } diff --git a/compiler/test/dotty/tools/repl/ShadowingBatchTests.scala b/compiler/test/dotty/tools/repl/ShadowingBatchTests.scala index 5a96976bd867..7272c10aa003 100644 --- 
a/compiler/test/dotty/tools/repl/ShadowingBatchTests.scala +++ b/compiler/test/dotty/tools/repl/ShadowingBatchTests.scala @@ -32,6 +32,20 @@ class ShadowingBatchTests extends ErrorMessagesTest: ictx.setSetting(classpath, classpath.value + File.pathSeparator + dir.jpath.toAbsolutePath) } + @Test def io = + val lib = """|package io.foo + | + |object Bar { + | def baz: Int = 42 + |} + |""".stripMargin + val app = """|object Main: + | def main(args: Array[String]): Unit = + | println(io.foo.Bar.baz) + |""".stripMargin + checkMessages(lib).expectNoErrors + checkMessages(app).expectNoErrors + @Test def file = checkMessages("class C(val c: Int)").expectNoErrors checkMessages("object rsline1 {\n def line1 = new C().c\n}").expect { (_, msgs) => diff --git a/compiler/test/dotty/tools/repl/ShadowingTests.scala b/compiler/test/dotty/tools/repl/ShadowingTests.scala index 62a2322e38f0..98aa58a62a15 100644 --- a/compiler/test/dotty/tools/repl/ShadowingTests.scala +++ b/compiler/test/dotty/tools/repl/ShadowingTests.scala @@ -76,6 +76,18 @@ class ShadowingTests extends ReplTest(options = ShadowingTests.options): Files.delete(file) end compileShadowed + @Test def io = shadowedScriptedTest(name = "io", + shadowed = """|package io.foo + | + |object Bar { + | def baz: Int = 42 + |} + |""".stripMargin, + script = """|scala> io.foo.Bar.baz + |val res0: Int = 42 + |""".stripMargin + ) + @Test def i7635 = shadowedScriptedTest(name = "", shadowed = "class C(val c: Int)", script = @@ -122,13 +134,18 @@ class ShadowingTests extends ReplTest(options = ShadowingTests.options): |val y: String = foo | |scala> if (true) x else y - |val res0: Matchable = 42 + |val res0: Int | String = 42 |""".stripMargin.linesIterator.toList ) ShadowingTests.createSubDir("util") testScript(name = "", """|scala> import util.Try + |-- [E008] Not Found Error: ----------------------------------------------------- + |1 | import util.Try + | | ^^^ + | | value Try is not a member of util + |1 error found | |scala> object 
util { class Try { override def toString = "you've gotta try!" } } |// defined object util diff --git a/compiler/test/dotty/tools/repl/TabcompleteTests.scala b/compiler/test/dotty/tools/repl/TabcompleteTests.scala index 9cdb896963f1..910584a9b5e7 100644 --- a/compiler/test/dotty/tools/repl/TabcompleteTests.scala +++ b/compiler/test/dotty/tools/repl/TabcompleteTests.scala @@ -233,4 +233,8 @@ class TabcompleteTests extends ReplTest { val comp = tabComplete("BigInt(1).") assertTrue(comp.distinct.nonEmpty) } + + @Test def i9334 = initially { + assert(tabComplete("class Foo[T]; classOf[Foo].").contains("getName")) + } } diff --git a/compiler/test/dotty/tools/utils.scala b/compiler/test/dotty/tools/utils.scala index bfedc338f25a..75918674146c 100644 --- a/compiler/test/dotty/tools/utils.scala +++ b/compiler/test/dotty/tools/utils.scala @@ -17,8 +17,10 @@ import scala.util.control.{ControlThrowable, NonFatal} import dotc.config.CommandLineParser +object Dummy + def scripts(path: String): Array[File] = { - val dir = new File(this.getClass.getResource(path).getPath) + val dir = new File(Dummy.getClass.getResource(path).getPath) assert(dir.exists && dir.isDirectory, "Couldn't load scripts dir") dir.listFiles.filter { f => val path = if f.isDirectory then f.getPath + "/" else f.getPath diff --git a/compiler/test/dotty/tools/vulpix/FailedTestInfo.scala b/compiler/test/dotty/tools/vulpix/FailedTestInfo.scala new file mode 100644 index 000000000000..c7172f54aadc --- /dev/null +++ b/compiler/test/dotty/tools/vulpix/FailedTestInfo.scala @@ -0,0 +1,3 @@ +package dotty.tools.vulpix + +case class FailedTestInfo(title: String, extra: String) diff --git a/compiler/test/dotty/tools/vulpix/FileDiff.scala b/compiler/test/dotty/tools/vulpix/FileDiff.scala index c060c4d3938c..5e882be6425a 100644 --- a/compiler/test/dotty/tools/vulpix/FileDiff.scala +++ b/compiler/test/dotty/tools/vulpix/FileDiff.scala @@ -50,21 +50,6 @@ object FileDiff { outFile.writeAll(content.mkString("", EOL, EOL)) } - 
def checkAndDump(sourceTitle: String, actualLines: Seq[String], checkFilePath: String): Boolean = { - val outFilePath = checkFilePath + ".out" - FileDiff.check(sourceTitle, actualLines, checkFilePath) match { - case Some(msg) => - FileDiff.dump(outFilePath, actualLines) - println(msg) - println(FileDiff.diffMessage(checkFilePath, outFilePath)) - false - case _ => - val jOutFilePath = Paths.get(outFilePath) - Files.deleteIfExists(jOutFilePath) - true - } - } - def checkAndDumpOrUpdate(sourceTitle: String, actualLines: Seq[String], checkFilePath: String): Boolean = { val outFilePath = checkFilePath + ".out" FileDiff.check(sourceTitle, actualLines, checkFilePath) match { diff --git a/compiler/test/dotty/tools/vulpix/ParallelTesting.scala b/compiler/test/dotty/tools/vulpix/ParallelTesting.scala index 44565c44b681..bccbcbee29e1 100644 --- a/compiler/test/dotty/tools/vulpix/ParallelTesting.scala +++ b/compiler/test/dotty/tools/vulpix/ParallelTesting.scala @@ -12,7 +12,7 @@ import java.nio.file.{Files, NoSuchFileException, Path, Paths} import java.nio.charset.{Charset, StandardCharsets} import java.text.SimpleDateFormat import java.util.{HashMap, Timer, TimerTask} -import java.util.concurrent.{TimeUnit, TimeoutException, Executors => JExecutors} +import java.util.concurrent.{ExecutionException, TimeUnit, TimeoutException, Executors => JExecutors} import scala.collection.mutable import scala.io.{Codec, Source} @@ -57,6 +57,9 @@ trait ParallelTesting extends RunnerOrchestration { self => /** Tests should override the checkfiles with the current output */ def updateCheckFiles: Boolean + /** Contains a list of failed tests to run, if list is empty no tests will run */ + def failedTests: Option[List[String]] + /** A test source whose files or directory of files is to be compiled * in a specific way defined by the `Test` */ @@ -204,6 +207,14 @@ trait ParallelTesting extends RunnerOrchestration { self => protected def shouldSkipTestSource(testSource: TestSource): Boolean = false 
+ protected def shouldReRun(testSource: TestSource): Boolean = + failedTests.forall(rerun => testSource match { + case JointCompilationSource(_, files, _, _, _, _) => + rerun.exists(filter => files.exists(file => file.getPath.contains(filter))) + case SeparateCompilationSource(_, dir, _, _) => + rerun.exists(dir.getPath.contains) + }) + private trait CompilationLogic { this: Test => def suppressErrors = false @@ -359,7 +370,7 @@ trait ParallelTesting extends RunnerOrchestration { self => case SeparateCompilationSource(_, dir, _, _) => testFilter.exists(dir.getPath.contains) } - filteredByName.filterNot(shouldSkipTestSource(_)) + filteredByName.filterNot(shouldSkipTestSource(_)).filter(shouldReRun(_)) /** Total amount of test sources being compiled by this test */ val sourceCount = filteredSources.length @@ -409,14 +420,14 @@ trait ParallelTesting extends RunnerOrchestration { self => synchronized { reproduceInstructions.append(ins) } /** The test sources that failed according to the implementing subclass */ - private val failedTestSources = mutable.ArrayBuffer.empty[String] + private val failedTestSources = mutable.ArrayBuffer.empty[FailedTestInfo] protected final def failTestSource(testSource: TestSource, reason: Failure = Generic) = synchronized { val extra = reason match { case TimeoutFailure(title) => s", test '$title' timed out" case JavaCompilationFailure(msg) => s", java test sources failed to compile with: \n$msg" case Generic => "" } - failedTestSources.append(testSource.title + s" failed" + extra) + failedTestSources.append(FailedTestInfo(testSource.title, s" failed" + extra)) fail(reason) } @@ -483,6 +494,12 @@ trait ParallelTesting extends RunnerOrchestration { self => .and("-d", targetDir.getPath) .withClasspath(targetDir.getPath) + def waitForJudiciously(process: Process): Int = + try process.waitFor() + catch case _: InterruptedException => + try if process.waitFor(5L, TimeUnit.MINUTES) then process.exitValue() else -2 + finally 
Thread.currentThread.interrupt() + def compileWithJavac(fs: Array[String]) = if (fs.nonEmpty) { val fullArgs = Array( "javac", @@ -492,7 +509,7 @@ trait ParallelTesting extends RunnerOrchestration { self => val process = Runtime.getRuntime.exec(fullArgs) val output = Source.fromInputStream(process.getErrorStream).mkString - if (process.waitFor() != 0) Some(output) + if waitForJudiciously(process) != 0 then Some(output) else None } else None @@ -550,7 +567,7 @@ trait ParallelTesting extends RunnerOrchestration { self => def addToLast(str: String): Unit = diagnostics match case head :: tail => - diagnostics = Diagnostic.Error(s"${head.msg.rawMessage}$str", head.pos) :: tail + diagnostics = Diagnostic.Error(s"${head.msg.message}$str", head.pos) :: tail case Nil => var inError = false for line <- errorsText.linesIterator do @@ -665,7 +682,11 @@ trait ParallelTesting extends RunnerOrchestration { self => for fut <- eventualResults do try fut.get() - catch case ex: Exception => + catch + case ee: ExecutionException if ee.getCause.isInstanceOf[InterruptedException] => + System.err.println("Interrupted (probably running after shutdown)") + ee.printStackTrace() + case ex: Exception => System.err.println(ex.getMessage) ex.printStackTrace() @@ -740,8 +761,11 @@ trait ParallelTesting extends RunnerOrchestration { self => case _ => } case Failure(output) => - echo(s"Test '${testSource.title}' failed with output:") - echo(output) + if output == "" then + echo(s"Test '${testSource.title}' failed with no output") + else + echo(s"Test '${testSource.title}' failed with output:") + echo(output) failTestSource(testSource) case Timeout => echo("failed because test " + testSource.title + " timed out") diff --git a/compiler/test/dotty/tools/vulpix/SummaryReport.scala b/compiler/test/dotty/tools/vulpix/SummaryReport.scala index e216ac1c5d4f..74612387015f 100644 --- a/compiler/test/dotty/tools/vulpix/SummaryReport.scala +++ b/compiler/test/dotty/tools/vulpix/SummaryReport.scala @@ -3,7 
+3,6 @@ package tools package vulpix import scala.language.unsafeNulls - import scala.collection.mutable import dotc.reporting.TestReporter @@ -23,7 +22,7 @@ trait SummaryReporting { def reportPassed(): Unit /** Add the name of the failed test */ - def addFailedTest(msg: String): Unit + def addFailedTest(msg: FailedTestInfo): Unit /** Add instructions to reproduce the error */ def addReproduceInstruction(instr: String): Unit @@ -49,7 +48,7 @@ trait SummaryReporting { final class NoSummaryReport extends SummaryReporting { def reportFailed(): Unit = () def reportPassed(): Unit = () - def addFailedTest(msg: String): Unit = () + def addFailedTest(msg: FailedTestInfo): Unit = () def addReproduceInstruction(instr: String): Unit = () def addStartingMessage(msg: String): Unit = () def addCleanup(f: () => Unit): Unit = () @@ -66,7 +65,7 @@ final class SummaryReport extends SummaryReporting { import scala.jdk.CollectionConverters._ private val startingMessages = new java.util.concurrent.ConcurrentLinkedDeque[String] - private val failedTests = new java.util.concurrent.ConcurrentLinkedDeque[String] + private val failedTests = new java.util.concurrent.ConcurrentLinkedDeque[FailedTestInfo] private val reproduceInstructions = new java.util.concurrent.ConcurrentLinkedDeque[String] private val cleanUps = new java.util.concurrent.ConcurrentLinkedDeque[() => Unit] @@ -79,7 +78,7 @@ final class SummaryReport extends SummaryReporting { def reportPassed(): Unit = passed += 1 - def addFailedTest(msg: String): Unit = + def addFailedTest(msg: FailedTestInfo): Unit = failedTests.add(msg) def addReproduceInstruction(instr: String): Unit = @@ -108,7 +107,8 @@ final class SummaryReport extends SummaryReporting { startingMessages.asScala.foreach(rep.append) - failedTests.asScala.map(x => s" $x\n").foreach(rep.append) + failedTests.asScala.map(x => s" ${x.title}${x.extra}\n").foreach(rep.append) + TestReporter.writeFailedTests(failedTests.asScala.toList.map(_.title)) // If we're compiling 
locally, we don't need instructions on how to // reproduce failures diff --git a/compiler/test/dotty/tools/vulpix/TestConfiguration.scala b/compiler/test/dotty/tools/vulpix/TestConfiguration.scala index 3ea364cc3a68..5d2992b50a09 100644 --- a/compiler/test/dotty/tools/vulpix/TestConfiguration.scala +++ b/compiler/test/dotty/tools/vulpix/TestConfiguration.scala @@ -49,6 +49,7 @@ object TestConfiguration { withCompilerClasspath + File.pathSeparator + mkClasspath(List(Properties.dottyTastyInspector)) lazy val scalaJSClasspath = mkClasspath(List( + Properties.scalaJSJavalib, Properties.scalaJSLibrary, Properties.dottyLibraryJS )) diff --git a/compiler/test/dotty/tools/vulpix/VulpixMetaTests.scala b/compiler/test/dotty/tools/vulpix/VulpixMetaTests.scala index 75af0aa94893..0044ab8a94e5 100644 --- a/compiler/test/dotty/tools/vulpix/VulpixMetaTests.scala +++ b/compiler/test/dotty/tools/vulpix/VulpixMetaTests.scala @@ -30,6 +30,7 @@ object VulpixMetaTests extends ParallelTesting { def isInteractive = false // Don't beautify output for interactive use. def testFilter = Nil // Run all the tests. 
def updateCheckFiles: Boolean = false + def failedTests = None @AfterClass def tearDown() = this.cleanup() diff --git a/compiler/test/dotty/tools/vulpix/VulpixUnitTests.scala b/compiler/test/dotty/tools/vulpix/VulpixUnitTests.scala index 8a32fd636e76..baf61c845d96 100644 --- a/compiler/test/dotty/tools/vulpix/VulpixUnitTests.scala +++ b/compiler/test/dotty/tools/vulpix/VulpixUnitTests.scala @@ -108,6 +108,7 @@ object VulpixUnitTests extends ParallelTesting { def isInteractive = !sys.env.contains("DRONE") def testFilter = Nil def updateCheckFiles: Boolean = false + def failedTests = None @AfterClass def tearDown() = this.cleanup() diff --git a/compiler/test/worksheets/baseTypetest.sc b/compiler/test/worksheets/baseTypetest.sc index 001f1e3b3eaa..4dbd68a6fdc7 100644 --- a/compiler/test/worksheets/baseTypetest.sc +++ b/compiler/test/worksheets/baseTypetest.sc @@ -22,5 +22,5 @@ object baseTypetest extends DottyTest { defn.StringClass isSubClass defn.NullClass //> res4: Boolean = false defn.StringClass.typeRef.baseType(defn.NullClass) //> res5: dotty.tools.dotc.core.Types.Type = NoType - + } \ No newline at end of file diff --git a/compiler/test/worksheets/denotTest.sc b/compiler/test/worksheets/denotTest.sc index 222a347b6947..aa3fb383bd6f 100644 --- a/compiler/test/worksheets/denotTest.sc +++ b/compiler/test/worksheets/denotTest.sc @@ -7,7 +7,7 @@ import Types._, Symbols._ object denotTest extends DottyTest { println("Welcome to the Scala worksheet") //> Welcome to the Scala worksheet - + val str = defn.StringClass.typeRef //> str : dotty.tools.dotc.core.Types.TypeRef = TypeRef(ThisType(module class l //| ang#57),String) val d= str.member("getBytes".toTermName) //> d : dotty.tools.dotc.core.Denotations.Denotation = val getBytes val g @@ -47,7 +47,7 @@ object denotTest extends DottyTest { //| a#35),Array), scala$Array$$T, TypeAlias(TypeRef(ThisType(module class scala# //| 35),Char))), TypeRef(ThisType(module class scala#35),Int), TypeRef(ThisType( //| module class 
scala#35),Int)), TypeRef(ThisType(module class lang#57),String) - //| ), JavaMethodType(List(x$0), List(RefinedType(TypeRef(ThisType(module class + //| ), JavaMethodType(List(x$0), List(RefinedType(TypeRef(ThisType(module class //| scala#35),Array), scala$Array$$T, TypeAlias(TypeRef(ThisType(module class sc //| ala#35),Char)))), TypeRef(ThisType(module class lang#57),String)), JavaMetho //| dType(List(x$0), List(TypeRef(ThisType(module class scala#35),Any)), TypeRef diff --git a/compiler/test/worksheets/nesting.sc b/compiler/test/worksheets/nesting.sc index a6fc924320a0..bb3e9a71146e 100644 --- a/compiler/test/worksheets/nesting.sc +++ b/compiler/test/worksheets/nesting.sc @@ -2,7 +2,7 @@ package dotty.tools.dotc.core object nesting { class C { - + class D { private def x = "D" def show = x @@ -10,7 +10,7 @@ object nesting { println(x) } } - + val foo: D = { class D extends C.this.D { private def x = "foo.D" @@ -21,11 +21,11 @@ object nesting { new D } } - + val c = new C //> c : dotty.tools.dotc.core.nesting.C = dotty.tools.dotc.core.nesting$C@1a84d //| a23 val d = c.foo //> d : dotty.tools.dotc.core.nesting.c.D = dotty.tools.dotc.core.nesting$C$D$1 //| @2705d88a d.show //> res0: String = foo.D - + } \ No newline at end of file diff --git a/compiler/test/worksheets/periodtest.sc b/compiler/test/worksheets/periodtest.sc index 09c02da19a10..68a7cc43b20e 100644 --- a/compiler/test/worksheets/periodtest.sc +++ b/compiler/test/worksheets/periodtest.sc @@ -2,9 +2,9 @@ package dotty.tools.dotc.core object periodtest { println("Welcome to the Scala worksheet") //> Welcome to the Scala worksheet - + import Periods._ - + val p1 = Period(1, 2, 7) //> p1 : dotty.tools.dotc.core.Periods.Period = Period(2..7, run = 1) val p2 = Period(1, 3, 7) //> p2 : dotty.tools.dotc.core.Periods.Period = Period(3..7, run = 1) p1 contains p2 //> res0: Boolean = true diff --git a/compiler/test/worksheets/positiontest.sc b/compiler/test/worksheets/positiontest.sc index 11cc54dbeab9..b152368145f1 
100644 --- a/compiler/test/worksheets/positiontest.sc +++ b/compiler/test/worksheets/positiontest.sc @@ -5,7 +5,7 @@ import Positions._ object positiontest { println("Welcome to the Scala worksheet") //> Welcome to the Scala worksheet - + val p = Position(0, 1, 0) //> p : dotty.tools.dotc.util.Positions.Position = [0..1] val p2 = Position(0, 2) //> p2 : dotty.tools.dotc.util.Positions.Position = [0..2] val p3 = Position(1, 0) //> p3 : dotty.tools.dotc.util.Positions.Position = [no position] diff --git a/compiler/test/worksheets/testnames.sc b/compiler/test/worksheets/testnames.sc index 282b07d4edb7..8f042b7036fd 100644 --- a/compiler/test/worksheets/testnames.sc +++ b/compiler/test/worksheets/testnames.sc @@ -2,7 +2,7 @@ package dotty.tools.dotc.core object testnames { println("Welcome to the Scala worksheet") //> Welcome to the Scala worksheet - + import Names._ val n = termName("hello") //> n : dotty.tools.dotc.core.Names.TermName = hello val tn = n.toTypeName //> tn : dotty.tools.dotc.core.Names.TypeName = hello @@ -10,7 +10,7 @@ object testnames { assert(tn.toTermName eq n) assert(tn.toLocalName eq ln) assert(n.toLocalName eq ln) - + n == tn //> res0: Boolean = false n == ln //> res1: Boolean = false n eq tn //> res2: Boolean = false @@ -19,7 +19,7 @@ object testnames { val foo = encodedTermName("++") //> foo : dotty.tools.dotc.core.Names.TermName = $plus$plus foo.hashCode //> res5: Int = 5 foo.toTypeName.hashCode //> res6: Int = -5 - + val nfoo = n ++ foo //> nfoo : dotty.tools.dotc.core.testnames.n.ThisName = hello$plus$plus nfoo contains '$' //> res7: Boolean = true nfoo.replace('$', '.') //> res8: dotty.tools.dotc.core.testnames.nfoo.ThisName = hello.plus.plus @@ -36,7 +36,7 @@ object testnames { termName("abc") //> res18: dotty.tools.dotc.core.Names.TermName = abc nfoo.filter(_ >= 'l') //> res19: dotty.tools.dotc.core.Names.Name = lloplusplus nfoo map (_.toUpper) //> res20: dotty.tools.dotc.core.Names.Name = HELLO$PLUS$PLUS - + import Decorators._ val 
local = "local".toTermName.toLocalName //> local : dotty.tools.dotc.core.Names.LocalName = local diff --git a/dist/bin/scaladoc b/dist/bin/scaladoc index 67210f0d6b4f..8b9ec41a7f8c 100755 --- a/dist/bin/scaladoc +++ b/dist/bin/scaladoc @@ -61,15 +61,31 @@ classpathArgs () { CLASS_PATH+="$(find_lib "*tasty-core*")$PSEP" CLASS_PATH+="$(find_lib "*scala3-tasty-inspector*")$PSEP" CLASS_PATH+="$(find_lib "*flexmark-0*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-html-parser*")$PSEP" + CLASS_PATH+="$(find_lib "*flexmark*")$PSEP" CLASS_PATH+="$(find_lib "*flexmark-ext-anchorlink*")$PSEP" CLASS_PATH+="$(find_lib "*flexmark-ext-autolink*")$PSEP" CLASS_PATH+="$(find_lib "*flexmark-ext-emoji*")$PSEP" CLASS_PATH+="$(find_lib "*flexmark-ext-gfm-strikethrough*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-ext-gfm-tables*")$PSEP" CLASS_PATH+="$(find_lib "*flexmark-ext-gfm-tasklist*")$PSEP" CLASS_PATH+="$(find_lib "*flexmark-ext-wikilink*")$PSEP" CLASS_PATH+="$(find_lib "*flexmark-ext-yaml-front-matter*")$PSEP" + CLASS_PATH+="$(find_lib "*flexmark-ext-tables*")$PSEP" + CLASS_PATH+="$(find_lib "*flexmark-ext-ins*")$PSEP" + CLASS_PATH+="$(find_lib "*flexmark-ext-superscript*")$PSEP" + CLASS_PATH+="$(find_lib "*flexmark-util*")$PSEP" + CLASS_PATH+="$(find_lib "*flexmark-util-ast*")$PSEP" + CLASS_PATH+="$(find_lib "*flexmark-util-data*")$PSEP" + CLASS_PATH+="$(find_lib "*flexmark-util-dependency*")$PSEP" + CLASS_PATH+="$(find_lib "*flexmark-util-misc*")$PSEP" + CLASS_PATH+="$(find_lib "*flexmark-util-format*")$PSEP" + CLASS_PATH+="$(find_lib "*flexmark-util-sequence*")$PSEP" + CLASS_PATH+="$(find_lib "*flexmark-util-builder*")$PSEP" + CLASS_PATH+="$(find_lib "*flexmark-util-collection*")$PSEP" + CLASS_PATH+="$(find_lib "*flexmark-util-visitor*")$PSEP" + CLASS_PATH+="$(find_lib "*flexmark-util-options*")$PSEP" + CLASS_PATH+="$(find_lib "*flexmark-util-html*")$PSEP" + CLASS_PATH+="$(find_lib "*flexmark-formatter*")$PSEP" + CLASS_PATH+="$(find_lib "*flexmark-ast*")$PSEP" 
CLASS_PATH+="$(find_lib "*liqp*")$PSEP" CLASS_PATH+="$(find_lib "*jsoup*")$PSEP" CLASS_PATH+="$(find_lib "*jackson-dataformat-yaml*")$PSEP" @@ -80,7 +96,6 @@ classpathArgs () { CLASS_PATH+="$(find_lib "*jline-reader*")$PSEP" CLASS_PATH+="$(find_lib "*jline-terminal-3*")$PSEP" CLASS_PATH+="$(find_lib "*jline-terminal-jna*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-util*")$PSEP" CLASS_PATH+="$(find_lib "*flexmark-formatter*")$PSEP" CLASS_PATH+="$(find_lib "*autolink-0.6*")$PSEP" CLASS_PATH+="$(find_lib "*flexmark-jira-converter*")$PSEP" @@ -93,9 +108,6 @@ classpathArgs () { CLASS_PATH+="$(find_lib "*protobuf-java*")$PSEP" CLASS_PATH+="$(find_lib "*util-interface*")$PSEP" CLASS_PATH+="$(find_lib "*jna-5*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-ext-tables*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-ext-ins*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-ext-superscript*")$PSEP" CLASS_PATH+="$(find_lib "*antlr4-runtime*")$PSEP" jvm_cp_args="-classpath \"$CLASS_PATH\"" diff --git a/docs/_assets/css/color-brewer.css b/docs/_assets/css/color-brewer.css deleted file mode 100644 index b832a05ebc51..000000000000 --- a/docs/_assets/css/color-brewer.css +++ /dev/null @@ -1,66 +0,0 @@ -/* - -Colorbrewer theme -Original: https://github.com/mbostock/colorbrewer-theme (c) Mike Bostock -Ported by Fabrício Tavares de Oliveira - -*/ - -/* .hljs { - background: transparent; -} - -.hljs, -.hljs-subst { - color: #000; -} */ - -/*.hljs-string, -.hljs-meta, -.hljs-symbol, -.hljs-template-tag, -.hljs-template-variable, -.hljs-addition { - color: #756bb1; -}*/ - -/* .hljs-comment, -.hljs-quote { - color: #636363; -} - -.hljs-number, -.hljs-regexp, -.hljs-literal, -.hljs-bullet, -.hljs-link { - color: #31a354; -} - -.hljs-deletion, -.hljs-variable { - color: #88f; -} */ - -/*.hljs-keyword, -.hljs-selector-tag, -.hljs-title, -.hljs-section, -.hljs-built_in, -.hljs-doctag, -.hljs-type, -.hljs-tag, -.hljs-name, -.hljs-selector-id, -.hljs-selector-class, -.hljs-strong { - color: #3182bd; 
-}*/ - -/* .hljs-emphasis { - font-style: italic; -} - -.hljs-attribute { - color: #e6550d; -} */ diff --git a/docs/_assets/css/dottydoc.css b/docs/_assets/css/dottydoc.css index ca7613835ff0..6408fac3fab4 100644 --- a/docs/_assets/css/dottydoc.css +++ b/docs/_assets/css/dottydoc.css @@ -1,243 +1,23 @@ -html, body { - font-weight: 300; - height: 100%; -} - -main.container { - min-height: 100vh; - padding: 15px 15px; - padding-bottom: 45px; /* prevents the content to be hidden by the gitter sidecar */ -} - -.container img { - width: 100%; - height: auto; -} - -/* headers */ -main header { - border-bottom: 1px solid rgba(0,0,0,.1); - margin-bottom: 16px; - padding-bottom: 16px; -} - -main > h1 { - margin-bottom: 20px; -} - .byline { font-size: 14px; + display: flex; + margin-top: 10px; } -.byline, .byline a { - color: grey; -} -.byline .author { - display: block; -} - -/* indexes */ -ul.post-list { - list-style: none; - padding-left: 0; -} -.post-list h2 { - margin-bottom: 0; -} - -/* headings anchors */ -a.anchor { - color: transparent; - margin-left: -23px; - padding-right: 3px; - transition: color .4s ease-out; -} - -a.anchor::before { - content: "\f0c1"; - font-family: "Font Awesome 5 Free"; - font-weight: 900; - font-size: 20px; -} - -h1:hover a.anchor, -h2:hover a.anchor, -h3:hover a.anchor, -h4:hover a.anchor, -h5:hover a.anchor { - color: lightgrey; - text-decoration: none; -} - -h1:hover a.anchor:hover, -h2:hover a.anchor:hover, -h3:hover a.anchor:hover, -h4:hover a.anchor:hover, -h5:hover a.anchor:hover { - color: var(--secondary); -} - -/* blog footer */ -.blog-author { - color: gray; -} - -.blog-author img#author-img { +.byline img#author-img { width: auto; height: auto; - max-width:100px; - max-height:100px; - border-radius: 50%; + max-width: 50px; + border-radius: 10px; } -/* api docs */ -.api span.letter-anchor { - float: left; - width: 50px; - height: 50px; - border-radius: 50px; - color: white; - margin-top: 6px; - margin-right: 8px; - line-height: 
50px; - text-align: center; - text-decoration: none; - font-size: 43px; - font-family: var(--font-family-sans-serif); -} -.api span.letter-anchor.object { - line-height: 48px; -} -.api span.letter-anchor.class { - line-height: 48px; - padding-right: 3px; -} -.letter-anchor.object { - background: #2c6c8d; -} -.letter-anchor.class { - background: #44ad7d; -} -.letter-anchor.trait { - background: #19aacf; -} -.letter-anchor.enum { - background: #7803fc; -} -.letter-anchor.package { - background: #2c6c8d; -} - -.api header { - font-family: var(--font-family-sans-serif); +.byline, .byline a { + color: grey; } -.api header .name-prefix { +.byline .author { display: block; } -.api header .name-suffix { - display: inline-block; -} - -.api header h1 { - margin: -13px 8px 0 0; - display: inline-block; -} -.api h2 { - margin-top: 1rem; -} -.api h3 { - display: inline; - margin: 0; - font: inherit; - font-weight: bold; -} - -/* improved display and wrapping of parameters */ -.api .params, .api .type-params { - display: inline-flex; - flex-flow: wrap; -} - -/* api layout */ -.wide-table { - display: table; - width: 100%; -} -.api .member:hover { - background: var(--doc-bg); - cursor: pointer; -} -.api .left-column { - white-space: nowrap; - padding-left: 1em; - border-left: 3px solid transparent;/* table rows cannot have borders*/ - font-family: var(--font-family-monospace); - text-align: right; - width: 1px; -} -.api .member:hover .left-column { - border-left: 3px solid var(--secondary); -} -.api .right-column { - display: inline; - text-align: right; - font-family: var(--font-family-monospace); -} - -/* admonitions */ -blockquote { - padding: 0 1em; - color: #777; - border-left: 0.25em solid #ddd; -} - -aside { - padding: 15px; - margin: 10px 0; -} - -aside.warning { - border-left: 3px solid var(--red500); - background-color: var(--aside-warning-bg); -} - -aside.notice { - border-left: 3px solid #4c97e4; - background-color: #e4ebff; -} - -aside.success { - border-left: 3px 
solid #36bf1d; - background-color: #ebfddd; -} - -/* media queries for bigger screens (dottydoc is mobile-first) */ -@media (min-width: 576px) { - .byline .author { - display: inline; - margin-left: 1em; - } - main.container { - padding: 15px 30px; - } -} -@media (min-width: 768px) { - .api .member { - display: table-row; - } - .api .left-column { - display: table-cell; - } - .api .right-column { - display: flex; - flex-flow: wrap; - } - main.container { - padding: 15px 45px; - } -} -header { - position: static !important; - width: 100% !important; +.byline .secondary-infos{ + margin-left: 10px; } diff --git a/docs/_assets/images/contribution/breakpoint.jpg b/docs/_assets/images/contribution/breakpoint.jpg new file mode 100644 index 000000000000..748088c269c9 Binary files /dev/null and b/docs/_assets/images/contribution/breakpoint.jpg differ diff --git a/docs/_assets/images/contribution/call-stack.jpg b/docs/_assets/images/contribution/call-stack.jpg new file mode 100644 index 000000000000..8fac2371a6c1 Binary files /dev/null and b/docs/_assets/images/contribution/call-stack.jpg differ diff --git a/docs/_assets/images/contribution/conditional-breakpoint.jpg b/docs/_assets/images/contribution/conditional-breakpoint.jpg new file mode 100644 index 000000000000..11bab89d3f47 Binary files /dev/null and b/docs/_assets/images/contribution/conditional-breakpoint.jpg differ diff --git a/docs/_assets/images/contribution/create-config.jpg b/docs/_assets/images/contribution/create-config.jpg new file mode 100644 index 000000000000..60479233ee70 Binary files /dev/null and b/docs/_assets/images/contribution/create-config.jpg differ diff --git a/docs/_assets/images/contribution/debug-console.jpg b/docs/_assets/images/contribution/debug-console.jpg new file mode 100644 index 000000000000..c9a669019d65 Binary files /dev/null and b/docs/_assets/images/contribution/debug-console.jpg differ diff --git a/docs/_assets/images/contribution/import-build.jpg 
b/docs/_assets/images/contribution/import-build.jpg new file mode 100644 index 000000000000..79be8450cd4a Binary files /dev/null and b/docs/_assets/images/contribution/import-build.jpg differ diff --git a/docs/_assets/images/contribution/launch-config-file.jpg b/docs/_assets/images/contribution/launch-config-file.jpg new file mode 100644 index 000000000000..4270f6b2326a Binary files /dev/null and b/docs/_assets/images/contribution/launch-config-file.jpg differ diff --git a/docs/_assets/images/contribution/start-debugger.jpg b/docs/_assets/images/contribution/start-debugger.jpg new file mode 100644 index 000000000000..edf17d700afc Binary files /dev/null and b/docs/_assets/images/contribution/start-debugger.jpg differ diff --git a/docs/_assets/images/contribution/toolbar.jpg b/docs/_assets/images/contribution/toolbar.jpg new file mode 100644 index 000000000000..22ae60ba27e1 Binary files /dev/null and b/docs/_assets/images/contribution/toolbar.jpg differ diff --git a/docs/_blog/index.html b/docs/_blog/index.html index 055b069b303d..a59b8702d326 100644 --- a/docs/_blog/index.html +++ b/docs/_blog/index.html @@ -1,6 +1,6 @@ --- layout: static-site-main -title: Blog +title: Blog (archive) ---

{{ page.title }}

diff --git a/docs/_docs/contributing/architecture/context.md b/docs/_docs/contributing/architecture/context.md new file mode 100644 index 000000000000..cd38ee437867 --- /dev/null +++ b/docs/_docs/contributing/architecture/context.md @@ -0,0 +1,53 @@ +--- +layout: doc-page +title: Contexts +--- + +`dotc` has almost no global state (with the exception of the name table, +which hashes strings into unique names). Instead, all +essential bits of information that can vary over a compiler [run](./lifecycle.md) are collected +in a `Context` (defined in [Contexts]). + +Most methods in the compiler depend on an implicit anonymous `Context` parameter, +and a typical definition looks like the following: +```scala +import dotty.tools.dotc.Contexts.{Context, ctx} + +def doFoo(using Context): Unit = + val current = ctx.run // access the Context parameter with `ctx` +``` + +## Memory Leaks +> **Careful:** Contexts can be heavy so beware of memory leaks + +It is good practice to ensure that implicit contexts are not +captured in closures or other long-lived objects, in order to avoid space leaks +in the case where a closure can survive several compiler runs (e.g. a +lazy completer for a library class that is never required). In that case, the +convention is that the `Context` be an explicit parameter, to track its usage. 
+ +## Context Properties + +| Context property | description | +|-------------------|----------------------------------------| +| `compilationUnit` | current compilation unit | +| `phase` | current phase | +| `run` | current run | +| `period` | current period | +| `settings` | the config passed to the compiler | +| `reporter` | operations for logging errors/warnings | +| `definitions` | the standard built in definitions | +| `platform` | operations for the underlying platform | +| `tree` | current tree | +| `scope` | current scope | +| `typer` | current typer | +| `owner` | current owner symbol | +| `outer` | outer Context | +| `mode` | type checking mode | +| `typerState` | | +| `searchHistory` | | +| `implicits` | | +| ... | and so on | + + +[Contexts]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/core/Contexts.scala diff --git a/docs/_docs/contributing/architecture/index.md b/docs/_docs/contributing/architecture/index.md new file mode 100644 index 000000000000..9b976cc643cd --- /dev/null +++ b/docs/_docs/contributing/architecture/index.md @@ -0,0 +1,14 @@ +--- +layout: index +title: High Level Architecture +--- + +This chapter of the guide describes the architecture and concepts of `dotc`, +the Scala 3 compiler, including answers to questions such as: +- "What are the transformations that happen to my code?" +- "How do I run a compiler programatically?" +- "What are symbols, denotations, names and types?" +- "What is a compiler phase?" +- "What is the compiler Context?" + +and many more. 
diff --git a/docs/_docs/contributing/architecture/lifecycle.md b/docs/_docs/contributing/architecture/lifecycle.md new file mode 100644 index 000000000000..2cf58f477da3 --- /dev/null +++ b/docs/_docs/contributing/architecture/lifecycle.md @@ -0,0 +1,90 @@ +--- +layout: doc-page +title: Compiler Overview +--- + +At a high level, `dotc` is an interactive compiler (see [what is a compiler?](../index.md#what-is-a-compiler)), +and can be invoked frequently, for example to answer questions for an IDE, provide REPL completions, +or to manage incremental builds and more. Each of these use cases requires a customised +workflow, but sharing a common core. + +## Introducing the Compiler's Lifecycle + +#### Core +Customisation is provided by extending the [Compiler] class, which maintains an ordered +list of [phases][Phases], and how to [run][Run] them. Each interaction with a compiler +creates a new run, which is a complete iteration of the compiler's phases over a list +of input sources. Each run has the capability to create new definitions or +invalidate older ones, and `dotc` can [track these changes over time](../architecture/time.md). + +#### Runs +During a run, the input sources are converted to [compilation units][CompilationUnit] (i.e. the abstraction of +compiler state associated with each input source); then iteratively: a single phase is applied to +every compilation unit before progressing to the next phase. + +#### Phases +A phase is an abstract transformation over a compilation unit, it is usually responsible +for transforming the trees and types representing the code of a source file. Some phases of +the compiler are: +- `parser`, which converts text that matches Scala's + [syntax] into abstract syntax trees, ASTs +- `typer`, which checks that trees conform to expected types +- `erasure`, which retypes a more simplified program into one that has the same types as the JVM. 
+- `genBCode`, the JVM backend, which converts erased compiler trees into Java bytecode format. + +[You can read more about phases here](../architecture/phases.md#phase-categories). + +#### Drivers + +The core compiler also requires a lot of state to be initialised before use, such as [settings][ScalaSettings] +and the [Context](../architecture/context.md). For convenience, the [Driver] class contains high level functions for +configuring the compiler and invoking it programatically. The object [Main] inherits from `Driver` +and is invoked by the `scalac` script. + +## Code Structure + +The code of the compiler is found in the package [dotty.tools], +containing the following sub-packages: +```scala +tools // contains helpers and the `scala` generic runner +├── backend // Compiler backends (currently JVM and JS) +├── dotc // The main compiler, with subpackages: +│ ├── ast // Abstract syntax trees +│   ├── classpath +│   ├── config // Compiler configuration, settings, platform specific definitions. +│   ├── core // Core data structures and operations, with specific subpackages for: +│   │   ├── classfile // Reading of Java classfiles into core data structures +│   │   ├── tasty // Reading and writing of TASTY files to/from core data structures +│   │   └── unpickleScala2 // Reading of Scala2 symbol information into core data structures +│   ├── decompiler // pretty printing TASTY as code +│   ├── fromtasty // driver for recompilation from TASTY +│   ├── interactive // presentation compiler and code completions +│   ├── parsing // Scanner and parser +│   ├── plugins // compile plugin definitions +│   ├── printing // Pretty-printing trees, types and other data +│   ├── profile // internals for profiling the compiler +│   ├── quoted // internals for quoted reflection +│   ├── reporting // Reporting of error messages, warnings and other info. +│   ├── rewrites // Helpers for rewriting Scala 2's constructs into Scala 3's. 
+│   ├── sbt // Helpers for communicating with the Zinc compiler. +│   ├── semanticdb // Helpers for exporting semanticdb from trees. +│   ├── transform // Miniphases and helpers for tree transformations. +│   ├── typer // Type-checking +│   └── util // General purpose utility classes and modules. +├── io // Helper modules for file access and classpath handling. +├── repl // REPL driver and interaction with the terminal +├── runner // helpers for the `scala` generic runner script +└── scripting // scala runner for the -script argument +``` + + +[Phases]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/core/Phases.scala +[CompilationUnit]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/CompilationUnit.scala + +[dotty.tools]: https://github.com/lampepfl/dotty/tree/master/compiler/src/dotty/tools +[ScalaSettings]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala +[syntax]: https://docs.scala-lang.org/scala3/reference/syntax.html +[Main]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/Main.scala +[Driver]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/Driver.scala +[Compiler]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/Compiler.scala +[Run]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/Run.scala \ No newline at end of file diff --git a/docs/_docs/contributing/architecture/phases.md b/docs/_docs/contributing/architecture/phases.md new file mode 100644 index 000000000000..844ae144dddb --- /dev/null +++ b/docs/_docs/contributing/architecture/phases.md @@ -0,0 +1,108 @@ +--- +layout: doc-page +title: Compiler Phases +--- + +As described in the [compiler overview](lifecycle.md#phases), `dotc` is divided into a list of [phases][Phase], +specified in the [Compiler] class. 
+ +#### Printing the phases of the Compiler + +a flattened list of all the phases can be displayed by invoking +the compiler with the `-Xshow-phases` flag: +``` +$ scalac -Xshow-phases +``` + +## Phase Groups + +In class [Compiler] you can access the list of phases with the method `phases`: + +```scala +def phases: List[List[Phase]] = + frontendPhases ::: picklerPhases ::: transformPhases ::: backendPhases +``` + +You can see that phases are actually grouped into sublists, given by the signature +`List[List[Phase]]`; that is, each sublist forms a phase group that is then *fused* into a +single tree traversal when a [Run] is executed. + +Phase fusion allows each phase of a group to be small and modular, +(each performing a single function), while reducing the number of tree traversals +and increasing performance. + +Phases are able to be grouped together if they inherit from [MiniPhase]. + +## Phase Categories + +Phases fall into four categories, allowing customisation by sub-classes of [Compiler]: + +### `frontendPhases` +In the main compiler these include [parser], [typer], [posttyper], +[prepjsinterop] and phases for producing SemanticDB and communicating with the +incremental compiler Zinc. +The [parser] reads source programs and generates untyped abstract syntax trees, which +in [typer] are then typechecked and transformed into typed abstract syntax trees. +Following is [posttyper], performing checks and cleanups that require a fully typed program. +In particular, it +- creates super accessors representing `super` calls in traits +- creates implementations of compiler-implemented methods, +such as `equals` and `hashCode` for case classes. +- marks [compilation units][CompilationUnit] that require inline expansion, or quote pickling +- simplifies trees of erased definitions +- checks variance of type parameters +- mark parameters passed unchanged from subclass to superclass for later pruning. 
+ +### `picklerPhases` +These phases start with [pickler], which serializes typed trees +produced by the `frontendPhases` into TASTy format. Following is [inlining], +which expand calls to inline methods, and [postInlining] providing implementations +of the [Mirror] framework for inlined calls. +Finally are [staging], which ensures that quotes conform to the +[Phase Consistency Principle (PCP)][PCP], and [pickleQuotes] which converts quoted +trees to embedded TASTy strings. + +### `transformPhases` +These phases are concerned with tranformation into lower-level forms +suitable for the runtime system, with two sub-groupings: +- High-level transformations: All phases from [firstTransform] to [erasure]. + Most of these phases transform syntax trees, expanding high-level constructs + to more primitive ones. + - An important transform phase is [patternMatcher], which converts match + trees and patterns into lower level forms, as well as checking the + exhaustivity of sealed types, and unreachability of pattern cases. + - Some phases perform further checks on more primitive trees, + e.g. [refchecks] verifies that no abstract methods exist in concrete classes, + and [initChecker] checks that fields are not used before initialisation. + - The last phase in the group, [erasure] translates all + types into types supported directly by the JVM. To do this, it performs + another type checking pass, but using the rules of the JVM's type system + instead of Scala's. +- Low-level transformations: All phases from `ElimErasedValueType` to + `CollectSuperCalls`. These further transform trees until they are essentially a + structured version of Java bytecode. + +### `backendPhases` +These map the transformed trees to Java classfiles or SJSIR files. 
+ +[CompilationUnit]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/CompilationUnit.scala +[Compiler]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/Compiler.scala +[Phase]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/core/Phases.scala +[MiniPhase]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/transform/MegaPhase.scala +[Run]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/Run.scala +[parser]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/parsing/ParserPhase.scala +[typer]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/typer/TyperPhase.scala +[posttyper]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/transform/PostTyper.scala +[prepjsinterop]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala +[pickler]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/transform/Pickler.scala +[inlining]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/transform/Inlining.scala +[postInlining]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/transform/PostInlining.scala +[staging]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/transform/Staging.scala +[pickleQuotes]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/transform/PickleQuotes.scala +[refchecks]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/typer/RefChecks.scala +[initChecker]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/transform/init/Checker.scala +[firstTransform]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala +[patternMatcher]: 
https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala +[erasure]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/transform/Erasure.scala +[Mirror]: https://github.com/lampepfl/dotty/blob/master/library/src/scala/deriving/Mirror.scala +[PCP]: ../../reference/metaprogramming/macros.md#the-phase-consistency-principle diff --git a/docs/_docs/contributing/architecture/symbols.md b/docs/_docs/contributing/architecture/symbols.md new file mode 100644 index 000000000000..c19588a4ff12 --- /dev/null +++ b/docs/_docs/contributing/architecture/symbols.md @@ -0,0 +1,70 @@ +--- +layout: doc-page +title: Symbols +--- + +As discussed previously, `dotc` [maintains time-indexed views](time.md) of various +compiler artifacts. The following sections discuss how they are managed in the compiler. + +## Symbols + +Defined in [Symbols], a `Symbol` is a unique identifier for a definition (e.g. a method, +type, or field). A `ClassSymbol` extends `Symbol` and represents either a +`class`, or a `trait`, or an `object`. A `Symbol` can even refer to non-Scala entities, +such as from the Java standard library. + +## Definitions are Dynamic + +Traditionally, compilers store context-dependent data in a _symbol table_. +Where a symbol then is the central reference to address context-dependent data. +`dotc` instead uses a phase-indexed function (known as +a [Denotation][Denotations]) to compute views of definitions across phases, +as many of attributes associated with definitions are phase-dependent. For example: +- types are gradually simplified by several phases, +- owners change in [lambdaLift] (local methods are lifted to an enclosing class) + and [flatten] (when inner classes are moved to the top level) +- Names are changed when private members need to be accessed from outside + their class (for instance from a nested class or a class implementing + a trait). 
+ +Additionally, symbols are not suitable to be used as a reference to +a definition in another [compilation unit][CompilationUnit]. +In the context of incremental compilation, a symbol from +an external compilation unit may be deleted or changed, making the reference +stale. To counter this, `dotc` types trees of cross-module references with either +a `TermRef` or `TypeRef`. A reference type contains a prefix type and a name. +The denotation that the type refers to is established dynamically based on +these fields. + +## Denotations + +On its own a `Symbol` has no structure. Its semantic meaning is given by being associated +with a [Denotation][Denotations]. + +A denotation is the result of resolving a name during a given period, containing the information +describing some entity (either a term or type), indexed by phase. Denotations usually have a +reference to a selected symbol, but not always, for example if the denotation is overloaded, +i.e. a `MultiDenotation`. + +### SymDenotations +All definition symbols will contain a `SymDenotation`. The denotation, in turn, contains: +- a reverse link to the source symbol +- a reference to the enclosing symbol that defined the source symbol: + - for a local variable, the enclosing method + - for a field or class, the enclosing class +- a set of [flags], describing the definition (e.g. whether it's a trait or mutable). +- the type of the definition (through the `info` method) +- a [signature][Signature1], which uniquely identifies overloaded methods (or else `NotAMethod`). +- and more. + +A class symbol will instead be associated with a `ClassDenotation`, which extends `SymDenotation` +with some additional fields specific for classes. 
+ +[Signature1]: https://github.com/lampepfl/dotty/blob/a527f3b1e49c0d48148ccfb2eb52e3302fc4a349/compiler/src/dotty/tools/dotc/core/Signature.scala#L9-L33 +[Symbols]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/core/Symbols.scala +[flatten]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/transform/Flatten.scala +[lambdaLift]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/transform/LambdaLift.scala +[CompilationUnit]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/CompilationUnit.scala +[Denotations]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/core/Denotations.scala +[SymDenotations]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/core/SymDenotations.scala +[flags]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/core/Flags.scala diff --git a/docs/_docs/contributing/architecture/time.md b/docs/_docs/contributing/architecture/time.md new file mode 100644 index 000000000000..588b1ce40bb2 --- /dev/null +++ b/docs/_docs/contributing/architecture/time.md @@ -0,0 +1,68 @@ +--- +layout: doc-page +title: Time in the Compiler +--- + +In the [compiler overview](lifecycle.md) section, we saw that `dotc` is an interactive compiler, +and so can answer questions about entities as they come into existance and change throughout time, +for example: +- which new definitions were added in a REPL session? +- which definitions were replaced in an incremental build? +- how are definitions simplified as they are adapted to the runtime system? + +## Hours, Minutes, and Periods + +For the compiler to be able to resolve the above temporal questions, and more, it maintains +a concept of time. Additionally, because interactions are frequent, it is important to +persist knowledge of entities between interactions, allowing the compiler to remain performant. 
+Knowing about time allows the compiler to efficiently mark entities as being outdated. + +Conceptually, `dotc` works like a clock, where its minutes are represented by [phases](phases.md), +and its hours by [runs]. Like a clock, each run passes once each of its phases have completed +sequentially, and then a new run can begin. Phases are further grouped into [periods], where +during a period certain entities of the compiler remain stable. + +## Time Travel + +During a run, each phase can rewrite the world as the compiler sees it, for example: +- to transform trees, +- to gradually simplify type from Scala types to JVM types, +- to move definitions out of inner scopes to outer ones, fitting the JVM's model, +- and so on. + +Because definitions can [change over time](symbols.md#definitions-are-dynamic), various artifacts associated with them +are stored non-destructively, and views of the definition created earlier, or later +in the compiler can be accessed by using the `atPhase` method, defined in [Contexts]. 
+ +As an example, assume the following definitions are available in a [Context](context.md): +```scala +class Box { type X } + +def foo(b: Box)(x: b.X): List[b.X] = List(x) +``` + +You can compare the type of definition `foo` after the [typer] phase and after the [erasure] phase +by using `atPhase`: +```scala +import dotty.tools.dotc.core.Contexts.{Context, atPhase} +import dotty.tools.dotc.core.Phases.{typerPhase, erasurePhase} +import dotty.tools.dotc.core.Decorators.i + +given Context = … + +val fooDef: Symbol = … // `def foo(b: Box)(x: b.X): List[b.X]` + +println(i"$fooDef after typer => ${atPhase(typerPhase.next)(fooDef.info)}") +println(i"$fooDef after erasure => ${atPhase(erasurePhase.next)(fooDef.info)}") +``` +and see the following output: +``` +method foo after typer => (b: Box)(x: b.X): scala.collection.immutable.List[b.X] +method foo after erasure => (b: Box, x: Object): scala.collection.immutable.List +``` + +[runs]: https://github.com/lampepfl/dotty/blob/a527f3b1e49c0d48148ccfb2eb52e3302fc4a349/compiler/src/dotty/tools/dotc/Run.scala +[periods]: https://github.com/lampepfl/dotty/blob/a527f3b1e49c0d48148ccfb2eb52e3302fc4a349/compiler/src/dotty/tools/dotc/core/Periods.scala +[Contexts]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/core/Contexts.scala +[typer]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/typer/TyperPhase.scala +[erasure]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/transform/Erasure.scala diff --git a/docs/_docs/contributing/architecture/types.md b/docs/_docs/contributing/architecture/types.md new file mode 100644 index 000000000000..64543e555e69 --- /dev/null +++ b/docs/_docs/contributing/architecture/types.md @@ -0,0 +1,147 @@ +--- +layout: doc-page +title: Compiler Types +--- + +## Common Types and their Representation + +Type representations in `dotc` derive from the class `dotty.tools.dotc.core.Types.Type`, +defined in [Types.scala]. 
The `toString` method on `Type` will display types in a +format corresponding to the backing data structure, e.g. `ExprType(...)` +corresponds to `class ExprType`, defined in [Types.scala]. + +> You can inspect the representation of any type using the [dotty.tools.printTypes][DottyTypeStealer] +> script, its usage and integration into your debugging workflow is [described here](../issues/inspection.md). + +### Types of Definitions + +The following table describes definitions in Scala 3, followed by the `dotc` representation +of two types - a reference to the definition, and then its underlying type. + +**Note**: in the following types, `p` refers to the self-type of the enclosing scope of +the definition, or `NoPrefix` for local definitions and parameters. + +Definition | Reference | Underlying Type +------------------------|-----------------|------------------------- +`type Z >: A <: B` | `TypeRef(p, Z)` | `RealTypeBounds(A, B)` +`type Z = A` | `TypeRef(p, Z)` | `TypeAlias(A)` +`type F[T] = T match …` | `TypeRef(p, F)` | `MatchAlias([T] =>> T match …)` +`class C` | `TypeRef(p, C)` | `ClassInfo(p, C, …)` +`trait T` | `TypeRef(p, T)` | `ClassInfo(p, T, …)` +`object o` | `TermRef(p, o)` | `TypeRef(p, o$)` where `o$` is a class +`def f(x: A): x.type` | `TermRef(p, f)` | `MethodType(x, A, TermParamRef(x))` +`def f[T <: A]: T` | `TermRef(p, f)` | `PolyType(T, <: A, TypeParamRef(T))` +`def f: A` | `TermRef(p, f)` | `ExprType(A)` +`(x: => A)` | `TermRef(p, x)` | `ExprType(A)` where `x` is a parameter +`val x: A` | `TermRef(p, x)` | `A` + +### Types of Values + +The following types may appear in part of the type of an expression: + +Type | Representation +--------------------------|------------------------------ +`x.y.type` | `TermRef(x, y)` +`X#T` | `TypeRef(X, T)` +`x.y.T` and `x.y.type#T` | `TypeRef(TermRef(x, y), T)` +`this.type` | `ThisType(C)` where `C` is the enclosing class +`"hello"` | `ConstantType(Constant("hello"))` +`A & B` | `AndType(A, B)` +`A | B` | 
`OrType(A, B)` +`A @foo` | `AnnotatedType(A, @foo)` +`[T <: A] =>> T` | `HKTypeLambda(T, <: A, TypeParamRef(T))` +`x.C[A, B]` | `AppliedType(x.C, List(A, B))` +`C { type A = T }` | `RefinedType(C, A, T)`
when `T` is not a member of `C` +`C { type X = Y }` | `RecType(RefinedType(C, X, z.Y))`
when `X` and `Y` are members of `C`
and `z` is a `RecThis` over the enclosing `RecType` +`super.x.type` | `TermRef(SuperType(…), x)` + +## Constructing Types + +### Method Definition Types + +You can see above that method definitions can have an underlying type of +either `PolyType`, `MethodType`, or `ExprType`. `PolyType` and `MethodType` +may be mixed recursively however, and either can appear as the result type of the other. + +Take this example as given: + +```scala +def f[A, B <: Seq[A]](x: A, y: B): Unit +``` +it can be constructed by the following code: + +```scala +import dotty.tools.dotc.core.Types.* +import dotty.tools.dotc.core.Symbols.* +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Decorators.* + +given Context = … // contains the definitions of the compiler + +val f: Symbol = … // def f[A, B <: Seq[A]](x: A, y: B): Unit + +f.info = PolyType( + List("A".toTypeName, "B".toTypeName))( + pt => List( + TypeBounds(defn.NothingType, defn.AnyType), + TypeBounds(defn.NothingType, AppliedType(defn.SeqType, List(pt.newParamRef(0)))) + ), + pt => MethodType( + List("x".toTermName, "y".toTermName))( + mt => List(pt.newParamRef(0), pt.newParamRef(1)), + mt => defn.UnitType + ) +) +``` + +Note that `pt.newParamRef(0)` and `pt.newParamRef(1)` refers to the +type parameters `A` and `B` respectively. + +## Proxy Types and Ground Types +Types in `dotc` are divided into two semantic kinds: +- Ground Types (inheriting from either `CachedGroundType` or `UncachedGroundType`) +- Proxy Types (inheriting from `TypeProxy` via either `CachedProxyType` or `UncachedProxyType`) + +A Proxy Type is anything that can be considered to be an abstraction of another type, +which can be accessed by the `underlying` method of the `TypeProxy` class. It's dual, the +Ground Type has no meaningful underlying type, typically it is the type of method and class +definitions, but also union types and intersection types, along with utility types of the +compiler. 
+ +Here's a diagram, serving as the mental model of the most important and distinct types available after the `typer` phase, derived from [dotty/tools/dotc/core/Types.scala][1]: + +``` +Type -+- proxy_type --+- NamedType --------+- TypeRef + | | \ + | +- SingletonType ----+- TermRef + | | +- ThisType + | | +- SuperType + | | +- ConstantType + | | +- TermParamRef + | | +- RecThis + | | +- SkolemType + | +- TypeParamRef + | +- RefinedOrRecType -+-- RefinedType + | | -+-- RecType + | +- AppliedType + | +- TypeBounds + | +- ExprType + | +- AnnotatedType + | +- TypeVar + | +- HKTypeLambda + | +- MatchType + | + +- ground_type -+- AndType + +- OrType + +- MethodOrPoly -----+-- PolyType + | +-- MethodType + +- ClassInfo + +- NoType + +- NoPrefix + +- ErrorType + +- WildcardType + +``` + +[Types.scala]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/core/Types.scala +[DottyTypeStealer]: https://github.com/lampepfl/dotty/blob/master/compiler/test/dotty/tools/DottyTypeStealer.scala diff --git a/docs/_docs/contributing/contribute-knowledge.md b/docs/_docs/contributing/contribute-knowledge.md deleted file mode 100644 index 7164774ac1df..000000000000 --- a/docs/_docs/contributing/contribute-knowledge.md +++ /dev/null @@ -1,12 +0,0 @@ ---- -layout: doc-page -title: Contributing Knowledge ---- - -# Contribute Internals-related Knowledge -If you know anything useful at all about Dotty, feel free to log this knowledge: - -- [📜Log the Knowledge](https://github.com/lampepfl/dotty-knowledge/issues/new) -- [🎓More about Logging the Knowledge](https://github.com/lampepfl/dotty-knowledge/blob/master/README.md) - -In short, no need to make it pretty, particularly human-readable or give it a particular structure. Just dump the knowledge you have and we'll take it from there. 
\ No newline at end of file diff --git a/docs/_docs/contributing/getting-started.md b/docs/_docs/contributing/getting-started.md index c842fd0a49d0..af9f2f0783b8 100644 --- a/docs/_docs/contributing/getting-started.md +++ b/docs/_docs/contributing/getting-started.md @@ -3,15 +3,41 @@ layout: doc-page title: Getting Started --- +## Scala CLA +Sometime before submitting your pull request you'll want to make sure you have +signed the [Scala CLA][scala-cla]. You can read more about why we require a CLA +and what exactly is included in it [here][scala-cla]. -Requirements ------------- -Make sure that you are using macOS or Linux (or WSL on Windows) with Java 8 or newer. You can determine which version of the JDK is the -default by typing `java -version` in a Terminal window. +## Making sure the team is aware + +Before digging into an issue or starting on a new feature it's a good idea to +make sure an [issue][dotty-issue] or a [discussion][dotty-discussion] has been +created outlining what you plan to work on. This is both for your and the team's +benefit. It ensures you get the help you need, and also gives the compiler team +a heads-up that someone is working on an issue. + +For some small changes like documentation, this isn't always necessary, but it's +never a bad idea to check. + +## Requirements + +- [git] is essential for managing the Scala 3 code, and contributing to GitHub, + where the code is hosted. +- A Java Virtual Machine (JDK 8 or higher), required for running the build tool. + - download Java from [Oracle Java 8][java8], [Oracle Java 11][java11], + or [AdoptOpenJDK 8/11][adopt]. Refer to [JDK Compatibility][compat] for Scala/Java compatibility detail. + - Verify that the JVM is installed by running the following command in a terminal: `java -version`. +- [sbt][sbt-download], the build tool required to build the Scala 3 compiler and libraries. 
+ +## Nice To Have + +An IDE, such as [Metals] will help you develop in Scala 3 with features such as autocompletion or goto-definition, +and with the [VS Code][vs-code] text editor you can even use the Scala debugger, or create interactive worksheets for an +iterative workflow. + +## Compiling and Running -Compiling and Running ---------------------- Start by cloning the repository: ```bash @@ -48,8 +74,8 @@ $ scala HelloWorld ``` -Starting a REPL ---------------- +## Starting a REPL + ```bash $ sbt > repl @@ -64,8 +90,9 @@ or via bash: ```bash $ scala ``` -Publish to local repository ---------------------------------- + +## Publish to local repository + To test our cloned compiler on local projects: ```bash @@ -79,8 +106,8 @@ ThisBuild / scalaVersion := "-bin-SNAPSHOT" where `dotty-version` can be found in the file `project/Build.scala`, like `3.0.0-M2` -Generating Documentation -------------------------- +## Generating Documentation + To generate this page and other static page docs, run ```bash $ sbt @@ -92,9 +119,22 @@ Before contributing to Dotty, we invite you to consult the [Dotty Developer Guidelines](https://github.com/lampepfl/dotty/blob/main/CONTRIBUTING.md). 
-Community -------------- +## Community + The main development discussion channels are: - [github.com/lampepfl/dotty/discussions](https://github.com/lampepfl/dotty/discussions) - [contributors.scala-lang.org](https://contributors.scala-lang.org) - [gitter.im/scala/contributors](https://gitter.im/scala/contributors) + +[git]: https://git-scm.com +[Metals]: https://scalameta.org/metals/ +[vs-code]: https://code.visualstudio.com +[lampepfl/dotty]: https://github.com/lampepfl/dotty +[sbt-download]: https://www.scala-sbt.org/download.html +[java8]: https://www.oracle.com/java/technologies/javase-jdk8-downloads.html +[java11]: https://www.oracle.com/java/technologies/javase-jdk11-downloads.html +[adopt]: https://adoptopenjdk.net/ +[compat]: https://docs.scala-lang.org/overviews/jdk-compatibility/overview.html +[scala-cla]: https://www.lightbend.com/contribute/cla/scala +[dotty-issue]: https://github.com/lampepfl/dotty/issues +[dotty-discussion]: https://github.com/lampepfl/dotty/discussions diff --git a/docs/_docs/contributing/index.md b/docs/_docs/contributing/index.md index 6cf0def2d5e2..0cc87e4b3500 100644 --- a/docs/_docs/contributing/index.md +++ b/docs/_docs/contributing/index.md @@ -2,3 +2,48 @@ layout: index title: Contributing --- + +This guide is intended to give new contributors the knowledge they need to +become productive and fix issues or implement new features in Scala 3. It +also documents the inner workings of the Scala 3 compiler, `dotc`. + +### This is a living document + +Keep in mind that the code for `dotc` is continually changing, so the ideas +discussed in this guide may fall out of date. This is a living document, so +please consider contributing to it on +[GitHub](https://github.com/scala/docs.scala-lang/tree/main/_overviews/scala3-contribution) +if you notice anything out of date, or report any issues +[here](https://github.com/scala/docs.scala-lang/issues). 
+ +### Get the Most from This Guide + +`dotc` is built with Scala 3, fully utilising its [new +features](https://docs.scala-lang.org/scala3/new-in-scala3.html). It is recommended that you first have +some familiarity with Scala 3 to get the most out of this guide. You can learn +more in the [language reference](../reference/overview.md). + +Many code snippets in this guide make use of shell commands (a line beginning +with `$`), and in this case a `bash` compatible shell is assumed. You may have +to look up how to translate commands to your shell. + +### What is a Compiler? + +Let's start at the beginning and first look at the question of "what is a +compiler?". A compiler is a program that takes as input text, representing a +program in one language and produces as output the same program, written in +another programming language. + +#### The Scala Compiler + +As an example, `dotc` takes text input, verifies that it is a valid Scala program +and then produces as output the same program, but written in Java bytecode, and optionally +in SJSIR when producing Scala.js output. + +### Contribute Internals-related Knowledge +If you know anything useful at all about Dotty, feel free to log this knowledge: + +- [📜Log the Knowledge](https://github.com/lampepfl/dotty-knowledge/issues/new) +- [🎓More about Logging the Knowledge](https://github.com/lampepfl/dotty-knowledge/blob/master/README.md) + +In short, no need to make it pretty, particularly human-readable or give it a particular structure. Just dump the knowledge you have and we'll take it from there. 
\ No newline at end of file diff --git a/docs/_docs/contributing/issues/areas.md b/docs/_docs/contributing/issues/areas.md new file mode 100644 index 000000000000..4f9adf79ba77 --- /dev/null +++ b/docs/_docs/contributing/issues/areas.md @@ -0,0 +1,70 @@ +--- +layout: doc-page +title: Common Issue Locations +--- + +Many issues are localised to small domains of the compiler and are self-contained, +here is a non-exhaustive list of such domains, and the files associated with them: + +### Pretty Printing of Types and Trees + +Objects in the compiler that inherit from [Showable] can be pretty printed. +The pretty-printing of objects is used in many places, from debug output, +to user-facing error messages and printing of trees after each phase. + +Look in [RefinedPrinter] (or its parent class [PlainPrinter]) for the implementation of pretty printing. + +### Content of Error Messages + +You can find the definitions of most error messages in [messages] (with IDs +defined in [ErrorMessageID]). If the message is not defined there, try the +`-Ydebug-error` compiler flag, which will print a stack trace leading to the +production of the error, and the contents of the message. + +### Compiler Generated Given Instances + +If the issue lies in given instances provided by the compiler, such as `scala.reflect.ClassTag`, +`scala.deriving.Mirror`, `scala.reflect.TypeTest`, `scala.CanEqual`, `scala.ValueOf`, +`scala.reflect.Manifest`, etc, look in [Synthesizer], which provides factories for +given instances. + +### Compiler Generated Methods + +Members can be generated for many classes, such as `equals` and `hashCode` +for case classes and value classes, and `ordinal` and `fromProduct` for Mirrors. +To change the implementation, see [SyntheticMembers]. + +### Code Completions +For suggestions to auto-complete method selections, see [Completion]. + +### Enum Desugaring +See [Desugar] and [DesugarEnums]. + +### Pattern Match Exhaustivity +See [Space]. 
+ +### Metaprogramming + +#### Quotes Reflection +See the [quoted runtime package][quotes-impl]. + +#### Inline match +See [Inliner]. + +#### Compiletime Ops Types +See `tryCompiletimeConstantFold` in [Types]. + +[Showable]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/printing/Showable.scala +[PlainPrinter]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala +[RefinedPrinter]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala +[ErrorMessageID]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala +[messages]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/reporting/messages.scala +[Synthesizer]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala +[SyntheticMembers]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/transform/SyntheticMembers.scala +[quotes-impl]: https://github.com/lampepfl/dotty/tree/master/compiler/src/scala/quoted/runtime/impl +[Inliner]: https://github.com/lampepfl/dotty/blob/main/compiler/src/dotty/tools/dotc/inlines/Inliner.scala +[Types]: https://github.com/lampepfl/dotty/tree/master/compiler/src/dotty/tools/dotc/core/Types.scala +[Completion]: https://github.com/lampepfl/dotty/tree/master/compiler/src/dotty/tools/dotc/interactive/Completion.scala +[DesugarEnums]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/ast/DesugarEnums.scala +[Desugar]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/ast/Desugar.scala +[Space]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala diff --git a/docs/_docs/contributing/issues/cause.md b/docs/_docs/contributing/issues/cause.md new file mode 100644 index 000000000000..5bb04e894f70 --- /dev/null +++ 
b/docs/_docs/contributing/issues/cause.md @@ -0,0 +1,115 @@ +--- +layout: doc-page +title: Finding the Cause of an Issue +--- + +In this section, you will be able to answer questions such as: +- where does an error happen in a codebase? +- when during compilation was a particular tree introduced? +- where is a particular object created? +- where is a particular value assigned to a variable? + +> You may be able to quickly find the source responsible for an issue by consulting [common issue locations](../issues/areas.md) + +## What phase generated a particular tree? + +As described in the [compiler lifecycle](../architecture/lifecycle.md#phases), each phase transforms the trees +and types that represent your code in a certain way. + +To print the code as it is transformed through the compiler, use the compiler flag `-Xprint:all`. +After each phase group is completed, you will see the resulting trees representing the code. + +> It is recommended to test `-Xprint:all` on a single, small file, otherwise a lot of unnecessary +> output will be generated. + +### Trace a Tree Creation Site + +When you see a problematic tree appear after a certain phase group, you know to isolate the rest of +your search to the code of that phase. For example if you found a problematic tree after phase +`posttyper`, the problem most likely appears in the code of [PostTyper]. We can trace the exact point +the tree was generated by looking for its unique ID, and then generating a stack trace at its creation: + +1. Run the compiler with `-Xprint:posttyper` and `-Yshow-tree-ids` flags. + This will only print the trees of the `posttyper` phase. This time you should see the tree + in question be printed alongside its ID. You'll see something like `println#223("Hello World"#37)`. +2. Copy the ID of the desired tree. +3. Run the compiler with the `-Ydebug-tree-with-id <tree-id>` flag. The compiler will print a stack trace + pointing to the creation site of the tree with the provided ID.
+ +### Enhanced Tree Printing + +As seen above `-Xprint:<phase>` can be enhanced with further configuration flags, found in +[ScalaSettings]. For example, you can additionally print the type of a tree with `-Xprint-types`. + +## Increasing Logging Output +Once you have identified the phase that generated a certain tree, you can then increase +logging in that phase, to try and detect erroneous states: + +- general logging within a phase can be enabled with the `-Ylog` compiler flag, such as + - `-Ylog:<phase1>,<phase2>,...` for individual phases + - `-Ylog:all` for all phases. +- Additionally, various parts of the compiler have specialised logging objects, defined in [Printers]. + Change any of the printers of interest from `noPrinter` to `default` and increase output specialised + to that domain. + +## Navigating to Where an Error is Generated + +The compiler issues user-facing errors for code that is not valid, such as the type mismatch +of assigning an `Int` to a `Boolean` value. Sometimes these errors do not match what is expected, which could be a bug. + +To discover why such a *spurious* error is generated, you can trace the code that generated the error by +adding the `-Ydebug-error` compiler flag, e.g. `scala3/scalac -Ydebug-error Test.scala`. +This flag forces a stack trace to be printed each time an error happens, from the site where it occurred. + +Analysing the trace will give you a clue about the objects involved in producing the error. +For example, you can add some debug statements before the error is issued to discover +the state of the compiler. [See some useful ways to debug values.](./inspection.md) + +### Where was a particular object created? + +If you navigate to the site of the error, and discover a problematic object, you will want to know +why it exists in such a state, as it could be the cause of the error. You can discover the +creation site of that object to understand the logic that created it.
+ +You can do this by injecting a *tracer* into the class of an instance in question. +A tracer is the following variable: +```scala +val tracer = Thread.currentThread.getStackTrace.mkString("\n") +``` +When placed as a member definition at a class, it will contain a stack trace pointing at where exactly +its particular instance was created. + +Once you've injected a tracer into a class, you can `println` that tracer from the error site or +other site you've found the object in question. + +#### Procedure + +1. Determine the type of the object in question. You can use one of the following techniques to do so: + - Use an IDE to get the type of an expression, or save the expression to a `val` + and see its inferred type. + - Use `println` to print the object or use `getClass` on that object. +2. Locate the type definition for the type of that object. +3. Add a field `val tracer = Thread.currentThread.getStackTrace.mkString("\n")` to that type definition. +4. `println(x.tracer)` (where `x` is the name of the object in question) from the original site where you + encountered the object. This will give you the stack trace pointing to the place where the + constructor of that object was invoked. + +### Where was a particular value assigned to a variable? + +Say you have a certain [type](../architecture/types.md) assigned to a [Denotation] and you would like to know why it has that +specific type. The type of a denotation is defined by `var myInfo: Type`, and can be assigned multiple times. +In this case, knowing the creation site of that `Type`, as described above, is not useful; instead, you need to +know the *assignment* (not *creation*) site. + +This is done similarly to how you trace the creation site. Conceptually, you need to create a proxy for that variable that will log every write operation to it. Practically, if you are trying to trace the assignments to a variable `myInfo` of type `Type`, first, rename it to `myInfo_debug`. 
Then, insert the following at the same level as that variable: + +```scala +var tracer = "" +def myInfo: Type = myInfo_debug +def myInfo_=(x: Type) = { tracer = Thread.currentThread.getStackTrace.mkString("\n"); myInfo_debug = x } +``` + +[Printers]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/config/Printers.scala +[Denotation]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/core/Denotations.scala +[PostTyper]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/transform/PostTyper.scala +[ScalaSettings]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala diff --git a/docs/_docs/contributing/issues/checklist.md b/docs/_docs/contributing/issues/checklist.md new file mode 100644 index 000000000000..e2fcf32531de --- /dev/null +++ b/docs/_docs/contributing/issues/checklist.md @@ -0,0 +1,135 @@ +--- +layout: doc-page +title: Pull Request Checklist +--- + +Once you solved the issue you were working on, you'll likely want to see your +changes added to the [Scala 3 repo][lampepfl/dotty]. To do that, you need to +prepare a [pull request][pull-request] with your changes. Assuming that the team +is aware of what you've been working on, here are some final steps that you'll want +to keep in mind as you create your PR. + +### 1. Sign the CLA + +Make sure you have signed the [Scala CLA][cla]. If you have any questions about +what this is and why it's required you can read further about it [here][cla]. + +### 2. Make sure your work is on its own branch + +When submitting your pull request it's always best to ensure the branch name is +unique to the changes you're working on. It's important not to submit your PR on +your `main` branch as this blocks maintainers from making any changes to your PR +if necessary. + +### 3: Add Tests + +Add at least one test that replicates the problem in the issue, and that shows it is now resolved.
+ +You may of course add variations of the test code to try and eliminate edge cases. +[Become familiar with testing in Scala 3](./testing.md). + +### 4: Add Documentation + +Please ensure that all code is documented to explain its use, even if only internal +changes are made. This refers to scaladocs and also any changes that might be +necessary in the reference docs. + +### 5: Double check everything + +Here are a couple of tips to keep in mind. + +- [DRY (Don't Repeat Yourself)][dry] +- [Scouts Rule][scouts] +- When adding new code try to use [optional braces][optional-braces]. If you're rewriting old code, + you should also use optional braces unless it introduces more code changes + than necessary. + +### 6: Commit Messages + +Here are some guidelines when writing commits for Dotty. + +1. If your work spans multiple local commits (for example, if you do safe point + commits while working in a feature branch or work in a branch for a long time + doing merges/rebases etc.) then please do not commit it all but rewrite the + history by squashing the commits into one large commit which is accompanied + by a detailed commit message (as discussed in the following sections). + For more info, see the article: [Git Workflow][git-workflow]. Additionally, + every commit should be able to be used in isolation—that is, each commit must + build and pass all tests. + +2. The first line should be a descriptive sentence about what the commit is + doing. It should be possible to fully understand what the commit does by just + reading this single line. It is **not ok** to only list the ticket number, + type "minor fix" or similar. If the commit has a corresponding ticket, + include a reference to the ticket number, prefixed with "Closes #", at the + beginning of the first line followed by the title of the ticket, assuming + that it aptly and concisely summarizes the commit in a single line. If the + commit is a small fix, then you are done. If not, go to 3. + +3.
Following the single line description (ideally no more than 70 characters + long) should be a blank line followed by an enumerated list with the details + of the commit. + +4. Add keywords for your commit (depending on the degree of automation we reach, + the list may change over time): + * ``Review by @githubuser`` - will notify the reviewer via GitHub. Everyone + is encouraged to give feedback, however. (Remember that @-mentions will + result in notifications also when pushing to a WIP branch, so please only + include this in your commit message when you're ready for your pull + request to be reviewed. Alternatively, you may request a review in the + pull request's description.) + * ``Fix/Fixing/Fixes/Close/Closing/Refs #ticket`` - if you want to mark the + ticket as fixed in the issue tracker (Assembla understands this). + * ``backport to _branch name_`` - if the fix needs to be cherry-picked to + another branch (like 2.9.x, 2.10.x, etc) + +Example: + +``` +fix: here is your pr title briefly mentioning the topic + +Here is the body of your pr with some more information + - Details 1 + - Details 2 + - Details 3 + +Closes #2 +``` + +### 7: Create your PR! + +When the feature or fix is completed you should open a [Pull +Request](https://help.github.com/articles/using-pull-requests) on GitHub. + +If you're not actually finished yet and are just looking for some initial input +on your approach, feel free to open a [Draft PR][draft]. This lets reviewers +know that you're not finished yet. It's also a good idea to put a [wip] in front +of your PR title to make this extra clear. + +Shortly after creating your pull request a maintainer should assign someone to +review it. If this doesn't happen after a few days, feel free to ping someone on +the [Scala Contributors Discord][discord] or tag someone on the PR. Depending on +the type of pull request there might be multiple people that take a look at your +changes.
There might also be community input as we try to keep the review +process as open as possible. + +### 8: Addressing feedback + +More than likely you'll get feedback from the reviewers, so you'll want to make +sure to address everything. When in doubt, don't hesitate to ask for +clarification or more information. + +Once you finally see the "LGTM" (Looks Good To Me or Let's Get This Merged) +your PR will be merged in! + +[pull-request]: https://docs.github.com/en?query=pull+requests +[lampepfl/dotty]: https://github.com/lampepfl/dotty +[cla]: http://typesafe.com/contribute/cla/scala +[issues]: https://github.com/lampepfl/dotty/issues +[full-list]: https://github.com/lampepfl/dotty/blob/master/CONTRIBUTING.md +[discord]: https://discord.gg/TSmY9zkHar +[dry]: https://www.oreilly.com/library/view/97-things-every/9780596809515/ch30.html +[scouts]: https://www.oreilly.com/library/view/97-things-every/9780596809515/ch08.html +[optional-braces]: https://docs.scala-lang.org/scala3/reference/other-new-features/indentation.html +[draft]: https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/about-pull-requests#draft-pull-requests +[git-workflow]: http://sandofsky.com/blog/git-workflow.html diff --git a/docs/_docs/contributing/issues/debugging.md b/docs/_docs/contributing/issues/debugging.md new file mode 100644 index 000000000000..2d8a9e5941e4 --- /dev/null +++ b/docs/_docs/contributing/issues/debugging.md @@ -0,0 +1,189 @@ +--- +layout: doc-page +title: Debugging the Compiler +--- + +The debugger is a powerful tool to navigate the internals of the compiler and track bugs. + +You can start the Scala debugger in VSCode using [Metals](https://scalameta.org/metals/). +In this page you will learn how to configure it, and how to use it. + +## Importing the project in VSCode using Metals + +The first step is to import the build in Metals, if it has not yet been imported.
+ +To do so you can open the [lampepfl/dotty][lampepfl/dotty] repository in VSCode and click `Import build` in Metals view. +It may take a few minutes to import, compile and index the full project. + +![Import build](/images/contribution/import-build.jpg) + +If you have any trouble with importing, you can try to switch the build server from Bloop to sbt, +by running the `Metals: Switch build server` command from VSCode command palette. + +## Configuring the debugger + +To configure the debugger in VSCode, you can go to the `Run and Debug` view and click `create a launch.json file`. +It creates the `launch.json` file in the `.vscode` folder, in which we will define the debug configurations. + +![Create launch.json file](/images/contribution/launch-config-file.jpg) + +To create a debug configuration: +- Open the `.vscode/launch.json` file +- Click the `Add Configuration` button +- Go down the list of templates and select `Scala: Run main class` + +![Create configuration](/images/contribution/create-config.jpg) + +The added configuration should look like this: +```json +{ + "type": "scala", + "request": "launch", + "name": "Untitled", + "mainClass": "???", + "args": [], + "jvmOptions": [], + "env": {} +} +``` + +This is a template that you need to fill out. +First, you can give a `name` to your configuration, for instance `Debug Scala 3 Compiler`. + +The two most important parameters, to debug the compiler, are `mainClass` and `args`. +The `mainClass` of the compiler is `dotty.tools.dotc.Main`. +In the `args` you need to specify the compiler arguments, which must contain at least a Scala file to compile and a `-classpath` option. + +To start with, we can compile the `../tests/pos/HelloWorld.scala` file. +In the classpath, we always need at least the `scala-library_2.13` and the bootstrapped `scala3-library_3`. +To locate them on your filesystem you can run the `export scala3-library-bootstrapped/fullClasspath` command in sbt.
+ +``` +$ sbt +> export scala3-library-bootstrapped/fullClasspath +/home/user/lampepfl/dotty/out/bootstrap/scala3-library-bootstrapped/scala-3.3.1-RC1-bin-SNAPSHOT-nonbootstrapped/classes:/home/user/.cache/coursier/v1/https/repo1.maven.org/maven2/org/scala-lang/scala-library/2.13.10/scala-library-2.13.10.jar +[success] Total time: 1 s, completed Mar 10, 2023, 4:37:43 PM +``` + +Note that it is important to use the bootstrapped version of the `scala3-library` to get the correct TASTy version. + +Additionally you can add the `-color` and `never` arguments to prevent the compiler from printing ANSI codes as strings in the debug console. + +Here is the final configuration: +```json +{ + "type": "scala", + "request": "launch", + "name": "Debug Scala 3 Compiler", + "mainClass": "dotty.tools.dotc.Main", + "args": [ + "../tests/pos/HelloWorld.scala", + "-classpath", + // To replace with your own paths + "/home/user/lampepfl/dotty/out/bootstrap/scala3-library-bootstrapped/scala-3.3.1-RC1-bin-SNAPSHOT-nonbootstrapped/classes:/home/user/.cache/coursier/v1/https/repo1.maven.org/maven2/org/scala-lang/scala-library/2.13.10/scala-library-2.13.10.jar", + "-color", + "never" + ], + "jvmOptions": [], + "env": {} +} +``` + +## Customizing the debug configurations + +### Compiling several files at once + +You can compile more than one Scala file, by adding them in the `args`: +```json +"args": [ + "file1.scala", + "file2.scala", + "-classpath", + "/home/user/lampepfl/dotty/out/bootstrap/scala3-library-bootstrapped/scala-3.3.1-RC1-bin-SNAPSHOT-nonbootstrapped/classes:/home/user/.cache/coursier/v1/https/repo1.maven.org/maven2/org/scala-lang/scala-library/2.13.10/scala-library-2.13.10.jar" +] +``` + +### Depending on a library + +To add a dependency to an external library you need to download it and all its transitive dependencies, and to add them in the classpath. +The Coursier CLI can help you to do that. 
+For instance to add a dependency to cats you can run: +``` +$ cs fetch org.typelevel::cats-core:2.+ --classpath --scala-version 3 --exclude org.scala-lang:scala-library --exclude org.scala-lang:scala3-library +/home/user/.cache/coursier/v1/https/repo1.maven.org/maven2/org/typelevel/cats-core_3/2.9.0/cats-core_3-2.9.0.jar:/home/user/.cache/coursier/v1/https/repo1.maven.org/maven2/org/typelevel/cats-kernel_3/2.9.0/cats-kernel_3-2.9.0.jar +``` + +And concatenate the output into the classpath argument, which should already contain the scala-library_2.13 and the bootstrapped scala3-library: + +```json +"args": [ + "using-cats.scala", + "-classpath", + "/home/user/lampepfl/dotty/out/bootstrap/scala3-library-bootstrapped/scala-3.3.1-RC1-bin-SNAPSHOT-nonbootstrapped/classes:/home/user/.cache/coursier/v1/https/repo1.maven.org/maven2/org/scala-lang/scala-library/2.13.10/scala-library-2.13.10.jar:/home/user/.cache/coursier/v1/https/repo1.maven.org/maven2/org/typelevel/cats-core_3/2.9.0/cats-core_3-2.9.0.jar:/home/user/.cache/coursier/v1/https/repo1.maven.org/maven2/org/typelevel/cats-kernel_3/2.9.0/cats-kernel_3-2.9.0.jar" +] +``` + +### Add more compiler options + +In the `args` you can add any additional compiler option you want. + +For instance you can add `-Xprint:all` to print all the generated trees after each mega phase. + +Run `scalac -help` to get an overview of the available compiler options. + +### Defining more than one launch configuration + +You can create as many debug configurations as you need: to compile different files, with different compiler options or different classpaths. + +## Starting the debugger + +Before starting the debugger you need to put a breakpoint in the part of the code that you want to debug. +If you don't know where to start, you can put a breakpoint in the `main` method of the `dotty.tools.dotc.Driver` trait. 
+ +![First breakpoint](/images/contribution/breakpoint.jpg) + +Now to start the debugger, open the debug view, find the drop-down list of all the debug configurations and click on yours. +The debugger should start and pause on your breakpoint. + +![Start debugger](/images/contribution/start-debugger.jpg) + +## Using the debugger + +### Navigating the call stack + +When the debugger has paused, you can see the current call stack in the `Debug and Run` view. +Each frame of the call stack contains different variables, whose values you can see in the `Variables` section of the `Debug and Run` view. + +![Call stack](/images/contribution/call-stack.jpg) + +Analysing the call stack and the variables can help you understand the path taken by the compiler to reach that state. + +### The debugging steps + +The debug toolbar contains the `Continue / Pause`, `Step Over`, `Step Into`, `Step Out`, `Restart` and `Stop` buttons. + +![Debugging steps](/images/contribution/toolbar.jpg) + +You can use the step buttons to execute the code step by step and get a precise understanding of the program. + +### The debug console + +When the debugger has paused, you can evaluate any Scala 3 expression in the debug console. +This is useful to inspect some values or to execute some parts of the code. + +For instance, you can evaluate `tree.show` to pretty-print a tree. + +![Import build](/images/contribution/debug-console.jpg) + +### Conditional breakpoints + +In a breakpoint you can define a condition, in the form of a Boolean expression written in Scala. +The program will stop on the breakpoint as soon as the condition is met. + +To add a condition, right-click on a breakpoint and pick `Edit breakpoint...`. + +For instance, if you know that a bug happens on typing a method `foo`, you can use the condition `tree.symbol.name.show == "foo"` in a breakpoint in the `Typer`. 
+ +![Import build](/images/contribution/conditional-breakpoint.jpg) + +[lampepfl/dotty]: https://github.com/lampepfl/dotty diff --git a/docs/_docs/contributing/issues/efficiency.md b/docs/_docs/contributing/issues/efficiency.md new file mode 100644 index 000000000000..07307646a4bb --- /dev/null +++ b/docs/_docs/contributing/issues/efficiency.md @@ -0,0 +1,24 @@ +--- +layout: doc-page +title: Improving Your Workflow +--- + +In the previous sections of this chapter, you saw some techniques for +working with the compiler. Some of these techniques can be used +repetitively, e.g.: + +- Navigating stack frames +- Printing variables in certain ways +- Instrumenting variable definitions with tracers + +The above procedures often take a lot of time when done manually, reducing productivity: +as the cost (in terms of time and effort) is high, you may avoid attempting to do so, +and possibly miss valuable information. + +If you're doing those things really frequently, it is recommended to script your editor +to reduce the number of steps. E.g. navigating to the definition of a stack frame +part when you click it, or instrumenting variables for printing. + +An example of how it is done for Sublime Text 3 is [here](https://github.com/anatoliykmetyuk/scala-debug-sublime). + +True, it takes some time to script your editor, but if you spend a lot of time with issues, it pays off. diff --git a/docs/_docs/contributing/issues/index.md b/docs/_docs/contributing/issues/index.md new file mode 100644 index 000000000000..db348d7edd9d --- /dev/null +++ b/docs/_docs/contributing/issues/index.md @@ -0,0 +1,17 @@ +--- +layout: index +title: Finding the Cause of an Issue +--- + +An issue found in the [GitHub repo][lampepfl/dotty] usually describes some code that +manifests undesired behaviour. 
+ +This chapter of the guide describes the different steps to contribute to Dotty: +- [Reproducing an Issue](./reproduce.md) +- [Finding the Cause of an Issue](./cause.md) +- [Debugging the Compiler](./debugging.md) +- [Other debugging techniques](./other-debugging.md) +- [Inspect the values](./inspection.md) +- [Improving your workflow](./efficiency.md) +- [Testing a Fix](./testing.md) +- [Checklist](./checklist.md) diff --git a/docs/_docs/contributing/issues/inspection.md b/docs/_docs/contributing/issues/inspection.md new file mode 100644 index 000000000000..abedc09ecd3b --- /dev/null +++ b/docs/_docs/contributing/issues/inspection.md @@ -0,0 +1,181 @@ +--- +layout: doc-page +title: How to Inspect Values +--- + +In this section, you will find out how to debug the contents of certain objects +while the compiler is running, and inspect produced artifacts of the compiler. + +## Inspecting variables in-place + +Frequently you will need to inspect the content of a particular variable. +You can either use `println`s or the debugger; see [Debugging the Compiler](./debugging.md) for more info on how to set up the latter. + +In the remainder of this article we'll use `println()` inserted in the code, but the same effect can be accomplished by stopping at a breakpoint, and typing `<expression>` in the [debug console](./debugging.md#the-debug-console) of the debugger. + +When printing a variable, it's always a good idea to call `show` on that variable: `println(x.show)`. +Many objects of the compiler define `show`, returning a human-readable string. +e.g. if called on a tree, the output will be the tree's representation as source code, rather than +the underlying raw data. + +Sometimes you need to print flags. Flags are metadata attached to [symbols] containing information such as whether a +class is abstract, comes from Java, what modifiers a variable has (private, protected etc) and so on. +Flags are stored in a single `Long` value, each bit of which represents whether a particular flag is set.
+ +To print flags, you can use the `flagsString` method, e.g. `println(x.flagsString)`. + +## Pretty Printing with a String Interpolator + +You can also pretty print objects with string interpolators, +these default to call `.show` when possible, avoiding boilerplate +and also helping format error messages. + +Import them with the following: + +```scala +import dotty.tools.dotc.core.Decorators.* +``` + +Here is a table of explanations for their use: + +| Usage | Description | +|--------|-----------------------------------| +|`i""` | General purpose string formatting. It calls `.show` on objects
mixing in Showable, `String.valueOf` otherwise | +|`em""` | Formatting for error messages: Like `i` but suppress
follow-on, error messages after the first one if some
of their arguments are "non-sensical". | +|`ex""` | Formatting with added explanations: Like `em`, but add
explanations to give more info about type variables
and to disambiguate where needed. | + + +## Obtaining debug output from the compiler + +As explained in [navigation](../issues/cause.md), we can debug the code being generated as it is transformed +through the compiler. As well as plain tree output, there are many compiler options that +add extra debug information to trees when compiling a file; you can find the full list +in [ScalaSettings]. + +## Stopping the compiler early +Sometimes you may want to stop the compiler after a certain phase, for example to prevent +knock-on errors from occurring from a bug in an earlier phase. Use the flag +`-Ystop-after:<phase>` to prevent any phases executing afterwards. + +> e.g. `-Xprint:<phase>` where `phase` is a miniphase, will print after +> the whole phase group is complete, which may be several miniphases after `phase`. +> Instead you can use `-Ystop-after:<phase> -Xprint:<phase>` to stop +> immediately after the miniphase and see the trees that you intended. + +## Printing TASTy of a Class + +If you are working on an issue related to TASTy, it is good to know how to inspect +the contents of a TASTy file, produced from compilation of Scala files. + +The next example uses an [issue directory](../issues/reproduce.md#dotty-issue-workspace) to compile a class and print its TASTy. +In the directory, you should create a file `tasty/Foo.scala` (with contents of `class Foo`), +and create a file `tasty/launch.iss` with the following contents: + +``` +$ (rm -rv out || true) && mkdir out # clean up compiler output, create `out` dir. + +scala3/scalac -d $here/out $here/Foo.scala + +scala3/scalac -print-tasty $here/out/Foo.tasty +``` + +With sbt command `issue tasty` you will see output such as the following: + +``` +-------------------------------------------------------------------------------- +local/foo/out/Foo.tasty +-------------------------------------------------------------------------------- +Names: + 0: ASTs + 1: <empty> + 2: Foo + 3: <init> +... +``` +and so on.
+ +## Inspecting The Representation of Types + +> [learn more about types](../architecture/types.md) in `dotc`. + +If you are curious about the representation of a type, say `[T] =>> List[T]`, +you can use a helper program [dotty.tools.printTypes][DottyTypeStealer], +it prints the internal representation of types, along with their class. It can be +invoked from the sbt shell with three arguments as follows: +```bash +sbt:scala3> scala3-compiler/Test/runMain + dotty.tools.printTypes + <source> + <kind> + <typeStrings*> +``` + +- The first argument, `source`, is an arbitrary string that introduces some Scala definitions. +It may be the empty string `""`. +- The second argument, `kind`, determines the format of the following arguments, +accepting one of the following options: + - `rhs` - accept return types of definitions + - `class` - accept signatures for classes + - `method` - accept signatures for methods + - `type` - accept signatures for type definitions + - The empty string `""`, in which case `rhs` will be assumed. +- The remaining arguments are type signature strings, accepted in the format determined by +`kind`, and collected into a sequence `typeStrings`. Signatures are the part of a definition +that comes after its name, (or a simple type in the case of `rhs`) and may reference +definitions introduced by the `source` argument. + +Each one of `typeStrings` is then printed, displaying their internal structure, alongside their class.
+
+### Examples
+
+Here, given a previously defined `class Box { type X }`, you can inspect the return type `Box#X`:
+```bash
+sbt:scala3> scala3-compiler/Test/runMain
+> dotty.tools.printTypes
+> "class Box { type X }"
+> "rhs"
+> "Box#X"
+[info] running (fork) dotty.tools.printTypes "class Box { type X }" rhs Box#X
+TypeRef(TypeRef(ThisType(TypeRef(NoPrefix,module class <empty>)),class Box),type X) [class dotty.tools.dotc.core.Types$CachedTypeRef]
+```
+
+Here are some other examples you can try:
+- `...printTypes "" "class" "[T] extends Seq[T] {}"`
+- `...printTypes "" "method" "(x: Int): x.type"`
+- `...printTypes "" "type" "<: Int" "= [T] =>> List[T]"`
+
+### Don't just print: extracting further information
+
+`dotty.tools.printTypes` is useful to see the representation
+of a type at a glance, but sometimes you want to extract more. Instead, you can use the
+method `dotty.tools.DottyTypeStealer.stealType`. With the same inputs as `printTypes`,
+it returns a `Context` containing the definitions passed, along with the list of types.
+ +As a worked example let's create a test case to verify the structure of `Box#X` that you saw earlier: +```scala +import dotty.tools.dotc.core.Contexts.Context +import dotty.tools.dotc.core.Types.* + +import org.junit.Test + +import dotty.tools.DottyTypeStealer, DottyTypeStealer.Kind + +class StealBox: + + @Test + def stealBox: Unit = + val (ictx, List(rhs)) = + DottyTypeStealer.stealType("class Box { type X }", Kind.rhs, "Box#X") + + given Context = ictx + + rhs match + case X @ TypeRef(Box @ TypeRef(ThisType(empty), _), _) => + assert(Box.name.toString == "Box") + assert(X.name.toString == "X") + assert(empty.name.toString == "") +``` + +[DottyTypeStealer]: https://github.com/lampepfl/dotty/blob/master/compiler/test/dotty/tools/DottyTypeStealer.scala +[ScalaSettings]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala +[symbols]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/core/SymDenotations.scala diff --git a/docs/_docs/contributing/debugging.md b/docs/_docs/contributing/issues/other-debugging.md similarity index 94% rename from docs/_docs/contributing/debugging.md rename to docs/_docs/contributing/issues/other-debugging.md index 959ad6706290..1aa0fb85e5f8 100644 --- a/docs/_docs/contributing/debugging.md +++ b/docs/_docs/contributing/issues/other-debugging.md @@ -1,26 +1,8 @@ --- layout: doc-page -title: Debugging Techniques +title: Other Debugging Techniques --- -# Debugging Techniques -- [Setting up the playground](#setting-up-the-playground) -- [Show for human readable output](#show-for-human-readable-output) -- [How to disable color](#how-to-disable-color) -- [Reporting as a non-intrusive println](#reporting-as-a-non-intrusive-println) -- [Printing out trees after phases](#printing-out-trees-after-phases) -- [Printing out stack traces of compile time errors](#printing-out-stack-traces-of-compile-time-errors) -- [Configuring the printer 
output](#configuring-the-printer-output) -- [Figuring out an object creation site](#figuring-out-an-object-creation-site) - * [Via ID](#via-id) - * [Via tracer](#via-tracer) -- [Built-in Logging Architecture](#built-in-logging-architecture) - * [Printers](#printers) - * [Tracing](#tracing) - * [Reporter](#reporter) - -Table of contents generated with markdown-toc - ## Setting up the playground Consider the `../issues/Playground.scala` (relative to the Dotty directory) file is: diff --git a/docs/_docs/contributing/issues/reproduce.md b/docs/_docs/contributing/issues/reproduce.md new file mode 100644 index 000000000000..41d96327ef24 --- /dev/null +++ b/docs/_docs/contributing/issues/reproduce.md @@ -0,0 +1,127 @@ +--- +layout: doc-page +title: Reproducing an Issue +--- + +To try fixing it, you will first need to reproduce the issue, so that +- you can understand its cause +- you can verify that any changes made to the codebase have a positive impact on the issue. + +Say you want to reproduce locally issue [#7710], you would first copy the code from the *"Minimised Code"* +section of the issue to a file named e.g. `local/i7710.scala`, +and then try to compile it from the sbt console opened in the dotty root directory: +```bash +$ sbt +sbt:scala3> scala3/scalac -d local/out local/i7710.scala +``` +> Here, the `-d` flag specifies a directory `local/out` where generated code will be output. + +You can then verify that the local reproduction has the same behaviour as originally reported in the issue. +If so, then you can start to try and fix it. Otherwise, perhaps the issue is out of date, or +is missing information about how to accurately reproduce the issue. + +## Dotty Issue Workspace + +Sometimes you will need more complex commands to reproduce an issue, and it is useful to script these, which +can be done with [dotty-issue-workspace]. It allows to bundle sbt commands for issue reproduction in one +file and then run them from the Dotty project's sbt console. 
+ +### Try an Example Issue + +Let's use [dotty-issue-workspace] to reproduce issue [#7710]: +1. Follow [the steps in the README][workspace-readme] to install the plugin. +2. In your Issue Workspace directory (as defined in the plugin's README file, + "Getting Started" section, step 2), create a subdirectory for the + issue: `mkdir i7710`. +3. Create a file for the reproduction: `cd i7710; touch Test.scala`. In that file, + insert the code from the issue. +4. In the same directory, create a file `launch.iss` with the following content: + ```bash + $ (rm -rv out || true) && mkdir out # clean up compiler output, create `out` dir. + + scala3/scalac -d $here/out $here/Test.scala + ``` + + - The first line, `$ (rm -rv out || true) && mkdir out` specifies a shell command + (it starts with `$`), in this case to ensure that there is a fresh `out` + directory to hold compiler output. + - The next line, `scala3/scalac -d $here/out $here/Test.scala` specifies an sbt + command, which will compile `Test.scala` and place any output into `out`. + `$here` is a special variable that will be replaced by the path of the parent + directory of `launch.iss` when executing the commands. +5. Now, from a terminal you can run the issue from sbt in the dotty directory + ([See here](../getting-started.md#compiling-and-running) for a reminder if you have not cloned the repo.): + ```bash + $ sbt + sbt:scala3> issue i7710 + ``` + This will execute all the commands in the `i7710/launch.iss` file one by one. + If you've set up `dotty-issue-workspace` as described in its README, + the `issue` task will know where to find the folder by its name. + +### Using Script Arguments + +You can use script arguments inside `launch.iss` to reduce the number of steps when +working with issues. + +Say you have an issue `foo`, with two alternative files that are very similar: +`original.scala`, which reproduces the issue, and `alt.scala`, which does not, +and you want to compile them selectively? 
+
+You can achieve this via the following `launch.iss`:
+
+```bash
+$ (rm -rv out || true) && mkdir out # clean up compiler output, create `out` dir.
+
+scala3/scalac -d $here/out $here/$1.scala # compile the first argument following `issue foo <arg>`
+```
+
+It is similar to the previous example, except now you will compile a file `$1.scala`, referring
+to the first argument passed after the issue name. The command invoked would look like
+`issue foo original` to compile `original.scala`, and `issue foo alt` for `alt.scala`.
+
+In general, you can refer to arguments passed to the `issue <issue_name>` command using
+the dollar notation: `$1` for the first argument, `$2` for the second and so on.
+
+### Multiline Commands
+
+Inside a `launch.iss` file, one command can be spread across multiple lines. For example,
+if your command has multiple arguments, you can put each argument on a new line.
+
+Multiline commands can even have comments in between lines. This is useful
+if you want to try variants of a command with optional arguments (such as configuration).
+You can put the optional arguments on separate lines, and then decide when they are passed to
+the command by placing `#` in front to convert it to a comment (i.e. the argument will
+not be passed). This saves typing the same arguments each time you want to use them.
+
+The following `launch.iss` file is an example of how you can use multiline commands as a
+template for solving issues that [run compiled code](../issues/testing.md#checking-program-output). It demonstrates configuring the
+`scala3/scalac` command using compiler flags, which are commented out.
+Put your favourite flags there for quick usage.
+
+```bash
+$ (rm -rv out || true) && mkdir out # clean up compiler output, create `out` dir.
+
+scala3/scalac # Invoke the compiler task defined by the Dotty sbt project
+  -d $here/out # All the artefacts go to the `out` folder created earlier
+  # -Xprint:typer # Useful debug flags, commented out and ready for quick usage.
Should you need one, you can quickly access it by uncommenting it. + # -Ydebug-error + # -Yprint-debug + # -Yprint-debug-owners + # -Yshow-tree-ids + # -Ydebug-tree-with-id 340 + # -Ycheck:all + $here/$1.scala # Invoke the compiler on the file passed as the second argument to the `issue` command. E.g. `issue foo Hello` will compile `Hello.scala` assuming the issue folder name is `foo`. + +scala3/scala -classpath $here/out Test # Run main method of `Test` generated by the compiler run. +``` + +## Conclusion + +In this section, you have seen how to reproduce an issue locally, and next you will see +how to try and detect its root cause. + +[lampepfl/dotty]: https://github.com/lampepfl/dotty/issues +[#7710]: https://github.com/lampepfl/dotty/issues/7710 +[dotty-issue-workspace]: https://github.com/anatoliykmetyuk/dotty-issue-workspace +[workspace-readme]: https://github.com/anatoliykmetyuk/dotty-issue-workspace#getting-started \ No newline at end of file diff --git a/docs/_docs/contributing/issues/testing.md b/docs/_docs/contributing/issues/testing.md new file mode 100644 index 000000000000..1f7c35c6d58a --- /dev/null +++ b/docs/_docs/contributing/issues/testing.md @@ -0,0 +1,212 @@ +--- +layout: doc-page +title: Testing Your Changes +--- + +It is important to add tests before a pull request, to verify that everything is working as expected, +and act as proof of what is valid/invalid Scala code (in case it is broken in the future). +In this section you will see the testing procedures in Scala 3. + +## Running all Tests + +Running all tests in Dotty is as simple as: + +```bash +$ sbt test +``` +Specifically, `sbt test` runs all tests that do _not_ require a bootstrapped +compiler. In practice, this means that it runs all compilation tests meeting +this criterion, as well as all non-compiler tests. 
+ +To run all tests of Scala 3, including for compiler, REPL, libraries and more, run the following in sbt: + +```bash +$ sbt +sbt:scala3> scala3-bootstrapped/test +``` + +Often however it is not necessary to test everything if your changes are localised to one area, +you will see in the following sections the different kinds of tests, and how +to run individual tests. + +## Compilation Tests + +Compilation tests run the compiler over input files, using various settings. Input files +are found within the `tests/` directory at the root of the compiler repo. + +Test input files are categorised further by placing them in the subdirectories +of the `tests/` directory. A small selection of test categories include: + +- `tests/pos` – tests that should compile: pass if compiles successfully. +- `tests/neg` – should not compile: pass if fails compilation. Useful, e.g., to test an expected compiler error. +- `tests/run` – these tests not only compile but are also run. + +### Naming and Running a Test Case + +Tests are, by convention, named after the number of the issue they are fixing. +e.g. if you are fixing issue 101, then the test should be named `i101.scala`, for a single-file test, +or be within a directory called `i101/` for a multi-file test. + +To run the test, invoke the sbt command `testCompilation i101` (this will match all tests with `"i101"` in +the name, so it is useful to use a unique name) + +The test groups – `pos`, `neg`, etc. – are defined in [CompilationTests]. If you want to run a group +of tests, e.g. `pos`, you can do so via `testOnly *CompilationTests -- *pos` command. + +### Testing a Single Input File + +If your issue is reproducible by only one file, put that file under an appropriate category. +For example, if your issue is about getting rid of a spurious compiler error (that is a code that doesn't compile should, in fact, compile), you can create a file `tests/pos/i101.scala`. 
+ +### Testing Multiple Input Files + +If you need more than one file to reproduce an issue, create a directory instead of a file +e.g. `tests/pos/i101/`, and put all the Scala files that are needed to reproduce the issue there. +There are two ways to organise the input files within: + +**1: Requiring classpath dependency:** Sometimes issues require one file to be compiled after the other, +(e.g. if the issue only happens with a library dependency, like with Java interop). In this case, +the outputs of the first file compiled will be available to the next file compiled, available via the classpath. +This is called *separate compilation*. + +To achieve this, within `tests/pos/i101/`, add a suffix `_n` to each file name, where `n` is an integer defining the +order in which the file will compile. E.g. if you have two files, `Lib.scala` and `Main.scala`, and you need them +compiled separately – Lib first, Main second, then name them `Lib_1.scala` and `Main_2.scala`. + +**2: Without classpath dependency:** If your issue does not require a classpath dependency, your files can be compiled +in a single run, this is called *joint compilation*. In this case use file names without the `_n` suffix. + +### Checking Program Output + +`tests/run` tests verify the run-time behaviour of a test case. The output is checked by invoking a main method +on a class `Test`, this can be done with either +```scala +@main def Test: Unit = assert(1 > 0) +``` +or +```scala +object Test extends scala.App: + assert(1 > 0) +``` + +If your program also prints output, this can be compared against `*.check` files. +These contain the expected output of a program. Checkfiles are named after the issue they are checking, +e.g. `tests/run/i101.check` will check either `tests/run/i101.scala` or `tests/run/i101/`. + +### Checking Compilation Errors + +`tests/neg` tests verify that a file does not compile, and user-facing errors are produced. There are other neg +categories such as `neg-custom-args`, i.e. 
with `neg` prefixing the directory name. Test files in the `neg*` +categories require annotations for the lines where errors are expected. To do this add one `// error` token to the +end of a line for each expected error. For example, if there are three expected errors, the end of the line should contain +`// error // error // error`. + +You can verify the content of the error messages with a `*.check` file. These contain the expected output of the +compiler. Checkfiles are named after the issue they are checking, +e.g. `i101.check` will check either `tests/neg/i101.scala` or `tests/neg/i101/`. +*Note:* checkfiles are not required for the test to pass, however they do add stronger constraints that the errors +are as expected. + +### If Checkfiles do not Match Output + +If the actual output mismatches the expected output, the test framework will dump the actual output in the file +`*.check.out` and fail the test suite. It will also output the instructions to quickly replace the expected output +with the actual output, in the following format: + +``` +Test output dumped in: tests/neg/Sample.check.out + See diff of the checkfile + > diff tests/neg/Sample.check tests/neg/Sample.check.out + Replace checkfile with current output + > mv tests/neg/Sample.check.out tests/neg/Sample.check +``` + +### Tips for creating Checkfiles + +To create a checkfile for a test, you can do one of the following: + +1. Create an empty checkfile + - then add arbitrary content + - run the test + - when it fails, use the `mv` command reported by the test to replace the initial checkfile with the actual output. +2. Manually compile the file you are testing with `scala3/scalac` + - copy-paste whatever console output the compiler produces to the checkfile. + +### Automatically Updating Checkfiles + +When complex or many checkfiles must be updated, `testCompilation` can run in a mode where it overrides the +checkfiles with the test outputs. 
+```bash +$ sbt +> testCompilation --update-checkfiles +``` + +Use `--help` to see all the options +```bash +$ sbt +> testCompilation --help +``` + +### Bootstrapped-only tests + +To run `testCompilation` on a bootstrapped Dotty compiler, use +`scala3-compiler-bootstrapped/testCompilation` (with the same syntax as above). +Some tests can only be run in bootstrapped compilers; that includes all tests +with `with-compiler` in their name. + +### From TASTy tests + +`testCompilation` has an additional mode to run tests that compile code from a `.tasty` file. +Modify the lists in [compiler/test/dotc] to enable or disable tests from `.tasty` files. + +```bash +$ sbt +> testCompilation --from-tasty +``` + +## Unit Tests + +Unit tests cover the other areas of the compiler, such as interactions with the REPL, scripting tools and more. +They are defined in [compiler/test], so if your use case isn't covered by this guide, +you may need to consult the codebase. Some common areas are highlighted below: + +### SemanticDB tests + +To test the SemanticDB output from the `extractSemanticDB` phase (enabled with the `-Xsemanticdb` flag), run the following sbt command: +```bash +$ sbt +sbt:scala3> scala3-compiler-bootstrapped/testOnly + dotty.tools.dotc.semanticdb.SemanticdbTests +``` + +[SemanticdbTests] uses source files in `tests/semanticdb/expect` to generate "expect files": +these verify both +- SemanticDB symbol occurrences inline in sourcecode (`*.expect.scala`) +- complete output of all SemanticDB information (`metac.expect`). + +Expect files are used as regression tests to detect changes in the compiler. +Their correctness is determined by human inspection. + +If expect files change then [SemanticdbTests] will fail, and generate new expect files, providing instructions for +comparing the differences and replacing the outdated expect files. 
+ +If you are planning to update the SemanticDB output, you can do it in bulk by running the command +```bash +$ sbt +sbt:scala3> scala3-compiler/Test/runMain + dotty.tools.dotc.semanticdb.updateExpect +``` + +then compare the changes via version control. + +## Troubleshooting + +Some of the tests depend on temporary state stored in the `out` directory. In rare cases, that directory +can enter an inconsistent state and cause spurious test failures. If you suspect a spurious test failure, +you can run `rm -rf out/*` from the root of the repository and run your tests again. If that fails, you +can try `git clean -xfd`. + +[CompilationTests]: https://github.com/lampepfl/dotty/blob/master/compiler/test/dotty/tools/dotc/CompilationTests.scala +[compiler/test]: https://github.com/lampepfl/dotty/blob/master/compiler/test/ +[compiler/test/dotc]: https://github.com/lampepfl/dotty/tree/master/compiler/test/dotc +[SemanticdbTests]: https://github.com/lampepfl/dotty/blob/master/compiler/test/dotty/tools/dotc/semanticdb/SemanticdbTests.scala diff --git a/docs/_docs/contributing/procedures/index.md b/docs/_docs/contributing/procedures/index.md index 01c76f72c00c..db2b09dbe80f 100644 --- a/docs/_docs/contributing/procedures/index.md +++ b/docs/_docs/contributing/procedures/index.md @@ -2,3 +2,7 @@ layout: index title: Procedures --- + +This chapter of the guide describes: +- [How to release a procedure](./release.md) +- [How to test the vulpix framework](./vulpix.md) \ No newline at end of file diff --git a/docs/_docs/contributing/procedures/vulpix.md b/docs/_docs/contributing/procedures/vulpix.md index 5e8a2eab425b..1eea2fa24778 100644 --- a/docs/_docs/contributing/procedures/vulpix.md +++ b/docs/_docs/contributing/procedures/vulpix.md @@ -3,7 +3,6 @@ layout: doc-page title: Test Vulpix Framework --- -# Test Vulpix Framework If you are modifying the Vulpix framework and need a playground with dummy tests to try out your modifications, do the following. 
Create the directory structure for the playground: diff --git a/docs/_docs/contributing/testing.md b/docs/_docs/contributing/testing.md deleted file mode 100644 index a01cdb08f8ab..000000000000 --- a/docs/_docs/contributing/testing.md +++ /dev/null @@ -1,207 +0,0 @@ ---- -layout: doc-page -title: Testing in Dotty ---- - -Running all tests in Dotty is as simple as: - -```bash -$ sbt test -``` - -Specifically, `sbt test` runs all tests that do _not_ require a bootstrapped -compiler. In practice, this means that it runs all compilation tests meeting -this criterion, as well as all non-compiler tests. - -The entire suite of tests can be run using the bootstrapped compiler as follows: - -```bash -$ sbt -> scala3-bootstrapped/test -``` - -There are currently several forms of tests in Dotty. These can be split into -two categories: - -## Unit tests -These tests can be found in `/test` and are used to check -functionality of specific parts of the codebase in isolation e.g: parsing, -scanning and message errors. - -To run all tests in e.g., for the compiler test-suite you can write: - -```bash -$ sbt -> scala3-compiler/test -``` - -To run a single test class you use `testOnly` and the fully qualified class name. -For example: - -```bash -> testOnly dotty.tools.dotc.transform.TreeTransformerTest -``` - -The test command follows a regular expression-based syntax `testOnly * -- *`. -The right-hand side picks a range of names for methods and the left-hand side picks a range of class names and their -fully-qualified paths. - -Consequently, you can restrict the aforementioned executed test to a subset of methods by appending ``-- *method_name``. 
-The example below picks up all methods with the name `canOverwrite`: - -```bash -> testOnly dotty.tools.dotc.transform.TreeTransformerTest -- *canOverwrite -``` - -Additionally, you can run all tests named `method_name`, in any class, without providing a class name: - -```bash -> testOnly -- *canOverwrite -``` - -You can also run all paths of classes of a certain name: - -```bash -> testOnly *.TreeTransformerTest -``` - -### Testing with checkfiles -Some tests support checking the output of the run or the compilation against a checkfile. A checkfile is a file in which the expected output of the compilation or run is defined. A test against a checkfile fails if the actual output mismatches the expected output. - -Currently, the `run` and `neg` (compilation must fail for the test to succeed) tests support the checkfiles. `run`'s checkfiles contain an expected run output of the successfully compiled program. `neg`'s checkfiles contain an expected error output during compilation. - -Absence of a checkfile is **not** a condition for the test failure. E.g. if a `neg` test fails with the expected number of errors and there is no checkfile for it, the test still passes. - -Checkfiles are located in the same directories as the tests they check, have the same name as these tests with the extension `*.check`. E.g. if you have a test named `tests/neg/foo.scala`, you can create a checkfile for it named `tests/neg/foo.check`. And if you have a test composed of several files in a single directory, e.g. `tests/neg/manyScalaFiles`, the checkfile will be `tests/neg/manyScalaFiles.check`. - -If the actual output mismatches the expected output, the test framework will dump the actual output in the file `*.check.out` and fail the test suite. 
It will also output the instructions to quickly replace the expected output with the actual output, in the following format: - -``` -Test output dumped in: tests/playground/neg/Sample.check.out - See diff of the checkfile - > diff tests/playground/neg/Sample.check tests/playground/neg/Sample.check.out - Replace checkfile with current output - > mv tests/playground/neg/Sample.check.out tests/playground/neg/Sample.check -``` - -To create a checkfile for a test, you can do one of the following: - -- Create a dummy checkfile with a random content, run the test, and, when it fails, use the `mv` command reported by the test to replace the dummy checkfile with the actual output. -- Manually compile the file you are testing with `scalac` and copy-paste whatever console output the compiler produces to the checkfile. - -## Integration tests -These tests are Scala source files expected to compile with Dotty (pos tests), -along with their expected output (run tests) or errors (neg tests). - -All of these tests are contained in the `./tests/*` directories and can be run with the `testCompilation` command. Tests in folders named `with-compiler` are an exception, see next section. - -Currently to run these tests you need to invoke from sbt: - -```bash -$ sbt -> testCompilation -``` - -(which is effectively the same with `testOnly dotty.tools.dotc.CompilationTests`) - -It is also possible to run tests filtered, again from sbt: - -```bash -$ sbt -> testCompilation companions -``` - -This will run both the test `./tests/pos/companions.scala` and -`./tests/neg/companions.scala` since both of these match the given string. -This also means that you could run `testCompilation` with no arguments to run all integration tests. - -When complex checkfiles must be updated, `testCompilation` can run in a mode where it overrides the checkfiles with the test outputs. 
-```bash -$ sbt -> testCompilation --update-checkfiles -``` - -Use `--help` to see all the options -```bash -$ sbt -> testCompilation --help -``` - -### Joint and separate sources compilation - -When the sources of a test consist of multiple source files places in a single directory they are passed to the compiler in a single run and the compiler decides in which order to compile them. In some cases, however, to reproduce a specific test scenario it might be necessary to compile the source files in several steps in a specified order. To achieve that one can add a `_${step_index}` suffix to a file name (before the `.scala` or `.java` extension) indicating the order of compilation. E.g. if the test directory contains files named `Foo_1.scala`, `Bar_2.scala` and `Baz_2.scala` then `Foo_1.scala` will be compiled first and after that `Bar_2.scala` together with `Baz_2.scala`. - -The other kind of suffix that can modify how particular files are compiled is `_c${compilerVersion}`. When specified, the file will be compiled with a specific version of the compiler instead of the one developed on the current branch. - -Different suffixes can be mixed together (their order is not important although consistency is advised), e.g. `Foo_1_c3.0.2`, `Bar_2_c3.1.0`. - -### Bootstrapped-only tests - -To run `testCompilation` on a bootstrapped Dotty compiler, use -`scala3-compiler-bootstrapped/testCompilation` (with the same syntax as above). -Some tests can only be run in bootstrapped compilers; that includes all tests -with `with-compiler` in their name. - -### From TASTy tests - -`testCompilation` has an additional mode to run tests that compile code from a `.tasty` file. - Modify blacklist and whitelists in `compiler/test/dotc` to enable or disable tests from `.tasty` files. - - ```bash - $ sbt - > testCompilation --from-tasty - ``` - - This mode can be run under `scala3-compiler-bootstrapped/testCompilation` to test on a bootstrapped Dotty compiler. 
- -### SemanticDB tests - -```bash -$ sbt -> scala3-compiler-bootstrapped/testOnly dotty.tools.dotc.semanticdb.SemanticdbTests -``` - -The output of the `extractSemanticDB` phase, enabled with `-Xsemanticdb` is tested with the bootstrapped JUnit test -`dotty.tools.dotc.semanticdb.SemanticdbTests`. It uses source files in `tests/semanticdb/expect` to generate -two kinds of output file that are compared with "expect files": placement of semanticdb symbol occurrences inline in -sourcecode (`*.expect.scala`), for human verification by inspection; and secondly metap formatted output which outputs -all information stored in semanticdb (`metac.expect`). -Expect files are used as regression tests to detect changes in the compiler. - -The test suite will create a new file if it detects any difference, which can be compared with the -original expect file, or if the user wants to globally replace all expect files for semanticdb they can use -`scala3-compiler-bootstrapped/test:runMain dotty.tools.dotc.semanticdb.updateExpect`, and compare the changes via version -control. - -### Test regimes - -Continuous integration, managed by GitHub Actions, does not run all jobs when a pull request is created. -In particular, test jobs for testing under JDK 8 and Windows are not run. Those jobs are run only for the nightly build. - -If a PR may fail differentially under either JDK 8 or Windows, the test jobs may be triggered by adding -a special command to the PR comment text: - -``` -[test_java8] -[test_windows_full] -``` -Furthermore, CI tests are bootstrapped. 
A job to also run tests non-bootstrapped may be triggered manually: -``` -[test_non_bootstrapped] -``` -A trivial PR, such as a fix for a typo in a comment or when contributing other documentation, may benefit by skipping CI tests altogether: -``` -[skip ci] -``` -Other jobs which are normally run can also be selectively skipped: -``` -[skip community_build] -[skip test_windows_fast] -``` - -## Troubleshooting - -Some of the tests depend on temporary state stored in the `out` directory. In rare cases, that directory -can enter an inconsistent state and cause spurious test failures. If you suspect a spurious test failure, -you can run `rm -rf out/*` from the root of the repository and run your tests again. If that fails, you -can try `git clean -xfd`. diff --git a/docs/_docs/contributing/tools/index.md b/docs/_docs/contributing/tools/index.md index 92503ee82013..e784e3e15d61 100644 --- a/docs/_docs/contributing/tools/index.md +++ b/docs/_docs/contributing/tools/index.md @@ -2,3 +2,8 @@ layout: index title: IDEs and Tools --- + +This chapter of the guide describes how to use Dotty with IDEs and other tools: +- [IDEs](./ide.md) +- [Use Mill](./mill.md) +- [Use Scalafix](./scalafix.md) diff --git a/docs/_docs/contributing/tools/scalafix.md b/docs/_docs/contributing/tools/scalafix.md index 58c7d0eb7b3a..30c7050f8b3e 100644 --- a/docs/_docs/contributing/tools/scalafix.md +++ b/docs/_docs/contributing/tools/scalafix.md @@ -3,8 +3,6 @@ layout: doc-page title: Working with Scalafix --- -# Working with Scalafix - First, create a new rule as follows (command from https://scalacenter.github.io/scalafix/docs/developers/setup.html): ```bash diff --git a/docs/_docs/contributing/workflow.md b/docs/_docs/contributing/workflow.md index 956ce2998c75..1d11dc61a6bf 100644 --- a/docs/_docs/contributing/workflow.md +++ b/docs/_docs/contributing/workflow.md @@ -103,8 +103,29 @@ The basics of working with Dotty codebase are documented [here](https://dotty.ep | Command | Description | 
|------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------| +| `scala3/scalac` | Run the compiler directly, with any current changes. | +| `scala3/scala` | Run the main method of a given class name. | | `scalac ../issues/Playground.scala` | Compile the given file – path relative to the Dotty directory. Output the compiled class files to the Dotty directory itself. | | `scala Playground` | Run the compiled class `Playground`. Dotty directory is on classpath by default. | | `repl` | Start REPL | +| `scala3/scalac -print-tasty Foo.tasty` | Print the TASTy of top-level class `Foo` | +| `scala3-bootstrapped/test` | Run all tests for Scala 3. (Slow, recommended for CI only) | +| `scala3-bootstrapped/publishLocal` | Build Scala 3 locally. (Use to debug a specific project) | +| `scalac ../issues/Playground.scala` | Compile the given file – path relative to the Dotty directory. Output the compiled class files to the Dotty directory itself.| | `testOnly dotty.tools.dotc.CompilationTests -- *pos` | Run test (method) `pos` from `CompilationTests` suite. | | `testCompilation sample` | In all test suites, run test files containing the word `sample` in their title. 
| +| `scala3-compiler/Test/runMain dotty.tools.printTypes`| Print types underlying representation | +| `scaladoc/generateScalaDocumentation` | Build the documentation website (published to https://dotty.epfl.ch) | +| `scaladoc/generateReferenceDocumentation` | Build the reference documentation website (published to https://docs.scala-lang.org/scala3/reference) | + + +## Shell Commands + +| Command | Description | +|--------------------------------------|------------------------------------------------------------------| +| `rm -rv *.tasty *.class out || true` | clean all compiled artifacts, from root dotty directory | + + + + + diff --git a/docs/_docs/index.md b/docs/_docs/index.md index 97dc7fd5886b..e61313d81a4a 100644 --- a/docs/_docs/index.md +++ b/docs/_docs/index.md @@ -1,19 +1,6 @@ --- layout: index redirectFrom: /docs/index.html -nightlyOf: https://docs.scala-lang.org/scala3/reference/ --- -Dotty is the project name for technologies that are considered for inclusion in Scala 3. Scala has -pioneered the fusion of object-oriented and functional programming in a typed setting. Scala 3 will -be a big step towards realizing the full potential of these ideas. Its main objectives are to - -- become more opinionated by promoting programming idioms we found to work well, -- simplify where possible, -- eliminate inconsistencies and surprising behaviors, -- build on strong foundations to ensure the design hangs well together, -- consolidate language constructs to improve the language’s consistency, safety, ergonomics, and performance. - -In this documentation you will find information on how to use the Dotty compiler on your machine, -navigate through the code, setup Dotty with your favorite IDE and more! - +This website contains the developer documentation of the Scala 3 compiler. It targets developers interested in contributing to the compiler, or learning its internals. If you want to learn how to use Scala, go [here](https://docs.scala-lang.org/). 
diff --git a/docs/_docs/internals/backend.md b/docs/_docs/internals/backend.md index e3215c3993ae..660f6e1f41e5 100644 --- a/docs/_docs/internals/backend.md +++ b/docs/_docs/internals/backend.md @@ -6,8 +6,13 @@ title: "Backend Internals" The code for the JVM backend is split up by functionality and assembled in `GenBCode.scala`. This file defines class `GenBCode`, the compiler phase. +The workflow is split into `CodeGen.scala` Scala compilation context aware responsible for emitting bytecode, +and `PostProcessor.scala` which can be used for parallelized, context agnostic processing. In Scala 2 `PostProcessor`, +was responsible for performing bytecode optimization, e.g. inlining method calls. In Scala 3 it is only used for writing +Class files and Tasty to disk. + ``` -class GenBCodePipeline -[defines]--> PlainClassBuilder +class CodeGen.Impl -[defines]--> PlainClassBuilder | | [extends] [extends] | | @@ -18,14 +23,14 @@ BCodeBodyBuilder ----------------> PlainBodyBuilder BCodeSkelBuilder ----------------> PlainSkelBuilder | / | \ BCodeHelpers ----------------> BCClassGen BCAnnotGen ... (more components) - | | \ - | | \-------------> helper methods - | | \------------> JMirrorBuilder, JBeanInfoBuilder (uses some components, e.g. BCInnerClassGen) - | | - | BytecodeWriters ---------> methods and classes to write byte code files + | \ + | \-------------> helper methods + | \------------> JMirrorBuilder, JAndroidBuilder (uses some components, e.g. BCInnerClassGen) + | \-----------> `backendUtils`: utility for bytecode related ops, contains mapping for supported classfile version | BCodeIdiomatic ----------------> utilities for code generation, e.g. 
genPrimitiveArithmetic \--------------> `bTypes`: maps and fields for common BTypes + \-------------> `int`: synchronized interface between PostProcessor and compiltion ctx ``` The `BTypes.scala` class contains the `BType` class and predefined BTypes @@ -34,28 +39,33 @@ The `BTypes.scala` class contains the `BType` class and predefined BTypes Compiler creates a `GenBCode` `Phase`, calls `runOn(compilationUnits)`, which calls `run(context)`. This: -* initializes `myPrimitives` defined in `DottyPrimitives` (maps primitive - members, like `int.+`, to bytecode instructions) -* creates a `GenBCodePipeline` and calls `run(tree)` - -`GenBCodePipeline` now: - -* initializes the `bTypes` field of `GenBCodePipeline` defined in `BCodeIdiomatic` - (BType maps, common BTypes like `StringRef`) -* creates `BytecodeWriter` and `JMirrorBuilder` instances (on each compiler run) -* `buildAndSendToDisk(units)`: uses work queues, see below. - - `GenBCodePipeline.feedPipeline1` adds ClassDefs to `q1` - - `Worker1.run` creates ASM `ClassNodes`, adds to `q2`. It creates one - `PlainClassBuilder` for each compilation unit. - - `Worker2.run` adds byte arrays (one for each class) to `q3` - - `GenBCodePipeline.drainQ3` writes byte arrays to disk +* initializes lazily components reused by all `compilationUnits` using same instance of Context: + - `bTypes`, used by `CodeGen` and `PostProcessro`, defined in `BCodeIdiomatic` (BType maps, common BTypes like `StringRef`) + - `backendInterface:` - proxy to Context specific operations + - `codeGen: CodeGen` - uses `backendInterface`, `bTypes`, initializes instance of `DottyPrimitives` and defines `JMirrorBuilder` instance and implements bytecode generation flow (maps primitive members, like `int.+`, to bytecode instructions) + - `fontendAccess` - synchronized `PostProcessor` interface to compiler settings, reporting and GenBCode context (e.g. 
list of entrypoints) + - `postProcessor` - compilation context agnostic module dedicated to parallel processing of produced bytecode. Currently used only for writing Tasty and Class files. Defines `backendUtils` and `classfileWriter` +* sets context of current compilation unit to the shared context instance +* calls `codeGen.genUnit(ctx.compilation)` which returns structure with generated definitions (both Class files and Tasty) +* calls postProcessing of generated definition in `postProcessor` +* calls registered callbacks if needed for every generated class + +Upon calling `codeGen.genUnit` it: +* creates `PlainClassBuilder` instance for each generated `TypeDef` and creates ASM `ClassNode` +* creates optional mirror class if needed +* generates Tasty file content and stores its attributes in either mirror or plain class node + +`PostProcessor` is later: +* enriches `ClassNode` with collected serializable lambdas +* sets its inner classes +* serializes class and writes it to file, optionally it can execute registered callbacks for each generated file +* writes generated Tasty to file ## Architecture ## The architecture of `GenBCode` is the same as in Scalac. It can be partitioned into weakly coupled components (called "subsystems" below): - ### (a) The queue subsystem ### Queues mediate between processors, queues don't know what each processor does. @@ -126,4 +136,4 @@ emitting: ### (f) Building an ASM ClassNode given an AST TypeDef ### -It's done by `PlainClassBuilder`(see `GenBCode.scala`). +It's done by `PlainClassBuilder`(see `CodeGen.scala`).
diff --git a/docs/_docs/internals/core-data-structures.md b/docs/_docs/internals/core-data-structures.md deleted file mode 100644 index d42a24f0e426..000000000000 --- a/docs/_docs/internals/core-data-structures.md +++ /dev/null @@ -1,117 +0,0 @@ ---- -layout: doc-page -title: Core Data Structures ---- - -(The following is work in progress) - -## Symbols and SymDenotations - - - why symbols are not enough: their contents change all the time - - they change themselvesSo a `Symbol` - - reference: string + sig - - -Dotc is different from most other compilers in that it is centered around the idea of -maintaining views of various artifacts associated with code. These views are indexed -by tne - -A symbol refers to a definition in a source program. Traditionally, - compilers store context-dependent data in a _symbol table_. The - symbol then is the central reference to address context-dependent - data. But for `scalac`'s requirements it turns out that symbols are - both too little and too much for this task. - -Too little: The attributes of a symbol depend on the phase. Examples: -Types are gradually simplified by several phases. Owners are changed -in phases `LambdaLift` (when methods are lifted out to an enclosing -class) and Flatten (when all classes are moved to top level). Names -are changed when private members need to be accessed from outside -their class (for instance from a nested class or a class implementing -a trait). So a functional compiler, a `Symbol` by itself met mean -much. Instead we are more interested in the attributes of a symbol at -a given phase. - -`scalac` has a concept for "attributes of a symbol at - -Too much: If a symbol is used to refer to a definition in another -compilation unit, we get problems for incremental recompilation. The -unit containing the symbol might be changed and recompiled, which -might mean that the definition referred to by the symbol is deleted or -changed. 
This leads to the problem of stale symbols that refer to -definitions that no longer exist in this form. Scala 2 compiler tried to -address this problem by _rebinding_ symbols appearing in certain cross -module references, but it turned out to be too difficult to do this -reliably for all kinds of references. Scala 3 compiler attacks the problem at -the root instead. The fundamental problem is that symbols are too -specific to serve as a cross-module reference in a system with -incremental compilation. They refer to a particular definition, but -that definition may not persist unchanged after an edit. - -`scalac` uses instead a different approach: A cross module reference is -always type, either a `TermRef` or ` TypeRef`. A reference type contains -a prefix type and a name. The definition the type refers to is established -dynamically based on these fields. - - -a system where sources can be recompiled at any instance, - - the concept of a `Denotation`. - - Since definitions are transformed by phases, - - -The [Dotty project](https://github.com/lampepfl/dotty) -is a platform to develop new technology for Scala -tooling and to try out concepts of future Scala language versions. -Its compiler is a new design intended to reflect the -lessons we learned from work with the Scala compiler. A clean redesign -today will let us iterate faster with new ideas in the future. - -Today we reached an important milestone: The Dotty compiler can -compile itself, and the compiled compiler can act as a drop-in for the -original one. This is what one calls a *bootstrap*. - -## Why is this important? - -The main reason is that this gives us a some validation of the -*trustworthiness* of the compiler itself. Compilers are complex beasts, -and many things can go wrong. By far the worst things that can go -wrong are bugs where incorrect code is produced. It's not fun debugging code that looks perfectly -fine, yet gets translated to something subtly wrong by the compiler. 
- -Having the compiler compile itself is a good test to demonstrate that -the generated code has reached a certain level of quality. Not only is -a compiler a large program (44k lines in the case of dotty), it is -also one that exercises a large part of the language in quite -intricate ways. Moreover, bugs in the code of a compiler don't tend to -go unnoticed, precisely because every part of a compiler feeds into -other parts and all together are necessary to produce a correct -translation. - -## Are We Done Yet? - -Far from it! The compiler is still very rough. A lot more work is -needed to - - - make it more robust, in particular when analyzing incorrect programs, - - improve error messages and warnings, - - improve the efficiency of some of the generated code, - - embed it in external tools such as sbt, REPL, IDEs, - - remove restrictions on what Scala code can be compiled, - - help in migrating Scala code that will have to be changed. - -## What Are the Next Steps? - -Over the coming weeks and months, we plan to work on the following topics: - - - Make snapshot releases. - - Get the Scala standard library to compile. - - Work on SBT integration of the compiler. - - Work on IDE support. - - Investigate the best way to obtaining a REPL. - - Work on the build infrastructure. - -If you want to get your hands dirty with any of this, now is a good moment to get involved! -To get started: . - diff --git a/docs/_docs/internals/dotc-scalac.md b/docs/_docs/internals/dotc-scalac.md index 3f88502934b7..03baad375eb1 100644 --- a/docs/_docs/internals/dotc-scalac.md +++ b/docs/_docs/internals/dotc-scalac.md @@ -6,7 +6,50 @@ title: "Differences between Scalac and Dotty" Overview explanation how symbols, named types and denotations hang together: [Denotations1] -## Denotation ## +## Some background + +Dotc is different from most other compilers in that it is centered around the +idea of maintaining views of various artifacts associated with code. 
These views +are indexed by tne. + +A symbol refers to a definition in a source program. Traditionally, compilers +store context-dependent data in a _symbol table_. The symbol then is the central +reference to address context-dependent data. But for `scalac`'s requirements it +turns out that symbols are both too little and too much for this task. + +### Too little + +The attributes of a symbol depend on the phase. Examples: Types are +gradually simplified by several phases. Owners are changed in phases +`LambdaLift` (when methods are lifted out to an enclosing class) and Flatten +(when all classes are moved to top level). Names are changed when private +members need to be accessed from outside their class (for instance from a nested +class or a class implementing a trait). So a functional compiler, a `Symbol` by +itself met mean much. Instead we are more interested in the attributes of a +symbol at a given phase. + +### Too much + +If a symbol is used to refer to a definition in another compilation unit, we get +problems for incremental recompilation. The unit containing the symbol might be +changed and recompiled, which might mean that the definition referred to by the +symbol is deleted or changed. This leads to the problem of stale symbols that +refer to definitions that no longer exist in this form. Scala 2 compiler tried +to address this problem by _rebinding_ symbols appearing in certain cross module +references, but it turned out to be too difficult to do this reliably for all +kinds of references. Scala 3 compiler attacks the problem at the root instead. +The fundamental problem is that symbols are too specific to serve as a +cross-module reference in a system with incremental compilation. They refer to a +particular definition, but that definition may not persist unchanged after an +edit. + +`scalac` uses instead a different approach: A cross module reference is always +type, either a `TermRef` or ` TypeRef`. 
A reference type contains a prefix type +and a name. The definition the type refers to is established dynamically based +on these fields. + +## Denotation + Comment with a few details: [Denotations2] A `Denotation` is the result of a name lookup during a given period @@ -21,7 +64,7 @@ A `Denotation` is the result of a name lookup during a given period Denotations of methods have a signature ([Signature1]), which uniquely identifies overloaded methods. -### Denotation vs. SymDenotation ### +### Denotation vs. SymDenotation A `SymDenotation` is an extended denotation that has symbol-specific properties (that may change over phases) * `flags` @@ -31,7 +74,7 @@ A `SymDenotation` is an extended denotation that has symbol-specific properties `SymDenotation` implements lazy types (similar to scalac). The type completer assigns the denotation's `info`. -### Implicit Conversion ### +### Implicit Conversion There is an implicit conversion: ```scala core.Symbols.toDenot(sym: Symbol)(implicit ctx: Context): SymDenotation @@ -42,7 +85,7 @@ implicit conversion does **not** need to be imported, it is part of the implicit scope of the type `Symbol` (check the Scala spec). However, it can only be applied if an implicit `Context` is in scope. -## Symbol ## +## Symbol * `Symbol` instances have a `SymDenotation` * Most symbol properties in the Scala 2 compiler are now in the denotation (in the Scala 3 compiler). @@ -57,7 +100,7 @@ if (sym is Flags.PackageClass) // Scala 3 (*) `(*)` Symbols are implicitly converted to their denotation, see above. Each `SymDenotation` has flags that can be queried using the `is` method. -## Flags ## +## Flags * Flags are instances of the value class `FlagSet`, which encapsulates a `Long` * Each flag is either valid for types, terms, or both @@ -74,7 +117,7 @@ if (sym is Flags.PackageClass) // Scala 3 (*) `ModuleVal` / `ModuleClass` for either of the two. 
* `flags.is(Method | Param)`: true if `flags` has either of the two -## Tree ## +## Tree * Trees don't have symbols - `tree.symbol` is `tree.denot.symbol` - `tree.denot` is `tree.tpe.denot` where the `tpe` is a `NamdedType` (see @@ -86,13 +129,10 @@ if (sym is Flags.PackageClass) // Scala 3 (*) obtained from the symbol that the type refers to. This symbol is searched using `prefix.member(name)`. - -## Type ## +## Type * `MethodType(paramSyms, resultType)` from scalac => `mt @ MethodType(paramNames, paramTypes)`. Result type is `mt.resultType` -`@todo` - [Denotations1]: https://github.com/lampepfl/dotty/blob/a527f3b1e49c0d48148ccfb2eb52e3302fc4a349/compiler/src/dotty/tools/dotc/core/Denotations.scala#L27-L72 [Denotations2]: https://github.com/lampepfl/dotty/blob/a527f3b1e49c0d48148ccfb2eb52e3302fc4a349/compiler/src/dotty/tools/dotc/core/Denotations.scala#L77-L103 [Signature1]: https://github.com/lampepfl/dotty/blob/a527f3b1e49c0d48148ccfb2eb52e3302fc4a349/compiler/src/dotty/tools/dotc/core/Signature.scala#L9-L33 diff --git a/docs/_docs/internals/overall-structure.md b/docs/_docs/internals/overall-structure.md index f50ab6bf03a7..5bb43eb946a8 100644 --- a/docs/_docs/internals/overall-structure.md +++ b/docs/_docs/internals/overall-structure.md @@ -104,7 +104,6 @@ phases. The current list of phases is specified in class [Compiler] as follows: List(new semanticdb.ExtractSemanticDB) :: // Extract info into .semanticdb files List(new PostTyper) :: // Additional checks and cleanups after type checking List(new sjs.PrepJSInterop) :: // Additional checks and transformations for Scala.js (Scala.js only) - List(new Staging) :: // Check PCP, heal quoted types and expand macros List(new sbt.ExtractAPI) :: // Sends a representation of the API of classes to sbt via callbacks List(new SetRootTree) :: // Set the `rootTreeOrProvider` on class symbols Nil @@ -112,6 +111,10 @@ phases. 
The current list of phases is specified in class [Compiler] as follows: /** Phases dealing with TASTY tree pickling and unpickling */ protected def picklerPhases: List[List[Phase]] = List(new Pickler) :: // Generate TASTY info + List(new Inlining) :: // Inline and execute macros + List(new PostInlining) :: // Add mirror support for inlined code + List(new Staging) :: // Check staging levels and heal staged types + List(new Splicing) :: // Replace level 1 splices with holes List(new PickleQuotes) :: // Turn quoted trees into explicit run-time data structures Nil diff --git a/docs/_docs/internals/syntax-3.1.md b/docs/_docs/internals/syntax-3.1.md index 4d4d3b6d858d..0104222f50f5 100644 --- a/docs/_docs/internals/syntax-3.1.md +++ b/docs/_docs/internals/syntax-3.1.md @@ -11,7 +11,7 @@ hexadecimal code: ```ebnf UnicodeEscape ::= ‘\’ ‘u’ {‘u’} hexDigit hexDigit hexDigit hexDigit -hexDigit ::= ‘0’ | … | ‘9’ | ‘A’ | … | ‘F’ | ‘a’ | … | ‘f’ +hexDigit ::= ‘0’ | ... | ‘9’ | ‘A’ | ... | ‘F’ | ‘a’ | ... | ‘f’ ``` Informal descriptions are typeset as `“some comment”`. @@ -22,15 +22,15 @@ form. ```ebnf whiteSpace ::= ‘\u0020’ | ‘\u0009’ | ‘\u000D’ | ‘\u000A’ -upper ::= ‘A’ | … | ‘Z’ | ‘\$’ | ‘_’ “… and Unicode category Lu” -lower ::= ‘a’ | … | ‘z’ “… and Unicode category Ll” -letter ::= upper | lower “… and Unicode categories Lo, Lt, Lm, Nl” -digit ::= ‘0’ | … | ‘9’ +upper ::= ‘A’ | ... | ‘Z’ | ‘\$’ | ‘_’ “... and Unicode category Lu” +lower ::= ‘a’ | ... | ‘z’ “... and Unicode category Ll” +letter ::= upper | lower “... and Unicode categories Lo, Lt, Lm, Nl” +digit ::= ‘0’ | ... | ‘9’ paren ::= ‘(’ | ‘)’ | ‘[’ | ‘]’ | ‘{’ | ‘}’ | ‘'(’ | ‘'[’ | ‘'{’ delim ::= ‘`’ | ‘'’ | ‘"’ | ‘.’ | ‘;’ | ‘,’ opchar ::= ‘!’ | ‘#’ | ‘%’ | ‘&’ | ‘*’ | ‘+’ | ‘-’ | ‘/’ | ‘:’ | ‘<’ | ‘=’ | ‘>’ | ‘?’ | ‘@’ | ‘\’ | ‘^’ | ‘|’ | ‘~’ - “… and Unicode categories Sm, So” + “... 
and Unicode categories Sm, So” printableChar ::= “all characters in [\u0020, \u007E] inclusive” charEscapeSeq ::= ‘\’ (‘b’ | ‘t’ | ‘n’ | ‘f’ | ‘r’ | ‘"’ | ‘'’ | ‘\’) @@ -49,7 +49,7 @@ integerLiteral ::= (decimalNumeral | hexNumeral) [‘L’ | ‘l’] decimalNumeral ::= ‘0’ | nonZeroDigit {digit} hexNumeral ::= ‘0’ (‘x’ | ‘X’) hexDigit {hexDigit} digit ::= ‘0’ | nonZeroDigit -nonZeroDigit ::= ‘1’ | … | ‘9’ +nonZeroDigit ::= ‘1’ | ... | ‘9’ floatingPointLiteral ::= digit {digit} ‘.’ {digit} [exponentPart] [floatType] diff --git a/docs/_docs/internals/syntax.md b/docs/_docs/internals/syntax.md index bae8e6d3ec8d..2817a7477b10 100644 --- a/docs/_docs/internals/syntax.md +++ b/docs/_docs/internals/syntax.md @@ -20,51 +20,46 @@ productions map to AST nodes. The following description of Scala tokens uses literal characters `‘c’` when referring to the ASCII fragment `\u0000` – `\u007F`. -_Unicode escapes_ are used to represent the [Unicode character](https://www.w3.org/International/articles/definitions-characters/) with the given -hexadecimal code: - -```ebnf -UnicodeEscape ::= ‘\’ ‘u’ {‘u’} hexDigit hexDigit hexDigit hexDigit -hexDigit ::= ‘0’ | … | ‘9’ | ‘A’ | … | ‘F’ | ‘a’ | … | ‘f’ -``` - -Informal descriptions are typeset as `“some comment”`. - ## Lexical Syntax -The lexical syntax of Scala is given by the following grammar in EBNF -form. +The lexical syntax of Scala is given by the following grammar in EBNF form: ```ebnf whiteSpace ::= ‘\u0020’ | ‘\u0009’ | ‘\u000D’ | ‘\u000A’ -upper ::= ‘A’ | … | ‘Z’ | ‘\$’ | ‘_’ “… and Unicode category Lu” -lower ::= ‘a’ | … | ‘z’ “… and Unicode category Ll” -letter ::= upper | lower “… and Unicode categories Lo, Lt, Lm, Nl” -digit ::= ‘0’ | … | ‘9’ +upper ::= ‘A’ | ... | ‘Z’ | ‘$’ and any character in Unicode categories Lu, Lt or Nl, + and any character in Unicode categories Lo and Lm that doesn't have + contributory property Other_Lowercase +lower ::= ‘a’ | ... 
| ‘z’ | ‘_’ and any character in Unicode category Ll, + and any character in Unicode categories Lo or Lm that has contributory + property Other_Lowercase +letter ::= upper | lower +digit ::= ‘0’ | ... | ‘9’ paren ::= ‘(’ | ‘)’ | ‘[’ | ‘]’ | ‘{’ | ‘}’ delim ::= ‘`’ | ‘'’ | ‘"’ | ‘.’ | ‘;’ | ‘,’ opchar ::= ‘!’ | ‘#’ | ‘%’ | ‘&’ | ‘*’ | ‘+’ | ‘-’ | ‘/’ | ‘:’ | ‘<’ | ‘=’ | ‘>’ | ‘?’ | ‘@’ | ‘\’ | ‘^’ | ‘|’ | ‘~’ - “… and Unicode categories Sm, So” -printableChar ::= “all characters in [\u0020, \u007E] inclusive” + and any character in Unicode categories Sm or So +printableChar ::= all characters in [\u0020, \u007E] inclusive +UnicodeEscape ::= ‘\’ ‘u’ {‘u’} hexDigit hexDigit hexDigit hexDigit +hexDigit ::= ‘0’ | ... | ‘9’ | ‘A’ | ... | ‘F’ | ‘a’ | ... | ‘f’ charEscapeSeq ::= ‘\’ (‘b’ | ‘t’ | ‘n’ | ‘f’ | ‘r’ | ‘"’ | ‘'’ | ‘\’) +escapeSeq ::= UnicodeEscape | charEscapeSeq op ::= opchar {opchar} varid ::= lower idrest -alphaid ::= upper idrest - | varid +boundvarid ::= varid + | ‘`’ varid ‘`’ plainid ::= alphaid | op id ::= plainid - | ‘`’ { charNoBackQuoteOrNewline | UnicodeEscape | charEscapeSeq } ‘`’ + | ‘`’ { charNoBackQuoteOrNewline | escapeSeq } ‘`’ idrest ::= {letter | digit} [‘_’ op] quoteId ::= ‘'’ alphaid spliceId ::= ‘$’ alphaid ; integerLiteral ::= (decimalNumeral | hexNumeral) [‘L’ | ‘l’] -decimalNumeral ::= ‘0’ | nonZeroDigit [{digit | ‘_’} digit] +decimalNumeral ::= ‘0’ | digit [{digit | ‘_’} digit] hexNumeral ::= ‘0’ (‘x’ | ‘X’) hexDigit [{hexDigit | ‘_’} hexDigit] -nonZeroDigit ::= ‘1’ | … | ‘9’ floatingPointLiteral ::= [decimalNumeral] ‘.’ digit [{digit | ‘_’} digit] [exponentPart] [floatType] @@ -75,25 +70,25 @@ floatType ::= ‘F’ | ‘f’ | ‘D’ | ‘d’ booleanLiteral ::= ‘true’ | ‘false’ -characterLiteral ::= ‘'’ (printableChar | charEscapeSeq) ‘'’ +characterLiteral ::= ‘'’ (charNoQuoteOrNewline | escapeSeq) ‘'’ stringLiteral ::= ‘"’ {stringElement} ‘"’ | ‘"""’ multiLineChars ‘"""’ -stringElement ::= printableChar \ (‘"’ | ‘\’) - | UnicodeEscape - | 
charEscapeSeq -multiLineChars ::= {[‘"’] [‘"’] char \ ‘"’} {‘"’} -processedStringLiteral - ::= alphaid ‘"’ {[‘\’] processedStringPart | ‘\\’ | ‘\"’} ‘"’ - | alphaid ‘"""’ {[‘"’] [‘"’] char \ (‘"’ | ‘$’) | escape} {‘"’} ‘"""’ -processedStringPart +stringElement ::= charNoDoubleQuoteOrNewline + | escapeSeq +multiLineChars ::= {[‘"’] [‘"’] charNoDoubleQuote} {‘"’} + +interpolatedString + ::= alphaid ‘"’ {[‘\’] interpolatedStringPart | ‘\\’ | ‘\"’} ‘"’ + | alphaid ‘"""’ {[‘"’] [‘"’] char \ (‘"’ | ‘\$’) | escape} {‘"’} ‘"""’ +interpolatedStringPart ::= printableChar \ (‘"’ | ‘$’ | ‘\’) | escape -escape ::= ‘$$’ - | ‘$’ letter { letter | digit } - | ‘{’ Block [‘;’ whiteSpace stringFormat whiteSpace] ‘}’ -stringFormat ::= {printableChar \ (‘"’ | ‘}’ | ‘ ’ | ‘\t’ | ‘\n’)} - -symbolLiteral ::= ‘'’ plainid // until 2.13 +escape ::= ‘\$\$’ + | ‘\$"’ + | ‘\$’ alphaid + | ‘\$’ BlockExpr +alphaid ::= upper idrest + | varid comment ::= ‘/*’ “any sequence of characters; nested comments are allowed” ‘*/’ | ‘//’ “any sequence of characters up to end of line” @@ -140,7 +135,7 @@ type val var while with yield ### Soft keywords ``` -as derives end extension infix inline opaque open throws transparent using | * + - +as derives end erased extension infix inline opaque open throws transparent using | * + - ``` See the [separate section on soft keywords](../reference/soft-modifier.md) for additional @@ -159,7 +154,7 @@ SimpleLiteral ::= [‘-’] integerLiteral | characterLiteral | stringLiteral Literal ::= SimpleLiteral - | processedStringLiteral + | interpolatedStringLiteral | symbolLiteral | ‘null’ @@ -180,13 +175,13 @@ Type ::= FunType | FunParamClause ‘=>>’ Type TermLambdaTypeTree(ps, t) | MatchType | InfixType -FunType ::= FunTypeArgs (‘=>’ | ‘?=>’) Type Function(ts, t) +FunType ::= FunTypeArgs (‘=>’ | ‘?=>’) Type Function(ts, t) | FunctionWithMods(ts, t, mods, erasedParams) | HKTypeParamClause '=>' Type PolyFunction(ps, t) FunTypeArgs ::= InfixType | ‘(’ [ FunArgTypes ] ‘)’ | 
FunParamClause FunParamClause ::= ‘(’ TypedFunParam {‘,’ TypedFunParam } ‘)’ -TypedFunParam ::= id ‘:’ Type +TypedFunParam ::= [`erased`] id ‘:’ Type MatchType ::= InfixType `match` <<< TypeCaseClauses >>> InfixType ::= RefinedType {id [nl] RefinedType} InfixOp(t1, op, t2) RefinedType ::= AnnotType {[nl] Refinement} RefinedTypeTree(t, ds) @@ -207,11 +202,12 @@ Singleton ::= SimpleRef | SimpleLiteral | Singleton ‘.’ id Singletons ::= Singleton { ‘,’ Singleton } -FunArgType ::= Type - | ‘=>’ Type PrefixOp(=>, t) +FunArgType ::= [`erased`] Type + | [`erased`] ‘=>’ Type PrefixOp(=>, t) FunArgTypes ::= FunArgType { ‘,’ FunArgType } ParamType ::= [‘=>’] ParamValueType -ParamValueType ::= Type [‘*’] PostfixOp(t, "*") +ParamValueType ::= [‘into’] ExactParamType Into(t) +ExactParamType ::= ParamValueType [‘*’] PostfixOp(t, "*") TypeArgs ::= ‘[’ Types ‘]’ ts Refinement ::= :<<< [RefineDcl] {semi [RefineDcl]} >>> ds TypeBounds ::= [‘>:’ Type] [‘<:’ Type] TypeBoundsTree(lo, hi) @@ -228,7 +224,7 @@ BlockResult ::= FunParams (‘=>’ | ‘?=>’) Block | HkTypeParamClause ‘=>’ Block | Expr1 FunParams ::= Bindings - | id + | [`erased`] id | ‘_’ Expr1 ::= [‘inline’] ‘if’ ‘(’ Expr ‘)’ {nl} Expr [[semi] ‘else’ Expr] If(Parens(cond), thenp, elsep?) | [‘inline’] ‘if’ Expr ‘then’ Expr [[semi] ‘else’ Expr] If(cond, thenp, elsep?) 
@@ -318,7 +314,10 @@ TypeCaseClauses ::= TypeCaseClause { TypeCaseClause } TypeCaseClause ::= ‘case’ (InfixType | ‘_’) ‘=>’ Type [semi] Pattern ::= Pattern1 { ‘|’ Pattern1 } Alternative(pats) -Pattern1 ::= Pattern2 [‘:’ RefinedType] Bind(name, Typed(Ident(wildcard), tpe)) +Pattern1 ::= PatVar ‘:’ RefinedType Bind(name, Typed(Ident(wildcard), tpe)) + | [‘-’] integerLiteral ‘:’ RefinedType Typed(pat, tpe) + | [‘-’] floatingPointLiteral ‘:’ RefinedType Typed(pat, tpe) + | Pattern2 Pattern2 ::= [id ‘@’] InfixPattern [‘*’] Bind(name, pat) InfixPattern ::= SimplePattern { id [nl] SimplePattern } InfixOp(pat, op, pat) SimplePattern ::= PatVar Ident(wildcard) @@ -343,9 +342,6 @@ ClsTypeParamClause::= ‘[’ ClsTypeParam {‘,’ ClsTypeParam} ‘]’ ClsTypeParam ::= {Annotation} [‘+’ | ‘-’] TypeDef(Modifiers, name, tparams, bounds) id [HkTypeParamClause] TypeParamBounds Bound(below, above, context) -DefTypeParamClause::= ‘[’ DefTypeParam {‘,’ DefTypeParam} ‘]’ -DefTypeParam ::= {Annotation} id [HkTypeParamClause] TypeParamBounds - TypTypeParamClause::= ‘[’ TypTypeParam {‘,’ TypTypeParam} ‘]’ TypTypeParam ::= {Annotation} id [HkTypeParamClause] TypeBounds @@ -359,18 +355,29 @@ ClsParamClause ::= [nl] ‘(’ ClsParams ‘)’ ClsParams ::= ClsParam {‘,’ ClsParam} ClsParam ::= {Annotation} ValDef(mods, id, tpe, expr) -- point of mods on val/var [{Modifier} (‘val’ | ‘var’) | ‘inline’] Param -Param ::= id ‘:’ ParamType [‘=’ Expr] -DefParamClauses ::= {DefParamClause} [[nl] ‘(’ [‘implicit’] DefParams ‘)’] -DefParamClause ::= [nl] ‘(’ DefParams ‘)’ | UsingParamClause -UsingParamClause ::= [nl] ‘(’ ‘using’ (DefParams | FunArgTypes) ‘)’ -DefParams ::= DefParam {‘,’ DefParam} -DefParam ::= {Annotation} [‘inline’] Param ValDef(mods, id, tpe, expr) -- point of mods at id. 
+DefParamClauses ::= DefParamClause { DefParamClause } -- and two DefTypeParamClause cannot be adjacent +DefParamClause ::= DefTypeParamClause + | DefTermParamClause + | UsingParamClause +TypelessClauses ::= TypelessClause {TypelessClause} +TypelessClause ::= DefTermParamClause + | UsingParamClause + +DefTypeParamClause::= [nl] ‘[’ DefTypeParam {‘,’ DefTypeParam} ‘]’ +DefTypeParam ::= {Annotation} id [HkTypeParamClause] TypeParamBounds +DefTermParamClause::= [nl] ‘(’ [DefTermParams] ‘)’ +UsingParamClause ::= [nl] ‘(’ ‘using’ (DefTermParams | FunArgTypes) ‘)’ +DefImplicitClause ::= [nl] ‘(’ ‘implicit’ DefTermParams ‘)’ + +DefTermParams ::= DefTermParam {‘,’ DefTermParam} +DefTermParam ::= {Annotation} [`erased`] [‘inline’] Param ValDef(mods, id, tpe, expr) -- point of mods at id. +Param ::= id ‘:’ ParamType [‘=’ Expr] ``` ### Bindings and Imports ```ebnf -Bindings ::= ‘(’ [Binding {‘,’ Binding}] ‘)’ +Bindings ::= ‘(’[`erased`] [Binding {‘,’ [`erased`] Binding}] ‘)’ Binding ::= (id | ‘_’) [‘:’ Type] ValDef(_, id, tpe, EmptyTree) Modifier ::= LocalModifier @@ -415,8 +422,8 @@ Dcl ::= RefineDcl | ‘var’ VarDcl ValDcl ::= ids ‘:’ Type PatDef(_, ids, tpe, EmptyTree) VarDcl ::= ids ‘:’ Type PatDef(_, ids, tpe, EmptyTree) -DefDcl ::= DefSig ‘:’ Type DefDef(_, name, tparams, vparamss, tpe, EmptyTree) -DefSig ::= id [DefTypeParamClause] DefParamClauses +DefDcl ::= DefSig ‘:’ Type DefDef(_, name, paramss, tpe, EmptyTree) +DefSig ::= id [DefParamClauses] [DefImplicitClause] TypeDcl ::= id [TypeParamClause] {FunParamClause} TypeBounds TypeDefTree(_, name, tparams, bound [‘=’ Type] @@ -427,8 +434,8 @@ Def ::= ‘val’ PatDef | TmplDef PatDef ::= ids [‘:’ Type] ‘=’ Expr | Pattern2 [‘:’ Type] ‘=’ Expr PatDef(_, pats, tpe?, expr) -DefDef ::= DefSig [‘:’ Type] ‘=’ Expr DefDef(_, name, tparams, vparamss, tpe, expr) - | ‘this’ DefParamClause DefParamClauses ‘=’ ConstrExpr DefDef(_, , Nil, vparamss, EmptyTree, expr | Block) +DefDef ::= DefSig [‘:’ Type] ‘=’ Expr DefDef(_, name, paramss, 
tpe, expr) + | ‘this’ TypelessClauses [DefImplicitClause] ‘=’ ConstrExpr DefDef(_, , vparamss, EmptyTree, expr | Block) TmplDef ::= ([‘case’] ‘class’ | ‘trait’) ClassDef | [‘case’] ‘object’ ObjectDef @@ -440,10 +447,10 @@ ConstrMods ::= {Annotation} [AccessModifier] ObjectDef ::= id [Template] ModuleDef(mods, name, template) // no constructor EnumDef ::= id ClassConstr InheritClauses EnumBody GivenDef ::= [GivenSig] (AnnotType [‘=’ Expr] | StructuralInstance) -GivenSig ::= [id] [DefTypeParamClause] {UsingParamClause} ‘:’ -- one of `id`, `DefParamClause`, `UsingParamClause` must be present +GivenSig ::= [id] [DefTypeParamClause] {UsingParamClause} ‘:’ -- one of `id`, `DefTypeParamClause`, `UsingParamClause` must be present StructuralInstance ::= ConstrApp {‘with’ ConstrApp} [‘with’ WithTemplateBody] Extension ::= ‘extension’ [DefTypeParamClause] {UsingParamClause} - ‘(’ DefParam ‘)’ {UsingParamClause} ExtMethods + ‘(’ DefTermParam ‘)’ {UsingParamClause} ExtMethods ExtMethods ::= ExtMethod | [nl] <<< ExtMethod {semi ExtMethod} >>> ExtMethod ::= {Annotation [nl]} {Modifier} ‘def’ DefDef | Export diff --git a/docs/_docs/reference/changed-features/implicit-resolution.md b/docs/_docs/reference/changed-features/implicit-resolution.md index bf15baa3299c..6a898690b565 100644 --- a/docs/_docs/reference/changed-features/implicit-resolution.md +++ b/docs/_docs/reference/changed-features/implicit-resolution.md @@ -67,7 +67,8 @@ Opaque type aliases count as anchors only outside the scope where their alias is 1. If _T_ is a reference to a type parameter, the union of the anchors of both of its bounds. 1. If _T_ is a singleton reference, the anchors of its underlying type, plus, if _T_ is of the form _(P#x).type_, the anchors of _P_. - 1. If _T_ is the this-type _o.this_ of a static object _o_, the anchors of a term reference _o.type_ to that object. + 1. If _T_ is the this-type _o.this_ of a static object _o_, the anchors of a term reference _o.type_ to that object, + 1. 
If _T_ is some other this-type _P.this.type_, the anchors of _P_. 1. If _T_ is some other type, the union of the anchors of each constituent type of _T_. **Definition:** The _implicit scope_ of a type _T_ is the smallest set _S_ of term references such that diff --git a/docs/_docs/reference/changed-features/imports.md b/docs/_docs/reference/changed-features/imports.md index 2058ef08b7db..b322a6a58393 100644 --- a/docs/_docs/reference/changed-features/imports.md +++ b/docs/_docs/reference/changed-features/imports.md @@ -46,7 +46,7 @@ are offered under settings `-source 3.1-migration -rewrite`. ## Syntax -``` +```ebnf Import ::= ‘import’ ImportExpr {‘,’ ImportExpr} ImportExpr ::= SimpleRef {‘.’ id} ‘.’ ImportSpec | SimpleRef `as` id diff --git a/docs/_docs/reference/changed-features/interpolation-escapes.md b/docs/_docs/reference/changed-features/interpolation-escapes.md index 594e7671c5ab..4abeabdce3ac 100644 --- a/docs/_docs/reference/changed-features/interpolation-escapes.md +++ b/docs/_docs/reference/changed-features/interpolation-escapes.md @@ -4,7 +4,7 @@ title: "Escapes in interpolations" nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/interpolation-escapes.html --- -In Scala 2 there is no straightforward way to represent a single quote character `"` in a single quoted interpolation. A `\` character can't be used for that because interpolators themselves decide how to handle escaping, so the parser doesn't know whether the `"` character should be escaped or used as a terminator. +In Scala 2 there is no straightforward way to represent a double-quote character `"` in a quoted interpolation (except in triple-quote interpolation). A `\` character can't be used for that because interpolators themselves decide how to handle escaping, so the parser doesn't know whether the `"` character should be escaped or used as a terminator. In Scala 3, we can use the `$` meta character of interpolations to escape a `"` character. 
Example: diff --git a/docs/_docs/reference/changed-features/match-syntax.md b/docs/_docs/reference/changed-features/match-syntax.md index dba50e9beb6a..3f4d608e261f 100644 --- a/docs/_docs/reference/changed-features/match-syntax.md +++ b/docs/_docs/reference/changed-features/match-syntax.md @@ -47,7 +47,7 @@ The syntactical precedence of match expressions has been changed. The new syntax of match expressions is as follows. -``` +```ebnf InfixExpr ::= ... | InfixExpr MatchClause SimpleExpr ::= ... diff --git a/docs/_docs/reference/changed-features/overload-resolution.md b/docs/_docs/reference/changed-features/overload-resolution.md index bd7782ded520..621515c2a7f8 100644 --- a/docs/_docs/reference/changed-features/overload-resolution.md +++ b/docs/_docs/reference/changed-features/overload-resolution.md @@ -66,11 +66,11 @@ as follows: Replace the sentence -> Otherwise, let `S1,…,Sm` be the vector of types obtained by typing each argument with an undefined expected type. +> Otherwise, let `S1,...,Sm` be the vector of types obtained by typing each argument with an undefined expected type. with the following paragraph: -> Otherwise, let `S1,…,Sm` be the vector of known types of all argument types, where the _known type_ of an argument `E` +> Otherwise, let `S1,...,Sm` be the vector of known types of all argument types, where the _known type_ of an argument `E` is determined as followed: - If `E` is a function value `(p_1, ..., p_n) => B` that misses some parameter types, the known type diff --git a/docs/_docs/reference/changed-features/pattern-bindings.md b/docs/_docs/reference/changed-features/pattern-bindings.md index 2de338fc1dde..a75d64e7cd2d 100644 --- a/docs/_docs/reference/changed-features/pattern-bindings.md +++ b/docs/_docs/reference/changed-features/pattern-bindings.md @@ -50,7 +50,7 @@ for case (x, y) <- elems yield (y, x) // returns List((2, 1), (4, 3)) ## Syntax Changes Generators in for expressions may be prefixed with `case`. 
-``` +```ebnf Generator ::= [‘case’] Pattern1 ‘<-’ Expr ``` diff --git a/docs/_docs/reference/changed-features/structural-types-spec.md b/docs/_docs/reference/changed-features/structural-types-spec.md index d456932649fb..18d0f31ee6fe 100644 --- a/docs/_docs/reference/changed-features/structural-types-spec.md +++ b/docs/_docs/reference/changed-features/structural-types-spec.md @@ -6,7 +6,7 @@ nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/structu ## Syntax -``` +```ebnf SimpleType ::= ... | Refinement Refinement ::= ‘{’ RefineStatSeq ‘}’ RefineStatSeq ::= RefineStat {semi RefineStat} diff --git a/docs/_docs/reference/changed-features/vararg-splices.md b/docs/_docs/reference/changed-features/vararg-splices.md index 43c4acc5f880..8f23af771216 100644 --- a/docs/_docs/reference/changed-features/vararg-splices.md +++ b/docs/_docs/reference/changed-features/vararg-splices.md @@ -24,7 +24,7 @@ The old syntax for splice arguments will be phased out. ## Syntax -``` +```ebnf ArgumentPatterns ::= ‘(’ [Patterns] ‘)’ | ‘(’ [Patterns ‘,’] Pattern2 ‘*’ ‘)’ diff --git a/docs/_docs/reference/contextual/context-bounds.md b/docs/_docs/reference/contextual/context-bounds.md index 42479d6802b3..11d57c8cbd52 100644 --- a/docs/_docs/reference/contextual/context-bounds.md +++ b/docs/_docs/reference/contextual/context-bounds.md @@ -47,7 +47,7 @@ done automatically under `-rewrite`. ## Syntax -``` +```ebnf TypeParamBounds ::= [SubtypeBounds] {ContextBound} ContextBound ::= ‘:’ Type ``` diff --git a/docs/_docs/reference/contextual/context-functions-spec.md b/docs/_docs/reference/contextual/context-functions-spec.md index 109513e9da86..385ee3901fd8 100644 --- a/docs/_docs/reference/contextual/context-functions-spec.md +++ b/docs/_docs/reference/contextual/context-functions-spec.md @@ -6,7 +6,7 @@ nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/context-funct ## Syntax -``` +```ebnf Type ::= ... | FunArgTypes ‘?=>’ Type Expr ::= ... 
diff --git a/docs/_docs/reference/contextual/derivation.md b/docs/_docs/reference/contextual/derivation.md index a4da7c470e3c..66d0cf3fdf38 100644 --- a/docs/_docs/reference/contextual/derivation.md +++ b/docs/_docs/reference/contextual/derivation.md @@ -34,6 +34,9 @@ given [T: Ordering]: Ordering[Option[T]] = Ordering.derived It is discouraged to directly refer to the `derived` member if you can use a `derives` clause instead. +All data types can have a `derives` clause. This document focuses primarily on data types which also have a given instance +of the `Mirror` type class available. + ## Exact mechanism In the following, when type arguments are enumerated and the first index evaluates to a larger value than the last, then there are actually no arguments, for example: `A[T_2, ..., T_1]` means `A`. @@ -281,7 +284,7 @@ Note the following properties of `Mirror` types, + The methods `ordinal` and `fromProduct` are defined in terms of `MirroredMonoType` which is the type of kind-`*` which is obtained from `MirroredType` by wildcarding its type parameters. -### Implementing `derived` with `Mirror` +## Implementing `derived` with `Mirror` As seen before, the signature and implementation of a `derived` method for a type class `TC[_]` are arbitrary, but we expect it to typically be of the following form: @@ -507,9 +510,9 @@ The framework described here enables all three of these approaches without manda For a brief discussion on how to use macros to write a type class `derived` method please read more at [How to write a type class `derived` method using macros](./derivation-macro.md). 
-### Syntax +## Syntax -``` +```ebnf Template ::= InheritClauses [TemplateBody] EnumDef ::= id ClassConstr InheritClauses EnumBody InheritClauses ::= [‘extends’ ConstrApps] [‘derives’ QualId {‘,’ QualId}] diff --git a/docs/_docs/reference/contextual/extension-methods.md b/docs/_docs/reference/contextual/extension-methods.md index d23cadf513d7..d98d80caafc5 100644 --- a/docs/_docs/reference/contextual/extension-methods.md +++ b/docs/_docs/reference/contextual/extension-methods.md @@ -244,7 +244,18 @@ The precise rules for resolving a selection to an extension method are as follow Assume a selection `e.m[Ts]` where `m` is not a member of `e`, where the type arguments `[Ts]` are optional, and where `T` is the expected type. The following two rewritings are tried in order: - 1. The selection is rewritten to `m[Ts](e)`. + 1. The selection is rewritten to `m[Ts](e)` and typechecked, using the following + slight modification of the name resolution rules: + + - If `m` is imported by several imports which are all on the nesting level, + try each import as an extension method instead of failing with an ambiguity. + If only one import leads to an expansion that typechecks without errors, pick + that expansion. If there are several such imports, but only one import which is + not a wildcard import, pick the expansion from that import. Otherwise, report + an ambiguous reference error. + + **Note**: This relaxation is currently enabled only under the `experimental.relaxedExtensionImports` language import. + 2. If the first rewriting does not typecheck with expected type `T`, and there is an extension method `m` in some eligible object `o`, the selection is rewritten to `o.m[Ts](e)`. An object `o` is _eligible_ if @@ -285,7 +296,7 @@ def position(s: String)(ch: Char, n: Int): Int = Here are the syntax changes for extension methods and collective extensions relative to the [current syntax](../syntax.md). -``` +```ebnf BlockStat ::= ... | Extension TemplateStat ::= ... 
| Extension TopStat ::= ... | Extension diff --git a/docs/_docs/reference/contextual/given-imports.md b/docs/_docs/reference/contextual/given-imports.md index 6a55368979b1..28442581e408 100644 --- a/docs/_docs/reference/contextual/given-imports.md +++ b/docs/_docs/reference/contextual/given-imports.md @@ -103,7 +103,7 @@ given instances once their user base has migrated. ## Syntax -``` +```ebnf Import ::= ‘import’ ImportExpr {‘,’ ImportExpr} Export ::= ‘export’ ImportExpr {‘,’ ImportExpr} ImportExpr ::= SimpleRef {‘.’ id} ‘.’ ImportSpec diff --git a/docs/_docs/reference/contextual/givens.md b/docs/_docs/reference/contextual/givens.md index 1bfffbc5bf6f..f1333bf8811f 100644 --- a/docs/_docs/reference/contextual/givens.md +++ b/docs/_docs/reference/contextual/givens.md @@ -10,7 +10,7 @@ that serve for synthesizing arguments to [context parameters](./using-clauses.md ```scala trait Ord[T]: def compare(x: T, y: T): Int - extension (x: T) + extension (x: T) def < (y: T) = compare(x, y) < 0 def > (y: T) = compare(x, y) > 0 @@ -174,7 +174,7 @@ is created for each reference. Here is the syntax for given instances: -``` +```ebnf TmplDef ::= ... 
| ‘given’ GivenDef GivenDef ::= [GivenSig] StructuralInstance diff --git a/docs/_docs/reference/contextual/right-associative-extension-methods.md b/docs/_docs/reference/contextual/right-associative-extension-methods.md index 068123df8cd2..61f0beece6ed 100644 --- a/docs/_docs/reference/contextual/right-associative-extension-methods.md +++ b/docs/_docs/reference/contextual/right-associative-extension-methods.md @@ -4,45 +4,57 @@ title: "Right-Associative Extension Methods: Details" nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/right-associative-extension-methods.html --- -The most general form of leading parameters of an extension method is as follows: + +The most general signature an extension method can have is as follows: + - An optional type clause `leftTyParams` - A possibly empty list of using clauses `leadingUsing` - - A single parameter `extensionParam` + - A single parameter `leftParam` (in an explicit term clause) - A possibly empty list of using clauses `trailingUsing` + - A name (preceded by the `def` keyword) + - An optional type clause `rightTyParams` + - An optional single parameter `rightParam` (in an explicit term clause) + - Any number of any clauses `rest` -This is then followed by `def`, the method name, and possibly further parameters -`otherParams`. 
An example is: +For example: ```scala - extension (using a: A, b: B)(using c: C) // <-- leadingUsing - (x: X) // <-- extensionParam + extension [T] // <-- leftTyParams + (using a: A, b: B)(using c: C) // <-- leadingUsing + (x: X) // <-- leftParam (using d: D) // <-- trailingUsing - def +:: (y: Y)(using e: E)(z: Z) // <-- otherParams + def +:: [U] // <-- rightTyParams + (y: Y) // <-- rightParam + (using e: E)(z: Z) // <-- rest ``` + An extension method is treated as a right-associative operator (as in [SLS §6.12.3](https://www.scala-lang.org/files/archive/spec/2.13/06-expressions.html#infix-operations)) -if it has a name ending in `:` and is immediately followed by a -single parameter. In the example above, that parameter is `(y: Y)`. +if it has a name ending in `:`, and is immediately followed by a +single explicit term parameter (in other words, `rightParam` is present). In the example above, that parameter is `(y: Y)`. The Scala compiler pre-processes a right-associative infix operation such as `x +: xs` to `xs.+:(x)` if `x` is a pure expression or a call-by-name parameter and to `val y = x; xs.+:(y)` otherwise. This is necessary since a regular right-associative infix method is defined in the class of its right operand. To make up for this swap, -the expansion of right-associative extension methods performs an analogous parameter swap. More precisely, if `otherParams` consists of a single parameter -`rightParam` followed by `remaining`, the total parameter sequence +the expansion of right-associative extension methods performs the inverse parameter swap. More precisely, if `rightParam` is present, the total parameter sequence of the extension method's expansion is: ``` - leadingUsing rightParam trailingUsing extensionParam remaining + leftTyParams leadingUsing rightTyParams rightParam leftParam trailingUsing rest ``` +In other words, we swap `leftParams trailingUsing` with `rightTyParam rightParam`. 
+ For instance, the `+::` method above would become ```scala - def +:: (using a: A, b: B)(using c: C) + def +:: [T] + (using a: A, b: B)(using c: C) + [U] (y: Y) - (using d: D) (x: X) + (using d: D) (using e: E)(z: Z) ``` diff --git a/docs/_docs/reference/contextual/using-clauses.md b/docs/_docs/reference/contextual/using-clauses.md index f590cc2e7492..9177a2f47dc9 100644 --- a/docs/_docs/reference/contextual/using-clauses.md +++ b/docs/_docs/reference/contextual/using-clauses.md @@ -150,10 +150,10 @@ def summon[T](using x: T): x.type = x Here is the new syntax of parameters and arguments seen as a delta from the [standard context free syntax of Scala 3](../syntax.md). `using` is a soft keyword, recognized only at the start of a parameter or argument list. It can be used as a normal identifier everywhere else. -``` +```ebnf ClsParamClause ::= ... | UsingClsParamClause -DefParamClauses ::= ... | UsingParamClause +DefParamClause ::= ... | UsingParamClause UsingClsParamClause ::= ‘(’ ‘using’ (ClsParams | Types) ‘)’ -UsingParamClause ::= ‘(’ ‘using’ (DefParams | Types) ‘)’ +UsingParamClause ::= ‘(’ ‘using’ (DefTermParams | Types) ‘)’ ParArgumentExprs ::= ... | ‘(’ ‘using’ ExprsInParens ‘)’ ``` diff --git a/docs/_docs/reference/enums/adts.md b/docs/_docs/reference/enums/adts.md index 3ab8c9f3b45b..5219e062a633 100644 --- a/docs/_docs/reference/enums/adts.md +++ b/docs/_docs/reference/enums/adts.md @@ -154,7 +154,7 @@ The changes are specified below as deltas with respect to the Scala syntax given 1. Enum definitions are defined as follows: - ``` + ```ebnf TmplDef ::= `enum' EnumDef EnumDef ::= id ClassConstr [`extends' [ConstrApps]] EnumBody EnumBody ::= [nl] ‘{’ [SelfType] EnumStat {semi EnumStat} ‘}’ @@ -164,7 +164,7 @@ The changes are specified below as deltas with respect to the Scala syntax given 2. 
Cases of enums are defined as follows: - ``` + ```ebnf EnumCase ::= `case' (id ClassConstr [`extends' ConstrApps]] | ids) ``` diff --git a/docs/_docs/reference/experimental/erased-defs-spec.md b/docs/_docs/reference/experimental/erased-defs-spec.md index 5395a8468399..59dfed92da2a 100644 --- a/docs/_docs/reference/experimental/erased-defs-spec.md +++ b/docs/_docs/reference/experimental/erased-defs-spec.md @@ -19,8 +19,8 @@ TODO: complete def g(erased x: Int) = ... - (erased x: Int) => ... - def h(x: (erased Int) => Int) = ... + (erased x: Int, y: Int) => ... + def h(x: (Int, erased Int) => Int) = ... class K(erased x: Int) { ... } erased class E {} @@ -34,12 +34,12 @@ TODO: complete 3. Functions * `(erased x1: T1, x2: T2, ..., xN: TN) => y : (erased T1, T2, ..., TN) => R` - * `(given erased x1: T1, x2: T2, ..., xN: TN) => y: (given erased T1, T2, ..., TN) => R` + * `(given x1: T1, erased x2: T2, ..., xN: TN) => y: (given T1, erased T2, ..., TN) => R` * `(given erased T1) => R <:< erased T1 => R` - * `(given erased T1, T2) => R <:< (erased T1, T2) => R` + * `(given T1, erased T2) => R <:< (T1, erased T2) => R` * ... - Note that there is no subtype relation between `(erased T) => R` and `T => R` (or `(given erased T) => R` and `(given T) => R`) + Note that there is no subtype relation between `(erased T) => R` and `T => R` (or `(given erased T) => R` and `(given T) => R`). The `erased` parameters must match exactly in their respective positions. 4. Eta expansion @@ -51,7 +51,8 @@ TODO: complete * All `erased` parameters are removed from the function * All argument to `erased` parameters are not passed to the function * All `erased` definitions are removed - * All `(erased T1, T2, ..., TN) => R` and `(given erased T1, T2, ..., TN) => R` become `() => R` + * `(erased ET1, erased ET2, T1, ..., erased ETN, TM) => R` are erased to `(T1, ..., TM) => R`. + * `(given erased ET1, erased ET2, T1, ..., erased ETN, TM) => R` are erased to `(given T1, ..., TM) => R`. 6. 
Overloading @@ -60,5 +61,10 @@ TODO: complete 7. Overriding - * Member definitions overriding each other must both be `erased` or not be `erased` - * `def foo(x: T): U` cannot be overridden by `def foo(erased x: T): U` and vice-versa + * Member definitions overriding each other must both be `erased` or not be `erased`. + * `def foo(x: T): U` cannot be overridden by `def foo(erased x: T): U` and vice-versa. + +8. Type Restrictions + * For dependent functions, `erased` parameters are limited to realizable types, that is, types that are inhabited by non-null values. + This restriction stops us from using a bad bound introduced by an erased value, which leads to unsoundness (see #4060). + * Polymorphic functions with erased parameters are currently not supported, and will be rejected by the compiler. This is purely an implementation restriction, and might be lifted in the future. diff --git a/docs/_docs/reference/experimental/erased-defs.md b/docs/_docs/reference/experimental/erased-defs.md index 28455f26cdc0..d266cd6c9d19 100644 --- a/docs/_docs/reference/experimental/erased-defs.md +++ b/docs/_docs/reference/experimental/erased-defs.md @@ -54,13 +54,13 @@ semantics and they are completely erased. ## How to define erased terms? Parameters of methods and functions can be declared as erased, placing `erased` -in front of a parameter list (like `given`). +in front of each erased parameter (like `inline`). ```scala -def methodWithErasedEv(erased ev: Ev): Int = 42 +def methodWithErasedEv(erased ev: Ev, x: Int): Int = x + 2 -val lambdaWithErasedEv: erased Ev => Int = - (erased ev: Ev) => 42 +val lambdaWithErasedEv: (erased Ev, Int) => Int = + (erased ev, x) => x + 2 ``` `erased` parameters will not be usable for computations, though they can be used @@ -80,7 +80,7 @@ parameters. ```scala erased val erasedEvidence: Ev = ... -methodWithErasedEv(erasedEvidence) +methodWithErasedEv(erasedEvidence, 40) // 42 ``` ## What happens with erased values at runtime? 
@@ -89,15 +89,15 @@ As `erased` are guaranteed not to be used in computations, they can and will be erased. ```scala -// becomes def methodWithErasedEv(): Int at runtime -def methodWithErasedEv(erased ev: Ev): Int = ... +// becomes def methodWithErasedEv(x: Int): Int at runtime +def methodWithErasedEv(x: Int, erased ev: Ev): Int = ... def evidence1: Ev = ... erased def erasedEvidence2: Ev = ... // does not exist at runtime erased val erasedEvidence3: Ev = ... // does not exist at runtime -// evidence1 is not evaluated and no value is passed to methodWithErasedEv -methodWithErasedEv(evidence1) +// evidence1 is not evaluated and only `x` is passed to methodWithErasedEv +methodWithErasedEv(x, evidence1) ``` ## State machine with erased evidence example @@ -161,8 +161,8 @@ object Machine: // State must be Off ``` -Note that in [Inline](../metaprogramming/inline.md) we discussed `erasedValue` and inline -matches. `erasedValue` is implemented with `erased`, so the state machine above +Note that in [Compile-time operations](../metaprogramming/compiletime-ops.md#erasedvalue) we discussed `erasedValue` and inline +matches. `erasedValue` is internally implemented with `erased` (and is not experimental), so the state machine above can be encoded as follows: ```scala diff --git a/docs/_docs/reference/experimental/generalized-method-syntax.md b/docs/_docs/reference/experimental/generalized-method-syntax.md new file mode 100644 index 000000000000..072052c1ae10 --- /dev/null +++ b/docs/_docs/reference/experimental/generalized-method-syntax.md @@ -0,0 +1,102 @@ +--- +layout: doc-page +title: "Generalized Method Syntax" +nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/generalized-method-syntax.html +--- + +This feature is not yet part of the Scala 3 language definition. 
It can be made available by a language import: + +```scala +import scala.language.experimental.clauseInterleaving +``` + +The inclusion of using clauses is not the only way in which methods have been updated, type parameter clauses are now allowed in any number and at any position. + +## Syntax Changes + +### In Scala 2 + +The old syntax only allowed zero or one type parameter clause, followed by any number of term clauses, optionnally followed by an implicit clause: + +```scala +def foo[T, U](x: T)(y: U)(z: Int, s: String)(a: Array[T])(implicit ordInt: Ord[Int], l: List[U]) +``` + +### In Scala 3 + +The new syntax allows any number of type clauses, as long as they are not adjacent: +(do note however that [implicit clause are discouraged, in favor of using clauses](https://docs.scala-lang.org/scala3/reference/contextual/relationship-implicits.html)) + +```scala +def foo[T, U](x: T)(y: U)[V](z: V, s: String)(using Ord[Int])[A](a: Array[A])(implicit List[U]) +``` + +### Unchanged + +Class definitions and type declarations are unaffected, there can only be up to one type clause, in leading posion. + +## Motivation + +The new syntax is a powerful but natural extension of the old one, it allows new design patterns while staying intuitive and legible. + +### Dependent Type Clauses + +As type clauses can come after term clauses, it is now possible to have type parameters that depend on term parameters: + +```scala +trait Key { type Value } +trait DB { + def get(k: Key): Option[k.Value] // dependent result type + def getOrElse(k: Key)[V >: k.Value](default: V): V // dependent type parameter +} +``` + +Note that simply replacing `V` by `k.Value` would not be equivalent. For example, if `k.Value` is `Some[Int]`, only the above allows: +`getOrElse(k)[Option[Int]](None)`, which returns a `Number`. + +## Details + +### Application + +Method application is unchanged. +When multiple type clauses are expected but not all are passed, the rightmost ones are inferred. 
+ +In particular, the following does not type check, even though the argument `Char` is only valid for `C`: +```scala +def triple[I <: Int](using Ordering[I])[C <: Char](a: I, b: C) = ??? +triple[Char](0, 'c') // error: Char does not conform to upperbound Int +``` + +### Extension Methods + +Extension methods follow the same syntax, for example the following is valid: +```scala +extension [T](l1: List[T]) + def zipWith[U](l2: List[U])[V](l3: List[V]): List[(T,U,V)] +``` + +### When to use + +We recommand to always put a unique type clause at the beginning, unless it is not possible to do so. +For example, the extension method `zipWith` above should be written `zipWith[U, V](l2: List[U], l3: List[V]): List[(T,U,V)]` instead. +On the other hand, the `getOrElse` method is recommended as-is, as it cannot be written with a leading type clause. + +### Formal syntax + +``` +DefDcl ::= DefSig ‘:’ Type +DefDef ::= DefSig [‘:’ Type] ‘=’ Expr +DefSig ::= id [DefParamClauses] [DefImplicitClause] +DefParamClauses ::= DefParamClause { DefParamClause } -- and two DefTypeParamClause cannot be adjacent +DefParamClause ::= DefTypeParamClause + | DefTermParamClause + | UsingParamClause +DefTypeParamClause::= [nl] ‘[’ DefTypeParam {‘,’ DefTypeParam} ‘]’ +DefTypeParam ::= {Annotation} id [HkTypeParamClause] TypeParamBounds +DefTermParamClause::= [nl] ‘(’ [DefTermParams] ‘)’ +UsingParamClause ::= [nl] ‘(’ ‘using’ (DefTermParams | FunArgTypes) ‘)’ +DefImplicitClause ::= [nl] ‘(’ ‘implicit’ DefTermParams ‘)’ +DefTermParams ::= DefTermParam {‘,’ DefTermParam} +DefTermParam ::= {Annotation} [‘inline’] Param +Param ::= id ‘:’ ParamType [‘=’ Expr] +``` diff --git a/docs/_docs/reference/experimental/into-modifier.md b/docs/_docs/reference/experimental/into-modifier.md new file mode 100644 index 000000000000..2ee4c74539b3 --- /dev/null +++ b/docs/_docs/reference/experimental/into-modifier.md @@ -0,0 +1,81 @@ +--- +layout: doc-page +title: "The `into` Type Modifier" +redirectFrom: 
/docs/reference/other-new-features/into-modifier.html +nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/into-modifier.html +--- + +Scala 3's implicit conversions of the `scala.Conversion` class require a language import +``` +import scala.language.implicitConversions +``` +in any code that uses them as implicit conversions (code that calls conversions explicitly is not affected). If the import is missing, a feature warning is currently issued, and this will become an error in a future version of Scala 3. The motivation for this restriction is that code with hidden implicit conversions is hard to understand and might have correctness or performance problems that go undetected. + +There is one broad use case, however, where implicit conversions are very hard to replace. This is the case where an implicit conversion is used to adapt a method argument to its formal parameter type. An example from the standard library: +```scala +scala> val xs = List(0, 1) +scala> val ys = Array(2, 3) +scala> xs ++ ys +val res0: List[Int] = List(0, 1, 2, 3) +``` +The last input made use of an implicit conversion from `Array[Int]` to `IterableOnce[Int]` which is defined as a Scala 2 style implicit conversion in the standard library. Once the standard library is rewritten with Scala 3 conversions, this will +require a language import at the use site, which is clearly unacceptable. It is possible to avoid the need for implicit conversions using method overloading or type classes, but this often leads to longer and more complicated code, and neither of these alternatives work for vararg parameters. + +This is where the `into` modifier on parameter types comes in. 
Here is a signature of the `++` method on `List[A]` that uses it: +```scala + def ++ (elems: into IterableOnce[A]): List[A] +``` +The `into` modifier on the type of `elems` means that implicit conversions can be applied to convert the actual argument to an `IterableOnce` value, and this without needing a language import. + +## Function arguments + +`into` also allows conversions on the results of function arguments. For instance, consider the new proposed signature of the `flatMap` method on `List[A]`: + +```scala + def flatMap[B](f: into A => IterableOnce[B]): List[B] +``` +This allows a conversion of the actual argument to the function type `A => IterableOnce[B]`. Crucially, it also allows that conversion to be applied to +the function result. So the following would work: +```scala +scala> val xs = List(1, 2, 3) +scala> xs.flatMap(x => x.toString * x) +val res2: List[Char] = List(1, 2, 2, 3, 3, 3) +``` +Here, the conversion from `String` to `Iterable[Char]` is applied on the results of `flatMap`'s function argument when it is applied to the elements of `xs`. + +## Vararg arguments + +When applied to a vararg parameter, `into` allows a conversion on each argument value individually. For example, consider a method `concatAll` that concatenates a variable +number of `IterableOnce[Char]` arguments, and also allows implicit conversions into `IterableOnce[Char]`: + +```scala +def concatAll(xss: into IterableOnce[Char]*): List[Char] = + xss.foldLeft(List[Char]())(_ ++ _) +``` +Here, the call +```scala +concatAll(List('a'), "bc", Array('d', 'e')) +``` +would apply two _different_ implicit conversions: the conversion from `String` to `Iterable[Char]` gets applied to the second argument and the conversion from `Array[Char]` to `Iterable[Char]` gets applied to the third argument. + +## Retrofitting Scala 2 libraries + +A new annotation `allowConversions` has the same effect as an `into` modifier. It is defined as an `@experimental` class in package `scala.annotation`. 
It is intended to be used for retrofitting Scala 2 library code so that Scala 3 conversions can be applied to arguments without language imports. For instance, the definitions of +`++` and `flatMap` in the Scala 2.13 `List` class could be retrofitted as follows. +```scala + def ++ (@allowConversions elems: IterableOnce[A]): List[A] + def flatMap[B](@allowConversions f: A => IterableOnce[B]): List[B] +``` +For Scala 3 code, the `into` modifier is preferred. First, because it is shorter, +and second, because it adheres to the principle that annotations should not influence +typing and type inference in Scala. + +## Syntax changes + +The addition to the grammar is: +``` +ParamType ::= [‘=>’] ParamValueType +ParamValueType ::= [‘into‘] ExactParamType +ExactParamType ::= Type [‘*’] +``` +As the grammar shows, `into` can only applied to the type of a parameter; it is illegal in other positions. diff --git a/docs/_docs/reference/experimental/named-typeargs-spec.md b/docs/_docs/reference/experimental/named-typeargs-spec.md index 9e1113bbac86..741836a481f2 100644 --- a/docs/_docs/reference/experimental/named-typeargs-spec.md +++ b/docs/_docs/reference/experimental/named-typeargs-spec.md @@ -10,7 +10,7 @@ In this section we give more details about the [named type arguments](named-type The addition to the grammar is: -``` +```ebnf SimpleExpr1 ::= ... 
| SimpleExpr (TypeArgs | NamedTypeArgs) NamedTypeArgs ::= ‘[’ NamedTypeArg {‘,’ NamedTypeArg} ‘]’ @@ -19,7 +19,7 @@ NamedTypeArg ::= id ‘=’ Type Note in particular that named arguments cannot be passed to type constructors: -``` scala +```scala class C[T] val x: C[T = Int] = // error diff --git a/docs/_docs/reference/language-versions/source-compatibility.md b/docs/_docs/reference/language-versions/source-compatibility.md index 077f06b2b4db..145c4a84d11b 100644 --- a/docs/_docs/reference/language-versions/source-compatibility.md +++ b/docs/_docs/reference/language-versions/source-compatibility.md @@ -17,15 +17,17 @@ The default Scala language syntax version currently supported by the Dotty compi - in conjunction with `-rewrite`, offer code rewrites from Scala 2.13 to 3.0. - [`3.0`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$3/0$.html), [`3.1`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$3/1$.html): the default set of features included in scala versions `3.0.0` to `3.1.3`. +- [`3.2-migration`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$3/2-migration$.html): the same as `3.2`, but in conjunction with `-rewrite`, offer code rewrites from Scala `3.0/3.1` to `3.2`. - [`3.2`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$3/2$.html): the same as `3.0` and `3.1`, but in addition: - [stricter pattern bindings](https://docs.scala-lang.org/scala3/reference/changed-features/pattern-bindings.html) are now enabled (part of `future` in earlier `3.x` releases), producing warnings for refutable patterns. These warnings can be silenced to achieve the same runtime behavior, but in `future` they become errors and refutable patterns will not compile. - [Nonlocal returns](https://docs.scala-lang.org/scala3/reference/dropped-features/nonlocal-returns.html) now produce a warning upon usage (they are still an error under `future`). 
-- [`3.2-migration`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$3/2-migration$.html): the same as `3.2`, but in conjunction with `-rewrite`, offer code rewrites from Scala `3.0/3.1` to `3.2`. -- [`future`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$future$.html): A preview of changes that will be introduced in `3.x` versions after `3.2`. +- [`3.3-migration`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$3/3-migration$.html): the same as `3.3` +- [`3.3`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$3/3$.html): the same as `3.2`, but in addition: + - [Fewer braces syntax](https://docs.scala-lang.org/scala3/reference/other-new-features/indentation.html#optional-braces-for-method-arguments-1) is enabled by default. +- [`future-migration`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$future-migration$.html): Same as `future` but with additional helpers to migrate from `3.3`. Similarly to the helpers available under `3.0-migration`, these include migration warnings and optional rewrites. +- [`future`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$future$.html): A preview of changes that will be introduced in `3.x` versions after `3.3`. Some Scala 2 specific idioms are dropped in this version. The feature set supported by this version may grow over time as features become stabilised for preview. -- [`future-migration`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$future-migration$.html): Same as `future` but with additional helpers to migrate from `3.2`. Similarly to the helpers available under `3.0-migration`, these include migration warnings and optional rewrites. - There are two ways to specify a language version : - with a `-source` command line setting, e.g. `-source 3.0-migration`. 
diff --git a/docs/_docs/reference/metaprogramming/macros-spec.md b/docs/_docs/reference/metaprogramming/macros-spec.md index aa8f94a9a1f7..27a0a2c1bdcb 100644 --- a/docs/_docs/reference/metaprogramming/macros-spec.md +++ b/docs/_docs/reference/metaprogramming/macros-spec.md @@ -4,251 +4,711 @@ title: "Macros Spec" nightlyOf: https://docs.scala-lang.org/scala3/reference/metaprogramming/macros-spec.html --- +## Formalization + +* Multi-stage programming with generative and analytical macros[^2] +* Multi-Stage Macro Calculus, Chapter 4 of Scalable Metaprogramming in Scala 3[^1]. + Contains and extends the calculus of _Multi-stage programming with generative and analytical macros_ with type polymorphism. + +## Syntax + +The quotation syntax using `'` and `$` was chosen to mimic the string interpolation syntax of Scala. +Like a string double-quotation, a single-quote block can contain splices. +However, unlike strings, splices can contain quotes using the same rules. + +```scala +s" Hello $name" s" Hello ${name}" +'{ hello($name) } '{ hello(${name}) } +${ hello('name) } ${ hello('{name}) } +``` + +### Quotes +Quotes come in four flavors: quoted identifiers, quoted blocks, quoted block patterns and quoted type patterns. +Scala 2 used quoted identifiers to represent `Symbol` literals. They were deprecated in Scala 3, allowing the syntax to be used for quotation. +```scala +SimpleExpr ::= ... + | `'` alphaid // quoted identifier + | `'` `{` Block `}` // quoted block +Pattern ::= ... + | `'` `{` Block `}` // quoted block pattern + | `'` `[` Type `]` // quoted type pattern +``` + +Quoted blocks and quoted block patterns contain an expression equivalent to a normal block of code. +When entering either of those we track the fact that we are in a quoted block (`inQuoteBlock`) which is used for spliced identifiers. 
+When entering a quoted block pattern we additionally track the fact that we are in a quoted pattern (`inQuotePattern`) which is used to distinguish spliced blocks and splice patterns. +Lastly, the quoted type pattern simply contains a type. + +### Splices +Splices come in three flavors: spliced identifiers, spliced blocks and splice patterns. +Scala specifies identifiers containing `$` as valid identifiers but reserves them for compiler and standard library use only. +Unfortunately, many libraries have used such identifiers in Scala 2. Therefore to mitigate the cost of migration, we still support them. +We work around this by only allowing spliced identifiers[^3] within quoted blocks or quoted block patterns (`inQuoteBlock`). +Splice blocks and splice patterns can contain an arbitrary block or pattern respectively. +They are distinguished based on their surrounding quote (`inQuotePattern`), a quote block will contain spliced blocks, and a quote block pattern will contain splice patterns. + +```scala +SimpleExpr ::= ... + | `$` alphaid if inQuoteBlock // spliced identifier + | `$` `{` Block `}` if !inQuotePattern // spliced block + | `$` `{` Pattern `}` if inQuotePattern // splice pattern +``` + +### Quoted Pattern Type Variables +Quoted pattern type variables in quoted patterns and quoted type patterns do not require additional syntax. +Any type definition or reference with a name composed of lower cases is assumed to be a pattern type variable definition while typing. +A backticked type name with lower cases is interpreted as a reference to the type with that name. + + ## Implementation -### Syntax - -Compared to the [Scala 3 reference grammar](../syntax.md) -there are the following syntax changes: -``` -SimpleExpr ::= ... - | ‘'’ ‘{’ Block ‘}’ - | ‘'’ ‘[’ Type ‘]’ - | ‘$’ ‘{’ Block ‘}’ -SimpleType ::= ... 
- | ‘$’ ‘{’ Block ‘}’ -``` -In addition, an identifier `$x` starting with a `$` that appears inside -a quoted expression or type is treated as a splice `${x}` and a quoted identifier -`'x` that appears inside a splice is treated as a quote `'{x}` - -### Implementation in `scalac` - -Quotes and splices are primitive forms in the generated abstract syntax trees. -Top-level splices are eliminated during macro expansion while typing. On the -other hand, top-level quotes are eliminated in an expansion phase `PickleQuotes` -phase (after typing and pickling). PCP checking occurs while preparing the RHS -of an inline method for top-level splices and in the `Staging` phase (after -typing and before pickling). - -Macro-expansion works outside-in. If the outermost scope is a splice, -the spliced AST will be evaluated in an interpreter. A call to a -previously compiled method can be implemented as a reflective call to -that method. With the restrictions on splices that are currently in -place that’s all that’s needed. We might allow more interpretation in -splices in the future, which would allow us to loosen the -restriction. Quotes in spliced, interpreted code are kept as they -are, after splices nested in the quotes are expanded. - -If the outermost scope is a quote, we need to generate code that -constructs the quoted tree at run-time. We implement this by -serializing the tree as a TASTy structure, which is stored -in a string literal. At runtime, an unpickler method is called to -deserialize the string into a tree. - -Splices inside quoted code insert the spliced tree as is, after -expanding any quotes in the spliced code recursively. +### Run-Time Representation -## Formalization +The standard library defines the `Quotes` interface which contains all the logic and the abstract classes `Expr` and `Type`. +The compiler implements the `Quotes` interface and provides the implementation of `Expr` and `Type`. 
-The phase consistency principle can be formalized in a calculus that -extends simply-typed lambda calculus with quotes and splices. +##### `class Expr` +Expressions of type `Expr[T]` are represented by the following abstract class: +```scala +abstract class Expr[+T] private[scala] +``` +The only implementation of `Expr` is in the compiler along with the implementation of `Quotes`. +It is a class that wraps a typed AST and a `Scope` object with no methods of its own. +The `Scope` object is used to track the current splice scope and detect scope extrusions. -### Syntax +##### `object Expr` +The companion object of `Expr` contains a few useful static methods; +the `apply`/`unapply` methods to use `ToExpr`/`FromExpr` with ease; +the `betaReduce` and `summon` methods. +It also contains methods to create expressions out of lists or sequences of expressions: `block`, `ofSeq`, `ofList`, `ofTupleFromSeq` and `ofTuple`. -The syntax of terms, values, and types is given as follows: +```scala +object Expr: + def apply[T](x: T)(using ToExpr[T])(using Quotes): Expr[T] = ... + def unapply[T](x: Expr[T])(using FromExpr[T])(using Quotes): Option[T] = ... + def betaReduce[T](e: Expr[T])(using Quotes): Expr[T] = ... + def summon[T: Type](using Quotes): Option[Expr[T]] = ... + def block[T](stats: List[Expr[Any]], e: Expr[T])(using Quotes): Expr[T] = ... + def ofSeq[T: Type](xs: Seq[Expr[T]])(using Quotes): Expr[Seq[T]] = ... + def ofList[T: Type](xs: Seq[Expr[T]])(using Quotes): Expr[List[T]] = ... + def ofTupleFromSeq(xs: Seq[Expr[Any]])(using Quotes): Expr[Tuple] = ... + def ofTuple[T <: Tuple: Tuple.IsMappedBy[Expr]: Type](tup: T)(using Quotes): + Expr[Tuple.InverseMap[T, Expr]] = ... 
``` -Terms t ::= x variable - (x: T) => t lambda - t t application - 't quote - $t splice -Values v ::= (x: T) => t lambda - 'u quote +##### `class Type` +Types of type `Type[T]` are represented by the following abstract class: +```scala +abstract class Type[T <: AnyKind] private[scala]: + type Underlying = T +``` + +The only implementation of `Type` is in the compiler along with the implementation of `Quotes`. +It is a class that wraps the AST of a type and a `Scope` object with no methods of its own. +The upper bound of `T` is `AnyKind` which implies that `T` may be a higher-kinded type. +The `Underlying` alias is used to select the type from an instance of `Type`. +Users never need to use this alias as they can always use `T` directly. +`Underlying` is used for internal encoding while compiling the code (see _Type Healing_). -Simple terms u ::= x | (x: T) => u | u u | 't +##### `object Type` +The companion object of `Type` contains a few useful static methods. +The first and most important one is the `Type.of` given definition. +This instance of `Type[T]` is summoned by default when no other instance is available. +The `of` operation is an intrinsic operation that the compiler will transform into code that will generate the `Type[T]` at run-time. +Secondly, the `Type.show[T]` operation will show a string representation of the type, which is often useful when debugging. +Finally, the object defines `valueOfConstant` (and `valueOfTuple`) which can transform singleton types (or tuples of singleton types) into their value. -Types T ::= A base type - T -> T function type - expr T quoted + +```scala +object Type: + given of[T <: AnyKind](using Quotes): Type[T] = ... + def show[T <: AnyKind](using Type[T])(using Quotes): String = ... + def valueOfConstant[T](using Type[T])(using Quotes): Option[T] = ... + def valueOfTuple[T <: Tuple](using Type[T])(using Quotes): Option[T] = ... ``` -Typing rules are formulated using a stack of environments -`Es`. 
Individual environments `E` consist as usual of variable -bindings `x: T`. Environments can be combined using the two -combinators `'` and `$`. + +##### `Quotes` +The `Quotes` interface is where most of the primitive operations of the quotation system are defined. + +Quotes define all the `Expr[T]` methods as extension methods. +`Type[T]` does not have methods and therefore does not appear here. +These methods are available as long as `Quotes` is implicitly given in the current scope. + +The `Quotes` instance is also the entry point to the [reflection API](./reflection.md) through the `reflect` object. + +Finally, `Quotes` provides the internal logic used in quote un-pickling (`QuoteUnpickler`) in quote pattern matching (`QuoteMatching`). +These interfaces are added to the self-type of the trait to make sure they are implemented on this object but not visible to users of `Quotes`. + +Internally, the implementation of `Quotes` will also track its current splicing scope `Scope`. +This scope will be attached to any expression that is created using this `Quotes` instance. + +```scala +trait Quotes: + this: runtime.QuoteUnpickler & runtime.QuoteMatching => + + extension [T](self: Expr[T]) + def show: String + def matches(that: Expr[Any]): Boolean + def value(using FromExpr[T]): Option[T] + def valueOrAbort(using FromExpr[T]): T + end extension + + extension (self: Expr[Any]) + def isExprOf[X](using Type[X]): Boolean + def asExprOf[X](using Type[X]): Expr[X] + end extension + + // abstract object reflect ... ``` -Environment E ::= () empty - E, x: T -Env. stack Es ::= () empty - E simple - Es * Es combined -Separator * ::= ' - $ +##### `Scope` +The splice context is represented as a stack (immutable list) of `Scope` objects. +Each `Scope` contains the position of the splice (used for error reporting) and a reference to the enclosing splice scope `Scope`. +A scope is a sub-scope of another if the other is contained in its parents. 
+This check is performed when an expression is spliced into another using the `Scope` provided in the current scope in `Quotes` and the one in the `Expr` or `Type`. + +### Entry Points +The two entry points for multi-stage programming are macros and the `run` operation. + +#### Macros +Inline macro definitions will inline a top-level splice (a splice not nested in a quote). +This splice needs to be evaluated at compile-time. +In _Avoiding a complete interpreter_[^1], we stated the following restrictions: + + * The top-level splice must contain a single call to a compiled static method. + * Arguments to the function are either literal constants, quoted expressions (parameters), `Type.of` for type parameters and a reference to `Quotes`. + +These restrictions make the implementation of the interpreter quite simple. +Java Reflection is used to call the single function call in the top-level splice. +The execution of that function is entirely done on compiled bytecode. +These are Scala static methods and may not always become Java static methods, they might be inside module objects. +As modules are encoded as class instances, we need to interpret the prefix of the method to instantiate it before we can invoke the method. + +The code of the arguments has not been compiled and therefore needs to be interpreted by the compiler. +Interpreting literal constants is as simple as extracting the constant from the AST that represents literals. +When interpreting a quoted expression, the contents of the quote is kept as an AST which is wrapped inside the implementation of `Expr`. +Calls to `Type.of[T]` also wrap the AST of the type inside the implementation of `Type`. +Finally, the reference to `Quotes` is supposed to be the reference to the quotes provided by the splice. +This reference is interpreted as a new instance of `Quotes` that contains a fresh initial `Scope` with no parents. 
+ +The result of calling the method via Java Reflection will return an `Expr` containing a new AST that was generated by the implementation of that macro. +The scope of this `Expr` is checked to make sure it did not extrude from some splice or `run` operation. +Then the AST is extracted from the `Expr` and it is inserted as replacement for the AST that contained the top-level splice. + + +#### Run-time Multi-Stage Programming + +To be able to compile the code, the `scala.quoted.staging` library defines the `Compiler` trait. +An instance of `staging.Compiler` is a wrapper over the normal Scala~3 compiler. +To be instantiated it requires an instance of the JVM _classloader_ of the application. + +```scala +import scala.quoted.staging.* +given Compiler = Compiler.make(getClass.getClassLoader) ``` -The two environment combinators are both associative with left and -right identity `()`. -### Operational semantics +The classloader is needed for the compiler to know which dependencies have been loaded and to load the generated code using the same classloader. Below is an example method `mkPower2` that is passed to `staging.run`: + +```scala +def mkPower2()(using Quotes): Expr[Double => Double] = ... -We define a small step reduction relation `-->` with the following rules: +run(mkPower2()) ``` - ((x: T) => t) v --> [x := v]t +To run the previous example, the compiler will create code equivalent to the following class and compile it using a new `Scope` without parents. + +```scala +class RunInstance: + def exec(): Double => Double = ${ mkPower2() } +``` +Finally, `run` will interpret `(new RunInstance).exec()` to evaluate the contents of the quote. +To do this, the resulting `RunInstance` class is loaded in the JVM using Java Reflection, instantiated and then the `exec` method is invoked. + + +### Compilation + +Quotes and splices are primitive forms in the generated typed abstract syntax trees. 
+These need to be type-checked with some extra rules, e.g., staging levels need to be checked and the references to generic types need to be adapted. +Finally, quoted expressions that will be generated at run-time need to be encoded (serialized/pickled) and decoded (deserialized/unpickled). + +#### Typing Quoted Expressions - ${'u} --> u +The typing process for quoted expressions and splices with `Expr` is relatively straightforward. +At its core, quotes are desugared into calls to `quote`, splices are desugared into calls to `splice`. +We track the quotation level when desugaring into these methods. - t1 --> t2 - ----------------- - e[t1] --> e[t2] + +```scala +def quote[T](x: T): Quotes ?=> Expr[T] + +def splice[T](x: Quotes ?=> Expr[T]): T ``` -The first rule is standard call-by-value beta-reduction. The second -rule says that splice and quotes cancel each other out. The third rule -is a context rule; it says that reduction is allowed in the hole `[ ]` -position of an evaluation context. Evaluation contexts `e` and -splice evaluation context `e_s` are defined syntactically as follows: + +It would be impossible to track the quotation levels if users wrote calls to these methods directly. +To know if it is a call to one of those methods we would need to type it first, but to type it we would need to know if it is one of these methods to update the quotation level. +Therefore these methods can only be used by the compiler. + +At run-time, the splice needs to have a reference to the `Quotes` that created its surrounding quote. +To simplify this for later phases, we track the current `Quotes` and encode a reference directly in the splice using `nestedSplice` instead of `splice`. + +```scala +def nestedSplice[T](q: Quotes)(x: q.Nested ?=> Expr[T]): T ``` -Eval context e ::= [ ] | e t | v e | 'e_s[${e}] -Splice context e_s ::= [ ] | (x: T) => e_s | e_s t | u e_s +With this addition, the original `splice` is only used for top-level splices. 
+ +The levels are mostly used to identify top-level splices that need to be evaluated while typing. +We do not use the quotation level to influence the typing process. +Level checking is performed at a later phase. +This ensures that a source expression in a quote will have the same elaboration as a source expression outside the quote. + + + +#### Quote Pattern Matching + +Pattern matching is defined in the trait `QuoteMatching`, which is part of the self type of `Quotes`. +It is implemented by `Quotes` but not available to users of `Quotes`. +To access it, the compiler generates a cast from `Quotes` to `QuoteMatching` and then selects one of its two members: `ExprMatch` or `TypeMatch`. +`ExprMatch` defines an `unapply` extractor method that is used to encode quote patterns and `TypeMatch` defines an `unapply` method for quoted type patterns. + +```scala +trait Quotes: + self: runtime.QuoteMatching & ... => + ... + +trait QuoteMatching: + object ExprMatch: + def unapply[TypeBindings <: Tuple, Tup <: Tuple] + (scrutinee: Expr[Any]) + (using pattern: Expr[Any]): Option[Tup] = ... + object TypeMatch: + ... ``` -### Typing rules +These extractor methods are only meant to be used in code generated by the compiler. +The call to the extractor that is generated has an already elaborated form that cannot be written in source, namely explicit type parameters and explicit contextual parameters. + +This extractor returns a tuple type `Tup` which cannot be inferred from the types in the method signature. +This type will be computed when typing the quote pattern and will be explicitly added to the extractor call. +To refer to type variables in arbitrary places of `Tup`, we need to define them all before their use, hence we have `TypeBindings`, which will contain all pattern type variable definitions. +The extractor also receives a given parameter of type `Expr[Any]` that will contain an expression that represents the pattern. 
+The compiler will explicitly add this pattern expression. +We use a given parameter because these are the only parameters we are allowed to add to the extractor call in a pattern position. + +This extractor is a bit convoluted, but it encodes away all the quotation-specific features. +It compiles the pattern down into a representation that the pattern matcher compiler phase understands. -Typing judgments are of the form `Es |- t: T`. There are two -substructural rules which express the fact that quotes and splices -cancel each other out: +The quote patterns are encoded into two parts: a tuple pattern that is tasked with extracting the result of the match and a quoted expression representing the pattern. +For example, if the pattern has no `$` we will have an `EmptyTuple` as the pattern and `'{1}` to represent the pattern. + +```scala + case '{ 1 } => +// is elaborated to + case ExprMatch(EmptyTuple)(using '{1}) => +// ^^^^^^^^^^ ^^^^^^^^^^ +// pattern expression +``` +When extracting expressions, each pattern that is contained in a splice `${..}` will be placed in order in the tuple pattern. +In the following case, the `f` and `x` are placed in a tuple pattern `(f, x)`. +The type of the tuple is encoded in the `Tup` and not only in the tuple itself. +Otherwise, the extractor would return a tuple `Tuple` for which the types need to be tested which is in turn not possible due to type erasure. + +```scala + case '{ ((y: Int) => $f(y)).apply($x) } => +// is elaborated to + case ExprMatch[.., (Expr[Int => Int], Expr[Int])]((f, x))(using pattern) => +// pattern = '{ ((y: Int) => pat[Int](y)).apply(pat[Int]()) } ``` - Es1 * Es2 |- t: T - --------------------------- - Es1 $ E1 ' E2 * Es2 |- t: T +The contents of the quote are transformed into a valid quote expression by replacing the splice with a marker expression `pat[T](..)`. +The type `T` is taken from the type of the splice and the arguments are the HOAS arguments. 
+This implies that a `pat[T]()` is a closed pattern and `pat[T](y)` is an HOAS pattern that can refer to `y`. - Es1 * Es2 |- t: T - --------------------------- - Es1 ' E1 $ E2 * Es2 |- t: T +Type variables in quoted patterns are first normalized to have all definitions at the start of the pattern. +For each definition of a type variable `t` in the pattern we will add a type variable definition in `TypeBindings`. +Each one will have a corresponding `Type[t]` that will get extracted if the pattern matches. +These `Type[t]` are also listed in the `Tup` and added in the tuple pattern. +It is additionally marked as `using` in the pattern to make it implicitly available in this case branch. + + +```scala + case '{ type t; ($xs: List[t]).map[t](identity[t]) } => +// is elaborated to + case ExprMatch[(t), (Type[t], Expr[List[t]])]((using t, xs))(using p) => +// ^^^ ^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^ ^^^^^^^ +// type bindings result type pattern expression +// p = '{ @patternType type u; pat[List[u]]().map[u](identity[u]) } ``` -The lambda calculus fragment of the rules is standard, except that we -use a stack of environments. The rules only interact with the topmost -environment of the stack. + +The contents of the quote are transformed into a valid quote expression by replacing type variables with fresh ones that do not escape the quote scope. +These are also annotated to be easily identifiable as pattern variables. + +#### Level Consistency Checking +Level consistency checking is performed after typing the program as a static check. +To check level consistency we traverse the tree top-down remembering the context staging level. +Each local definition in scope is recorded with its level and each term reference to a definition is checked against the current staging level. +```scala +// level 0 +'{ // level 1 + val x = ... // level 1 with (x -> 1) + ${ // level 0 (x -> 1) + val y = ... 
// level 0 with (x -> 1, y -> 0) + x // error: defined at level 1 but used in level 0 + } + // level 1 (x -> 1) + x // x is ok +} ``` - x: T in E - -------------- - Es * E |- x: T +#### Type Healing - Es * E, x: T1 |- t: T2 - ------------------------------- - Es * E |- (x: T1) => t: T -> T2 +When using a generic type `T` in a future stage, it is necessary to have a given `Type[T]` in scope. +The compiler needs to identify those references and link them with the instance of `Type[T]`. +For instance consider the following example: +```scala +def emptyList[T](using t: Type[T])(using Quotes): Expr[List[T]] = + '{ List.empty[T] } +``` - Es |- t1: T2 -> T Es |- t2: T2 - --------------------------------- - Es |- t1 t2: T +For each reference to a generic type `T` that is defined at level 0 and used at level 1 or greater, the compiler will summon a `Type[T]`. +This is usually the given type that is provided as parameter, `t` in this case. +We can use the type `t.Underlying` to replace `T` as it is an alias of that type. +But `t.Underlying` contains the extra information that it is `t` that will be used in the evaluation of the quote. +In a sense, `Underlying` acts like a splice for types. + +```scala +def emptyList[T](using t: Type[T])(using Quotes): Expr[List[T]] = + '{ List.empty[t.Underlying] } ``` -The rules for quotes and splices map between `expr T` and `T` by trading `'` and `$` between -environments and terms. + +Due to some technical limitations, it is not always possible to replace the type reference with the AST containing `t.Underlying`. +To overcome this limitation, we can simply define a list of type aliases at the start of the quote and insert the `t.Underlying` there. +This has the added advantage that we do not have to repeatedly insert the `t.Underlying` in the quote. 
+ +```scala +def emptyList[T](using t: Type[T])(using Quotes): Expr[List[T]] = + '{ type U = t.Underlying; List.empty[U] } +``` +These aliases can be used at any level within the quote and this transformation is only performed on quotes that are at level 0. + +```scala + '{ List.empty[T] ... '{ List.empty[T] } ... } +// becomes + '{ type U = t.Underlying; List.empty[U] ... '{ List.empty[U] } ... } +``` +If we define a generic type at level 1 or greater, it will not be subject to this transformation. +In some future compilation stage, when the definition of the generic type is at level 0, it will be subject to this transformation. +This simplifies the transformation logic and avoids leaking the encoding into code that a macro could inspect. + +```scala +'{ + def emptyList[T: Type](using Quotes): Expr[List[T]] = '{ List.empty[T] } + ... +} +``` +A similar transformation is performed on `Type.of[T]`. +Any generic type in `T` needs to have an implicitly given `Type[T]` in scope, which will also be used as a path. +The example: + +```scala +def empty[T](using t: Type[T])(using Quotes): Expr[T] = + Type.of[T] match ... +// becomes +def empty[T](using t: Type[T])(using Quotes): Expr[T] = + Type.of[t.Underlying] match ... +// then becomes +def empty[T](using t: Type[T])(using Quotes): Expr[T] = + t match ... +``` + +The operation `Type.of[t.Underlying]` can be optimized to just `t`. +But this is not always the case. +If the generic reference is nested in the type, we will need to keep the `Type.of`. + +```scala +def matchOnList[T](using t: Type[T])(using Quotes): Expr[List[T]] = + Type.of[List[T]] match ... +// becomes +def matchOnList[T](using t: Type[T])(using Quotes): Expr[List[T]] = + Type.of[List[t.Underlying]] match ... +``` + +By doing this transformation, we ensure that each abstract type `U` used in `Type.of` has an implicit `Type[U]` in scope. 
+This representation makes it simpler to identify parts of the type that are statically known from those that are known dynamically. +Type aliases are also added within the type of the `Type.of` though these are not valid source code. +These would look like `Type.of[{type U = t.Underlying; Map[U, U]}]` if written in source code. + + +#### Splice Normalization + +The contents of a splice may refer to variables defined in the enclosing quote. +This complicates the process of serialization of the contents of the quotes. +To make serialization simple, we first transform the contents of each level 1 splice. +Consider the following example: + +```scala +def power5to(n: Expr[Int]): Expr[Double] = '{ + val x: Int = 5 + ${ powerCode('{x}, n) } +} +``` + +The variable `x` is defined in the quote and used in the splice. +The normal form will extract all references to `x` and replace them with a staged version of `x`. +We will replace the reference to `x` of type `T` with a `$y` where `y` is of type `Expr[T]`. +Then we wrap the new contents of the splice in a lambda that defines `y` and apply it to the quoted version of `x`. +After this transformation we have 2 parts, a lambda without references to the quote, which knows how to compute the contents of the splice, and a sequence of quoted arguments that refer to variables defined in the lambda. 
+
+```scala
+def power5to(n: Expr[Int]): Expr[Double] = '{
+  val x: Int = 5
+  ${ ((y: Expr[Int]) => powerCode('{$y}, n)).apply('x) }
+}
+```
+
+In general, the splice normal form has the shape `${ <lambda>.apply(<args>*) }` and the following constraints:
+ * `<lambda>` a lambda expression that does not refer to variables defined in the outer quote
+ * `<args>` sequence of quoted expressions or `Type.of` containing references to variables defined in the enclosing quote and no references to local variables defined outside the enclosing quote
+
+
+##### Function references normalization
+A reference to a function `f` that receives parameters is not a valid value in Scala.
+Such a function reference `f` can be eta-expanded as `x => f(x)` to be used as a lambda value.
+Therefore function references cannot be transformed by the normalization as directly as other expressions as we cannot represent `'{f}` with a method reference type.
+We can use the eta-expanded form of `f` in the normalized form.
+For example, consider the reference to `f` below.
+
+```scala
+'{
+  def f(a: Int)(b: Int, c: Int): Int = 2 + a + b + c
+  ${ '{ f(3)(4, 5) } }
+}
+```
+
+To normalize this code, we can eta-expand the reference to `f` and place it in a quote containing a proper expression.
+Therefore the normalized form of the argument `'{f}` becomes the quoted lambda `'{ (a: Int) => (b: Int, c: Int) => f(a)(b, c) }` and is an expression of type `Expr[Int => (Int, Int) => Int]`.
+The eta-expansion produces one curried lambda per parameter list.
+The application `f(3)(4, 5)` does not become `$g(3)(4, 5)` but `$g.apply(3).apply(4, 5)`.
+We add the `apply` because `g` is not a quoted reference to a function but a curried lambda.
+
+```scala
+'{
+  def f(a: Int)(b: Int, c: Int): Int = 2 + a + b + c
+  ${
+    (
+      (g: Expr[Int => (Int, Int) => Int]) => '{$g.apply(3).apply(4, 5)}
+    ).apply('{ (a: Int) => (b: Int, c: Int) => f(a)(b, c) })
+  }
+}
+```
+
+Then we can apply it and beta-reduce the application when generating the code.
+
+```scala
+  (g: Expr[Int => Int => Int]) => betaReduce('{$g.apply(3).apply(4)})
+```
+
+
+##### Variable assignment normalization
+A reference to a mutable variable in the left-hand side of an assignment cannot be transformed directly as it is not in an expression position.
+```scala
+'{
+  var x: Int = 5
+  ${ g('{x = 2}) }
+}
+```
+
+We can use the same strategy used for function references by eta-expanding the assignment operation `x = _` into `y => x = y`.
+
+```scala
+'{
+  var x: Int = 5
+  ${
+    g(
+      (
+        (f: Expr[Int => Unit]) => betaReduce('{$f(2)})
+      ).apply('{ (y: Int) => x = $y })
+    )
+  }
+}
+```
+
+
+##### Type normalization
+Types defined in the quote are subject to a similar transformation.
+In this example, `T` is defined within the quote at level 1 and used in the splice again at level 1.
+
+```scala
+'{ def f[T] = ${ '{g[T]} } }
+```
+
+The normalization will add a `Type[T]` to the lambda, and we will insert this reference.
+The difference is that it will add an alias similar to the one used in type healing.
+In this example, we create a `type U` that aliases the staged type.
+
+```scala
+'{
+  def f[T] = ${
+    (
+      (t: Type[T]) => '{type U = t.Underlying; g[U]}
+    ).apply(Type.of[T])
+  }
+}
+```
+
+#### Serialization
+
+Quoted code needs to be pickled to make it available at run-time in the next compilation phase.
+We implement this by pickling the AST as a TASTy binary.
+
+##### TASTy
+The TASTy format is the typed abstract syntax tree serialization format of Scala 3.
+It usually pickles the fully elaborated code after type-checking and is kept along the generated Java classfiles.
+
+
+##### Pickling
+We use TASTy as a serialization format for the contents of the quotes.
+To show how serialization is performed, we will use the following example.
+```scala +'{ + val (x, n): (Double, Int) = (5, 2) + ${ powerCode('{x}, '{n}) } * ${ powerCode('{2}, '{n}) } +} ``` - Es $ () |- t: expr T - -------------------- - Es |- $t: T +This quote is transformed into the following code when normalizing the splices. - Es ' () |- t: T - ---------------- - Es |- 't: expr T +```scala +'{ + val (x, n): (Double, Int) = (5, 2) + ${ + ((y: Expr[Double], m: Expr[Int]) => powerCode(y, m)).apply('x, 'n) + } * ${ + ((m: Expr[Int]) => powerCode('{2}, m)).apply('n) + } +} ``` -The meta theory of a slightly simplified 2-stage variant of this calculus -is studied [separately](./simple-smp.md). -## Going Further +Splice normalization is a key part of the serialization process as it only allows references to variables defined in the quote in the arguments of the lambda in the splice. +This makes it possible to create a closed representation of the quote without much effort. +The first step is to remove all the splices and replace them with holes. +A hole is like a splice but it lacks the knowledge of how to compute the contents of the splice. +Instead, it knows the index of the hole and the contents of the arguments of the splice. +We can see this transformation in the following example where a hole is represented by `<< idx; holeType; args* >>`. + +```scala + ${ ((y: Expr[Double], m: Expr[Int]) => powerCode(y, m)).apply('x, 'n) } +// becomes + << 0; Double; x, n >> +``` -The metaprogramming framework as presented and currently implemented is quite restrictive -in that it does not allow for the inspection of quoted expressions and -types. It’s possible to work around this by providing all necessary -information as normal, unquoted inline parameters. But we would gain -more flexibility by allowing for the inspection of quoted code with -pattern matching. This opens new possibilities. +As this was the first hole it has index 0. +The hole type is `Double`, which needs to be remembered now that we cannot infer it from the contents of the splice. 
+The arguments of the splice are `x` and `n`; note that they do not require quoting because they were moved out of the splice. -For instance, here is a version of `power` that generates the multiplications -directly if the exponent is statically known and falls back to the dynamic -implementation of `power` otherwise. +References to healed types are handled in a similar way. +Consider the `emptyList` example, which shows the type aliases that are inserted into the quote. ```scala -import scala.quoted.* +'{ List.empty[T] } +// type healed to +'{ type U = t.Underlying; List.empty[U] } +``` +Instead of replacing a splice, we replace the `t.Underlying` type with a type hole. +The type hole is represented by `<< idx; bounds >>`. +```scala +'{ type U = << 0; Nothing..Any >>; List.empty[U] } +``` +Here, the bounds of `Nothing..Any` are the bounds of the original `T` type. +The types of a `Type.of` are transformed in the same way. -inline def power(x: Double, n: Int): Double = - ${ powerExpr('x, 'n) } -private def powerExpr(x: Expr[Double], n: Expr[Int]) - (using Quotes): Expr[Double] = - n.value match - case Some(m) => powerExpr(x, m) - case _ => '{ dynamicPower($x, $n) } +With these transformations, the contents of the quote or `Type.of` are guaranteed to be closed and therefore can be pickled. +The AST is pickled into TASTy, which is a sequence of bytes. +This sequence of bytes needs to be instantiated in the bytecode, but unfortunately it cannot be dumped into the classfile as bytes. +To reify it we encode the bytes into a Java `String`. +In the following examples we display this encoding in human readable form with the fictitious `|tasty"..."|` string literal. 
-private def powerExpr(x: Expr[Double], n: Int) - (using Quotes): Expr[Double] = - if n == 0 then '{ 1.0 } - else if n == 1 then x - else if n % 2 == 0 then '{ val y = $x * $x; ${ powerExpr('y, n / 2) } } - else '{ $x * ${ powerExpr(x, n - 1) } } +```scala +// pickled AST bytes encoded in a base64 string +tasty""" + val (x, n): (Double, Int) = (5, 2) + << 0; Double; x, n >> * << 1; Double; n >> +""" +// or +tasty""" + type U = << 0; Nothing..Any; >> + List.empty[U] +""" +``` +The contents of a quote or `Type.of` are not always pickled. +In some cases it is better to generate equivalent (smaller and/or faster) code that will compute the expression. +Literal values are compiled into a call to `Expr()` using the implementation of `ToExpr` to create the quoted expression. +This is currently performed only on literal values, but can be extended to any value for which we have a `ToExpr` defined in the standard library. +Similarly, for non-generic types we can use their respective `java.lang.Class` and convert them into a `Type` using a primitive operation `typeConstructorOf` defined in the reflection API. -private def dynamicPower(x: Double, n: Int): Double = - if n == 0 then 1.0 - else if n % 2 == 0 then dynamicPower(x * x, n / 2) - else x * dynamicPower(x, n - 1) +##### Unpickling + +Now that we have seen how a quote is pickled, we can look at how to unpickle it. +We will continue with the previous example. + +Holes were used to replace the splices in the quote. +When we perform this transformation we also need to remember the lambdas from the splices and their hole index. +When unpickling a hole, the corresponding splice lambda will be used to compute the contents of the hole. +The lambda will receive as parameters quoted versions of the arguments of the hole. 
+For example to compute the contents of `<< 0; Double; x, n >>` we will evaluate the following code + +```scala + ((y: Expr[Double], m: Expr[Int]) => powerCode(y, m)).apply('x, 'n) ``` -In the above, the method `.value` maps a constant expression of the type -`Expr[T]` to its value of the type `T`. +The evaluation is not as trivial as it looks, because the lambda comes from compiled code and the rest is code that must be interpreted. +We put the AST of `x` and `n` into `Expr` objects to simulate the quotes and then we use Java Reflection to call the `apply` method. + +We may have many holes in a quote and therefore as many lambdas. +To avoid the instantiation of many lambdas, we can join them together into a single lambda. +Apart from the list of arguments, this lambda will also take the index of the hole that is being evaluated. +It will perform a switch match on the index and call the corresponding lambda in each branch. +Each branch will also extract the arguments depending on the definition of the lambda. +The application of the original lambdas are beta-reduced to avoid extra overhead. -With the right extractors, the "AsFunction" conversion -that maps expressions over functions to functions over expressions can -be implemented in user code: ```scala -given AsFunction1[T, U]: Conversion[Expr[T => U], Expr[T] => Expr[U]] with - def apply(f: Expr[T => U]): Expr[T] => Expr[U] = - (x: Expr[T]) => f match - case Lambda(g) => g(x) - case _ => '{ ($f)($x) } +(idx: Int, args: Seq[Any]) => + idx match + case 0 => // for << 0; Double; x, n >> + val x = args(0).asInstanceOf[Expr[Double]] + val n = args(1).asInstanceOf[Expr[Int]] + powerCode(x, n) + case 1 => // for << 1; Double; n >> + val n = args(0).asInstanceOf[Expr[Int]] + powerCode('{2}, n) ``` -This assumes an extractor + +This is similar to what we do for splices when we replace the type aliased with holes we keep track of the index of the hole. 
+Instead of lambdas, we will have a list of references to instances of `Type`.
+From the following example we would extract `t`, `u`, ... .
+
 ```scala
-object Lambda:
-  def unapply[T, U](x: Expr[T => U]): Option[Expr[T] => Expr[U]]
+  '{ type T1 = t1.Underlying; type Tn = tn.Underlying; ... }
+// with holes
+  '{ type T1 = << 0; ... >>; type Tn = << n-1; ... >>; ... }
 ```
-Once we allow inspection of code via extractors, it's tempting to also
-add constructors that create typed trees directly without going
-through quotes. Most likely, those constructors would work over `Expr`
-types which lack a known type argument. For instance, an `Apply`
-constructor could be typed as follows:
+
+As the type holes are at the start of the quote, they will have the first `N` indices.
+This implies that we can place the references in a sequence `Seq(t, u, ...)` where the index in the sequence is the same as the hole index.
+
+Lastly, the quote itself is replaced by a call to `QuoteUnpickler.unpickleExpr` which will unpickle the AST, evaluate the holes, i.e., splices, and wrap the resulting AST in an `Expr[Int]`.
+This method takes the pickled `|tasty"..."|`, the types and the hole lambda.
+Similarly, `Type.of` is replaced with a call to `QuoteUnpickler.unpickleType` but only receives the pickled `|tasty"..."|` and the types.
+Because `QuoteUnpickler` is part of the self-type of the `Quotes` class, we have to cast the instance but know that this cast will always succeed.
+
 ```scala
-def Apply(fn: Expr[Any], args: List[Expr[Any]]): Expr[Any]
+quotes.asInstanceOf[runtime.QuoteUnpickler].unpickleExpr[T](
+  pickled = tasty"...",
+  types = Seq(...),
+  holes = (idx: Int, args: Seq[Any]) => idx match ...
+)
 ```
We then need "at -the end" a method to convert an `Expr[Any]` to an `Expr[T]` where `T` is -given from the outside. For instance, if `code` yields a `Expr[Any]`, then -`code.atType[T]` yields an `Expr[T]`. The `atType` method has to be -implemented as a primitive; it would check that the computed type -structure of `Expr` is a subtype of the type structure representing -`T`. -Before going down that route, we should evaluate in detail the tradeoffs it -presents. Constructing trees that are only verified _a posteriori_ -to be type correct loses a lot of guidance for constructing the right -trees. So we should wait with this addition until we have more -use-cases that help us decide whether the loss in type-safety is worth -the gain in flexibility. In this context, it seems that deconstructing types is -less error-prone than deconstructing terms, so one might also -envisage a solution that allows the former but not the latter. - -## Conclusion - -Metaprogramming has a reputation of being difficult and confusing. -But with explicit `Expr/Type` types and quotes and splices it can become -downright pleasant. A simple strategy first defines the underlying quoted or unquoted -values using `Expr` and `Type` and then inserts quotes and splices to make the types -line up. Phase consistency is at the same time a great guideline -where to insert a splice or a quote and a vital sanity check that -the result makes sense. +[^1]: [Scalable Metaprogramming in Scala 3](https://infoscience.epfl.ch/record/299370) +[^2]: [Multi-stage programming with generative and analytical macros](https://dl.acm.org/doi/10.1145/3486609.3487203). +[^3]: In quotes, identifiers starting with `$` must be surrounded by backticks (`` `$` ``). For example `$conforms` from `scala.Predef`. 
diff --git a/docs/_docs/reference/metaprogramming/macros.md b/docs/_docs/reference/metaprogramming/macros.md index 0be48ef2baf8..a91e69d985f0 100644 --- a/docs/_docs/reference/metaprogramming/macros.md +++ b/docs/_docs/reference/metaprogramming/macros.md @@ -6,843 +6,617 @@ nightlyOf: https://docs.scala-lang.org/scala3/reference/metaprogramming/macros.h > When developing macros enable `-Xcheck-macros` scalac option flag to have extra runtime checks. -## Macros: Quotes and Splices +## Multi-Staging -Macros are built on two well-known fundamental operations: quotation and splicing. -Quotation is expressed as `'{...}` for expressions and splicing is expressed as `${ ... }`. -Additionally, within a quote or a splice we can quote or splice identifiers directly (i.e. `'e` and `$e`). -Readers may notice the resemblance of the two aforementioned syntactic -schemes with the familiar string interpolation syntax. +#### Quoted expressions +Multi-stage programming in Scala 3 uses quotes `'{..}` to delay, i.e., stage, execution of code and splices `${..}` to evaluate and insert code into quotes. +Quoted expressions are typed as `Expr[T]` with a covariant type parameter `T`. +It is easy to write statically safe code generators with these two concepts. +The following example shows a naive implementation of the $x^n$ mathematical operation. ```scala -println(s"Hello, $name, here is the result of 1 + 1 = ${1 + 1}") +import scala.quoted.* +def unrolledPowerCode(x: Expr[Double], n: Int)(using Quotes): Expr[Double] = + if n == 0 then '{ 1.0 } + else if n == 1 then x + else '{ $x * ${ unrolledPowerCode(x, n-1) } } ``` -In string interpolation we _quoted_ a string and then we _spliced_ into it, two others. The first, `name`, is a reference to a value of type [`String`](https://scala-lang.org/api/3.x/scala/Predef$.html#String-0), and the second is an arithmetic expression that will be _evaluated_ followed by the splicing of its string representation. 
- -Quotes and splices in this section allow us to treat code in a similar way, -effectively supporting macros. The entry point for macros is an inline method -with a top-level splice. We call it a top-level because it is the only occasion -where we encounter a splice outside a quote (consider as a quote the -compilation-unit at the call-site). For example, the code below presents an -`inline` method `assert` which calls at compile-time a method `assertImpl` with -a boolean expression tree as argument. `assertImpl` evaluates the expression and -prints it again in an error message if it evaluates to `false`. - ```scala -import scala.quoted.* - -inline def assert(inline expr: Boolean): Unit = - ${ assertImpl('expr) } - -def assertImpl(expr: Expr[Boolean])(using Quotes) = '{ - if !$expr then - throw AssertionError(s"failed assertion: ${${ showExpr(expr) }}") +'{ + val x = ... + ${ unrolledPowerCode('{x}, 3) } // evaluates to: x * x * x } - -def showExpr(expr: Expr[Boolean])(using Quotes): Expr[String] = - '{ [actual implementation later in this document] } ``` -If `e` is an expression, then `'{e}` represents the typed -abstract syntax tree representing `e`. If `T` is a type, then `Type.of[T]` -represents the type structure representing `T`. The precise -definitions of "typed abstract syntax tree" or "type-structure" do not -matter for now, the terms are used only to give some -intuition. Conversely, `${e}` evaluates the expression `e`, which must -yield a typed abstract syntax tree or type structure, and embeds the -result as an expression (respectively, type) in the enclosing program. +Quotes and splices are duals of each other. +For an arbitrary expression `x` of type `T` we have `${'{x}} = x` and for an arbitrary expression `e` of type `Expr[T]` we have `'{${e}} = e`. -Quotations can have spliced parts in them; in this case the embedded -splices are evaluated and embedded as part of the formation of the -quotation. 
+#### Abstract types
+Quotes can handle generic and abstract types using the type class `Type[T]`.
+A quote that refers to a generic or abstract type `T` requires a given `Type[T]` to be provided in the implicit scope.
+The following examples show how `T` is annotated with a context bound (`: Type`) to provide an implicit `Type[T]`, or the equivalent `using Type[T]` parameter.
-Quotes and splices can also be applied directly to identifiers. An identifier
-`$x` starting with a `$` that appears inside a quoted expression or type is treated as a
-splice `${x}`. Analogously, an quoted identifier `'x` that appears inside a splice
-is treated as a quote `'{x}`. See the Syntax section below for details.
+```scala
+import scala.quoted.*
+def singletonListExpr[T: Type](x: Expr[T])(using Quotes): Expr[List[T]] =
+  '{ List[T]($x) } // generic T used within a quote
-Quotes and splices are duals of each other.
-For arbitrary expressions `e` we have:
+def emptyListExpr[T](using Type[T], Quotes): Expr[List[T]] =
+  '{ List.empty[T] } // generic T used within a quote
+```
+If no other instance is found, the default `Type.of[T]` is used.
+The following example implicitly uses `Type.of[String]` and `Type.of[Option[U]]`.
 ```scala
-${'{e}} = e
-'{${e}} = e
+val list1: Expr[List[String]] =
+  singletonListExpr('{"hello"}) // requires a given `Type[String]`
+val list0: Expr[List[Option[U]]] =
+  emptyListExpr[Option[U]] // requires a given `Type[Option[U]]`
 ```
-## Types for Quotations
-
-The type signatures of quotes and splices can be described using
-two fundamental types:
-- `Expr[T]`: abstract syntax trees representing expressions of type `T`
-- `Type[T]`: non erased representation of type `T`.
-
-Quoting takes expressions of type `T` to expressions of type `Expr[T]`
-and it takes types `T` to expressions of type `Type[T]`. Splicing
-takes expressions of type `Expr[T]` to expressions of type `T` and it
-takes expressions of type `Type[T]` to types `T`.
-
-The two types can be defined in package [`scala.quoted`](https://scala-lang.org/api/3.x/scala/quoted.html) as follows:
+The `Type.of[T]` method is a primitive operation that the compiler will handle specially.
+It will provide the implicit if the type `T` is statically known, or if `T` contains some other types `Ui` for which we have an implicit `Type[Ui]`.
+In the example, `Type.of[String]` has a statically known type and `Type.of[Option[U]]` requires an implicit `Type[U]` in scope.
+#### Quote context
+We also track the current quotation context using a given `Quotes` instance.
+To create a quote `'{..}` we require a given `Quotes` context, which should be passed as a contextual parameter `(using Quotes)` to the function.
+Each splice will provide a new `Quotes` context within the scope of the splice.
+Therefore quotes and splices can be seen as methods with the following signatures, but with special semantics.
 ```scala
-package scala.quoted
+def '[T](x: T): Quotes ?=> Expr[T] // def '[T](x: T)(using Quotes): Expr[T]
-sealed trait Expr[+T]
-sealed trait Type[T]
+def $[T](x: Quotes ?=> Expr[T]): T
 ```
-Both `Expr` and `Type` are abstract and sealed, so all constructors for
-these types are provided by the system. One way to construct values of
-these types is by quoting, the other is by type-specific lifting
-operations that will be discussed later on.
+The lambda with a question mark `?=>` is a contextual function; it is a lambda that takes its argument implicitly and provides it implicitly in the implementation of the lambda.
+`Quotes` are used for a variety of purposes that will be mentioned when covering those topics.
-## The Phase Consistency Principle
+## Quoted Values
-A fundamental *phase consistency principle* (PCP) regulates accesses
-to free variables in quoted and spliced code:
+#### Lifting
+While it is not possible to use cross-stage persistence of local variables, it is possible to lift them to the next stage.
+To this end, we provide the `Expr.apply` method, which can take a value and lift it into a quoted representation of the value. -- _For any free variable reference `x`, the number of quoted scopes and the number of spliced scopes between the reference to `x` and the definition of `x` must be equal_. - -Here, `this`-references count as free variables. On the other -hand, we assume that all imports are fully expanded and that `_root_` is -not a free variable. So references to global definitions are -allowed everywhere. +```scala +val expr1plus1: Expr[Int] = '{ 1 + 1 } -The phase consistency principle can be motivated as follows: First, -suppose the result of a program `P` is some quoted text `'{ ... x -... }` that refers to a free variable `x` in `P`. This can be -represented only by referring to the original variable `x`. Hence, the -result of the program will need to persist the program state itself as -one of its parts. We don’t want to do this, hence this situation -should be made illegal. Dually, suppose a top-level part of a program -is a spliced text `${ ... x ... }` that refers to a free variable `x` -in `P`. This would mean that we refer during _construction_ of `P` to -a value that is available only during _execution_ of `P`. This is of -course impossible and therefore needs to be ruled out. Now, the -small-step evaluation of a program will reduce quotes and splices in -equal measure using the cancellation rules above. But it will neither -create nor remove quotes or splices individually. So the PCP ensures -that program elaboration will lead to neither of the two unwanted -situations described above. +val expr2: Expr[Int] = Expr(1 + 1) // lift 2 into '{ 2 } +``` -In what concerns the range of features it covers, this form of macros introduces -a principled metaprogramming framework that is quite close to the MetaML family of -languages. 
One difference is that MetaML does not have an equivalent of the PCP -
-quoted code in MetaML _can_ access variables in its immediately enclosing
-environment, with some restrictions and caveats since such accesses involve
-serialization. However, this does not constitute a fundamental gain in
-expressiveness.
+```scala
+val expr1plus1: Expr[Int] = '{ 1 + 1 }
-## From `Expr`s to Functions and Back
+val expr2: Expr[Int] = Expr(1 + 1) // lift 2 into '{ 2 }
+```
-It is possible to convert any `Expr[T => R]` into `Expr[T] => Expr[R]` and back.
-These conversions can be implemented as follows:
+While it looks type-wise similar to `'{ 1 + 1 }`, the semantics of `Expr(1 + 1)` are quite different.
+`Expr(1 + 1)` will not stage or delay any computation; the argument is evaluated to a value and then lifted into a quote.
+The quote will contain code that will create a copy of this value in the next stage.
+`Expr` is polymorphic and user-extensible via the `ToExpr` type class.
 ```scala
-def to[T: Type, R: Type](f: Expr[T] => Expr[R])(using Quotes): Expr[T => R] =
-  '{ (x: T) => ${ f('x) } }
-
-def from[T: Type, R: Type](f: Expr[T => R])(using Quotes): Expr[T] => Expr[R] =
-  (x: Expr[T]) => '{ $f($x) }
+trait ToExpr[T]:
+  def apply(x: T)(using Quotes): Expr[T]
 ```
-Note how the fundamental phase consistency principle works in two
-different directions here for `f` and `x`. In the method `to`, the reference to `f` is
-legal because it is quoted, then spliced, whereas the reference to `x`
-is legal because it is spliced, then quoted.
+We can implement a `ToExpr` using a `given` definition that will add the definition to the implicits in scope.
+In the following example we show how to implement a `ToExpr[Option[T]]` for any liftable type `T`.
-They can be used as follows:
+```scala
+given OptionToExpr[T: Type: ToExpr]: ToExpr[Option[T]] with
+  def apply(opt: Option[T])(using Quotes): Expr[Option[T]] =
+    opt match
+      case Some(x) => '{ Some[T]( ${Expr(x)} ) }
+      case None => '{ None }
+```
+
+The `ToExpr` for primitive types must be implemented as primitive operations in the system.
+In our case, we use the reflection API to implement them. -They can be used as follows: +#### Extracting values from quotes +To be able to generate optimized code using the method `unrolledPowerCode`, the macro implementation `powerCode` needs to first +determine whether the argument passed as parameter `n` is a known constant value. +This can be achieved via _unlifting_ using the `Expr.unapply` extractor from our library implementation, which will only match if `n` is a quoted constant and extracts its value. ```scala -val f1: Expr[Int => String] = - to((x: Expr[Int]) => '{ $x.toString }) // '{ (x: Int) => x.toString } - -val f2: Expr[Int] => Expr[String] = - from('{ (x: Int) => x.toString }) // (x: Expr[Int]) => '{ ((x: Int) => x.toString)($x) } -f2('{2}) // '{ ((x: Int) => x.toString)(2) } +def powerCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] = + n match + case Expr(m) => // it is a constant: unlift code n='{m} into number m + unrolledPowerCode(x, m) + case _ => // not known: call power at run-time + '{ power($x, $n) } ``` -One limitation of `from` is that it does not β-reduce when a lambda is called immediately, as evidenced in the code `{ ((x: Int) => x.toString)(2) }`. -In some cases we want to remove the lambda from the code, for this we provide the method `Expr.betaReduce` that turns a tree -describing a function into a function mapping trees to trees. - +Alternatively, the `n.value` method can be used to get an `Option[Int]` with the value or `n.valueOrAbort` to get the value directly. ```scala -object Expr: - ... 
- def betaReduce[T](expr: Expr[T])(using Quotes): Expr[T] +def powerCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] = + // emits an error message if `n` is not a constant + unrolledPowerCode(x, n.valueOrAbort) ``` -`Expr.betaReduce` returns an expression that is functionally equivalent to e, however if e is of the form `((y1, ..., yn) => e2)(e1, ..., en)` then it optimizes the top most call by returning the result of beta-reducing the application. Otherwise returns expr. - -## Lifting Types - -Types are not directly affected by the phase consistency principle. -It is possible to use types defined at any level in any other level. -But, if a type is used in a subsequent stage it will need to be lifted to a `Type`. -Indeed, the definition of `to` above uses `T` in the next stage, there is a -quote but no splice between the parameter binding of `T` and its -usage. But the code can be rewritten by adding an explicit binding of a `Type[T]`: +`Expr.unapply` and all variants of `value` are polymorphic and user-extensible via a given `FromExpr` type class. ```scala -def to[T, R](f: Expr[T] => Expr[R])(using t: Type[T], r: Type[R])(using Quotes): Expr[T => R] = - '{ (x: t.Underlying) => ${ f('x) } } +trait FromExpr[T]: + def unapply(x: Expr[T])(using Quotes): Option[T] ``` -In this version of `to`, the type of `x` is now the result of -inserting the type `Type[T]` and selecting its `Underlying`. +We can use `given` definitions to implement the `FromExpr` as we did for `ToExpr`. +The `FromExpr` for primitive types must be implemented as primitive operations in the system. +In our case, we use the reflection API to implement them. +To implement `FromExpr` for non-primitive types we use quote pattern matching (for example `OptionFromExpr`). + + +## Macros and Multi-Stage Programming -To avoid clutter, the compiler converts any type reference to -a type `T` in subsequent phases to `summon[Type[T]].Underlying`. 
+The system supports multi-stage macros and run-time multi-stage programming using the same quotation abstractions. -And to avoid duplication it does it once per type, and creates -an alias for that type at the start of the quote. +### Multi-Stage Macros -For instance, the user-level definition of `to`: +#### Macros +We can generalize the splicing abstraction to express macros. +A macro consists of a top-level splice that is not nested in any quote. +Conceptually, the contents of the splice are evaluated one stage earlier than the program. +In other words, the contents are evaluated while compiling the program. The generated code resulting from the macro replaces the splice in the program. ```scala -def to[T, R](f: Expr[T] => Expr[R])(using t: Type[T], r: Type[R])(using Quotes): Expr[T => R] = - '{ (x: T) => ${ f('x) } } +def power2(x: Double): Double = + ${ unrolledPowerCode('x, 2) } // x * x ``` -would be rewritten to +#### Inline macros +Since using the splices in the middle of a program is not as ergonomic as calling a function; we hide the staging mechanism from end-users of macros. We have a uniform way of calling macros and normal functions. +For this, _we restrict the use of top-level splices to only appear in inline methods_[^1][^2]. ```scala -def to[T, R](f: Expr[T] => Expr[R])(using t: Type[T], r: Type[R])(using Quotes): Expr[T => R] = - '{ - type T = summon[Type[T]].Underlying - (x: T) => ${ f('x) } - } +// inline macro definition +inline def powerMacro(x: Double, inline n: Int): Double = + ${ powerCode('x, 'n) } + +// user code +def power2(x: Double): Double = + powerMacro(x, 2) // x * x ``` -The `summon` query succeeds because there is a using parameter of -type `Type[T]`, and the reference to that value is -phase-correct. If that was not the case, the phase inconsistency for -`T` would be reported as an error. +The evaluation of the macro will only happen when the code is inlined into `power2`. 
+When inlined, the code is equivalent to the previous definition of `power2`. +A consequence of using inline methods is that none of the arguments nor the return type of the macro will have to mention the `Expr` types; this hides all aspects of metaprogramming from the end-users. -## Lifting Expressions +#### Avoiding a complete interpreter +When evaluating a top-level splice, the compiler needs to interpret the code that is within the splice. +Providing an interpreter for the entire language is quite tricky, and it is even more challenging to make that interpreter run efficiently. +To avoid needing a complete interpreter, we can impose the following restrictions on splices to simplify the evaluation of the code in top-level splices. + * The top-level splice must contain a single call to a compiled static method. + * Arguments to the function are literal constants, quoted expressions (parameters), calls to `Type.of` for type parameters and a reference to `Quotes`. -Consider the following implementation of a staged interpreter that implements -a compiler through staging. +In particular, these restrictions disallow the use of splices in top-level splices. +Such a splice would require several stages of interpretation which would be unnecessarily inefficient. +#### Compilation stages +The macro implementation (i.e., the method called in the top-level splice) can come from any pre-compiled library. +This provides a clear difference between the stages of the compilation process. +Consider the following 3 source files defined in distinct libraries. ```scala -import scala.quoted.* - -enum Exp: - case Num(n: Int) - case Plus(e1: Exp, e2: Exp) - case Var(x: String) - case Let(x: String, e: Exp, in: Exp) - -import Exp.* +// Macro.scala +def powerCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] = ... 
+inline def powerMacro(x: Double, inline n: Int): Double = + ${ powerCode('x, 'n) } ``` -The interpreted language consists of numbers `Num`, addition `Plus`, and variables -`Var` which are bound by `Let`. Here are two sample expressions in the language: - ```scala -val exp = Plus(Plus(Num(2), Var("x")), Num(4)) -val letExp = Let("x", Num(3), exp) +// Lib.scala (depends on Macro.scala) +def power2(x: Double) = + ${ powerCode('x, '{2}) } // inlined from a call to: powerMacro(x, 2) ``` -Here’s a compiler that maps an expression given in the interpreted -language to quoted Scala code of type `Expr[Int]`. -The compiler takes an environment that maps variable names to Scala `Expr`s. - ```scala -import scala.quoted.* - -def compile(e: Exp, env: Map[String, Expr[Int]])(using Quotes): Expr[Int] = - e match - case Num(n) => - Expr(n) - case Plus(e1, e2) => - '{ ${ compile(e1, env) } + ${ compile(e2, env) } } - case Var(x) => - env(x) - case Let(x, e, body) => - '{ val y = ${ compile(e, env) }; ${ compile(body, env + (x -> 'y)) } } +// App.scala (depends on Lib.scala) +@main def app() = power2(3.14) ``` - -Running `compile(letExp, Map())` would yield the following Scala code: +One way to syntactically visualize this is to put the application in a quote that delays the compilation of the application. +Then the application dependencies can be placed in an outer quote that contains the quoted application, and we repeat this recursively for dependencies of dependencies. ```scala -'{ val y = 3; (2 + y) + 4 } +'{ // macro library (compilation stage 1) + def powerCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] = + ... 
+ inline def powerMacro(x: Double, inline n: Int): Double = + ${ powerCode('x, 'n) } + '{ // library using macros (compilation stage 2) + def power2(x: Double) = + ${ powerCode('x, '{2}) } // inlined from a call to: powerMacro(x, 2) + '{ power2(3.14) /* app (compilation stage 3) */ } + } +} ``` -The body of the first clause, `case Num(n) => Expr(n)`, looks suspicious. `n` -is declared as an `Int`, yet it is converted to an `Expr[Int]` with `Expr()`. -Shouldn’t `n` be quoted? In fact this would not -work since replacing `n` by `'n` in the clause would not be phase -correct. +To make the system more versatile, we allow calling macros in the project where it is defined, with some restrictions. +For example, to compile `Macro.scala` and `Lib.scala` together in the same library. +To this end, we do not follow the simpler syntactic model and rely on semantic information from the source files. +When compiling a source, if we detect a call to a macro that is not compiled yet, we delay the compilation of this source to the following compilation stage. +In the example, we would delay the compilation of `Lib.scala` because it contains a compile-time call to `powerCode`. +Compilation stages are repeated until all sources are compiled, or no progress can be made. +If no progress is made, there was a cyclic dependency between the definition and the use of the macro. +We also need to detect if at runtime the macro depends on sources that have not been compiled yet. +These are detected by executing the macro and checking for JVM linking errors to classes that have not been compiled yet. -The `Expr.apply` method is defined in package `quoted`: +### Run-Time Multi-Stage Programming -```scala -package quoted +See [Run-Time Multi-Stage Programming](./staging.md) -object Expr: - ... 
- def apply[T: ToExpr](x: T)(using Quotes): Expr[T] = - summon[ToExpr[T]].toExpr(x) -``` +## Safety -This method says that values of types implementing the `ToExpr` type class can be -converted to `Expr` values using `Expr.apply`. +Multi-stage programming is by design statically safe and cross-stage safe. -Scala 3 comes with given instances of `ToExpr` for -several types including `Boolean`, `String`, and all primitive number -types. For example, `Int` values can be converted to `Expr[Int]` -values by wrapping the value in a `Literal` tree node. This makes use -of the underlying tree representation in the compiler for -efficiency. But the `ToExpr` instances are nevertheless not _magic_ -in the sense that they could all be defined in a user program without -knowing anything about the representation of `Expr` trees. For -instance, here is a possible instance of `ToExpr[Boolean]`: +### Static Safety -```scala -given ToExpr[Boolean] with - def toExpr(b: Boolean) = - if b then '{ true } else '{ false } -``` +#### Hygiene +All identifier names are interpreted as symbolic references to the corresponding variable in the context of the quote. +Therefore, while evaluating the quote, it is not possible to accidentally rebind a reference to a new variable with the same textual name. -Once we can lift bits, we can work our way up. For instance, here is a -possible implementation of `ToExpr[Int]` that does not use the underlying -tree machinery: +#### Well-typed +If a quote is well typed, then the generated code is well typed. +This is a simple consequence of tracking the type of each expression. +An `Expr[T]` can only be created from a quote that contains an expression of type `T`. +Conversely, an `Expr[T]` can only be spliced in a location that expects a type `T. +As mentioned before, `Expr` is covariant in its type parameter. +This means that an `Expr[T]` can contain an expression of a subtype of `T`. 
When spliced in a location that expects a type `T`, these expressions also have a valid type.
```scala -def showExpr[T](expr: Expr[T])(using Quotes): Expr[String] = - val code: String = expr.show - Expr(code) +def badPowerCode(x: Expr[Double], n: Int)(using Quotes): Expr[Double] = + // error: `n` potentially not available in the next execution environment + '{ power($x, n) } ``` -That is, the `showExpr` method converts its `Expr` argument to a string (`code`), and lifts -the result back to an `Expr[String]` using `Expr.apply`. -## Lifting Types +The rules are slightly different for global definitions, such as `unrolledPowerCode`. +It is possible to generate code that contains a reference to a _global_ definition such as in `'{ power(2, 4) }`. +This is a limited form of cross-stage persistence that does not impede cross-platform portability, where we refer to the already compiled code for `power`. +Each compilation step will lower the staging level by one while keeping global definitions. +In consequence, we can refer to compiled definitions in macros such as `unrolledPowerCode` in `${ unrolledPowerCode('x, 2) }`. -The previous section has shown that the metaprogramming framework has -to be able to take a type `T` and convert it to a type tree of type -`Type[T]` that can be reified. This means that all free variables of -the type tree refer to types and values defined in the current stage. +We can sumarize level consistency in two rules: + * Local variables can be used only at the same staging level as their definition + * Global variables can be used at any staging level -For a reference to a global class, this is easy: Just issue the fully -qualified name of the class. Members of reifiable types are handled by -just reifying the containing type together with the member name. But -what to do for references to type parameters or local type definitions -that are not defined in the current stage? Here, we cannot construct -the `Type[T]` tree directly, so we need to get it from a recursive -implicit search. 
For instance, to implement +#### Type consistency +As Scala uses type erasure, generic types will be erased at run-time and hence in any following stage. +To ensure any quoted expression that refers to a generic type `T` does not lose the information it needs, we require a given `Type[T]` in scope. +The `Type[T]` will carry over the non-erased representation of the type into the next phase. +Therefore any generic type used at a higher staging level than its definition will require its `Type`. + +#### Scope extrusion +Within the contents of a splice, it is possible to have a quote that refers to a local variable defined in the outer quote. +If this quote is used within the splice, the variable will be in scope. +However, if the quote is somehow _extruded_ outside the splice, then variables might not be in scope anymore. +Quoted expressions can be extruded using side effects such as mutable state and exceptions. +The following example shows how a quote can be extruded using mutable state. ```scala -summon[Type[List[T]]] +var x: Expr[T] = null +'{ (y: T) => ${ x = 'y; 1 } } +x // has value '{y} but y is not in scope ``` -where `T` is not defined in the current stage, we construct the type constructor -of `List` applied to the splice of the result of searching for a given instance for `Type[T]`: +A second way a variable can be extruded is through the `run` method. +If `run` consumes a quoted variable reference, it will not be in scope anymore. +The result will reference a variable that is defined in the next stage. ```scala -Type.of[ List[ summon[Type[T]].Underlying ] ] +'{ (x: Int) => ${ run('x); ... } } +// evaluates to: '{ (x: Int) => ${ x; ... } 1 ``` -This is exactly the algorithm that Scala 2 uses to search for type tags. -In fact Scala 2's type tag feature can be understood as a more ad-hoc version of -`quoted.Type`. As was the case for type tags, the implicit search for a `quoted.Type` -is handled by the compiler, using the algorithm sketched above. 
+To catch both scope extrusion scenarios, our system restricts the use of quotes by only allowing a quote to be spliced if it was not extruded from a splice scope. +Unlike level consistency, this is checked at run-time[^4] rather than compile-time to avoid making the static type system too complicated. -## Relationship with `inline` +Each `Quotes` instance contains a unique scope identifier and refers to its parent scope, forming a stack of identifiers. +The parent of the scope of a `Quotes` is the scope of the `Quotes` used to create the enclosing quote. +Top-level splices and `run` create new scope stacks. +Every `Expr` knows in which scope it was created. +When it is spliced, we check that the quote scope is either the same as the splice scope, or a parent scope thereof. -Seen by itself, principled metaprogramming looks more like a framework for -runtime metaprogramming than one for compile-time metaprogramming with macros. -But combined with Scala 3’s `inline` feature it can be turned into a compile-time -system. The idea is that macro elaboration can be understood as a combination of -a macro library and a quoted program. For instance, here’s the `assert` macro -again together with a program that calls `assert`. -```scala -object Macros: +## Staged Lambdas - inline def assert(inline expr: Boolean): Unit = - ${ assertImpl('expr) } +When staging programs in a functional language there are two fundamental abstractions: a staged lambda `Expr[T => U]` and a staging lambda `Expr[T] => Expr[U]`. +The first is a function that will exist in the next stage, whereas the second is a function that exists in the current stage. +It is often convenient to have a mechanism to go from `Expr[T => U]` to `Expr[T] => Expr[U]` and vice versa. 
- def assertImpl(expr: Expr[Boolean])(using Quotes) = - val failMsg: Expr[String] = Expr("failed assertion: " + expr.show) - '{ if !($expr) then throw new AssertionError($failMsg) } +```scala +def later[T: Type, U: Type](f: Expr[T] => Expr[U]): Expr[T => U] = + '{ (x: T) => ${ f('x) } } -@main def program = - val x = 1 - Macros.assert(x != 0) +def now[T: Type, U: Type](f: Expr[T => U]): Expr[T] => Expr[U] = + (x: Expr[T]) => '{ $f($x) } ``` -Inlining the `assert` function would give the following program: +Both conversions can be performed out of the box with quotes and splices. +But if `f` is a known lambda function, `'{ $f($x) }` will not beta-reduce the lambda in place. +This optimization is performed in a later phase of the compiler. +Not reducing the application immediately can simplify analysis of generated code. +Nevertheless, it is possible to beta-reduce the lambda in place using the `Expr.betaReduce` method. ```scala -@main def program = - val x = 1 - ${ Macros.assertImpl('{ x != 0}) } +def now[T: Type, U: Type](f: Expr[T => U]): Expr[T] => Expr[U] = + (x: Expr[T]) => Expr.betaReduce('{ $f($x) }) ``` -The example is only phase correct because `Macros` is a global value and -as such not subject to phase consistency checking. Conceptually that’s -a bit unsatisfactory. If the PCP is so fundamental, it should be -applicable without the global value exception. But in the example as -given this does not hold since both `assert` and `program` call -`assertImpl` with a splice but no quote. +The `betaReduce` method will beta-reduce the outermost application of the expression if possible (regardless of arity). +If it is not possible to beta-reduce the expression, then it will return the original expression. -However, one could argue that the example is really missing -an important aspect: The macro library has to be compiled in a phase -prior to the program using it, but in the code above, macro -and program are defined together. 
A more accurate view of -macros would be to have the user program be in a phase after the macro -definitions, reflecting the fact that macros have to be defined and -compiled before they are used. Hence, conceptually the program part -should be treated by the compiler as if it was quoted: +## Staged Constructors +To create new class instances in a later stage, we can create them using factory methods (usually `apply` methods of an `object`), or we can instantiate them with a `new`. +For example, we can write `Some(1)` or `new Some(1)`, creating the same value. +In Scala 3, using the factory method call notation will fall back to a `new` if no `apply` method is found. +We follow the usual staging rules when calling a factory method. +Similarly, when we use a `new C`, the constructor of `C` is implicitly called, which also follows the usual staging rules. +Therefore for an arbitrary known class `C`, we can use both `'{ C(...) }` or `'{ new C(...) }` as constructors. +## Staged Classes +Quoted code can contain any valid expression including local class definitions. +This allows the creation of new classes with specialized implementations. +For example, we can implement a new version of `Runnable` that will perform some optimized operation. ```scala -@main def program = '{ - val x = 1 - ${ Macros.assertImpl('{ x != 0 }) } +def mkRunnable(x: Int)(using Quotes): Expr[Runnable] = '{ + class MyRunnable extends Runnable: + def run(): Unit = ... // generate some custom code that uses `x` + new MyRunnable } ``` -If `program` is treated as a quoted expression, the call to -`Macro.assertImpl` becomes phase correct even if macro library and -program are conceptualized as local definitions. +The quoted class is a local class and its type cannot escape the enclosing quote. +The class must be used inside the quote or an instance of it can be returned using a known interface (`Runnable` in this case). -But what about the call from `assert` to `assertImpl`? 
Here, we need a -tweak of the typing rules. An inline function such as `assert` that -contains a splice operation outside an enclosing quote is called a -_macro_. Macros are supposed to be expanded in a subsequent phase, -i.e. in a quoted context. Therefore, they are also type checked as if -they were in a quoted context. For instance, the definition of -`assert` is typechecked as if it appeared inside quotes. This makes -the call from `assert` to `assertImpl` phase-correct, even if we -assume that both definitions are local. +## Quote Pattern Matching -The `inline` modifier is used to declare a `val` that is -either a constant or is a parameter that will be a constant when instantiated. This -aspect is also important for macro expansion. - -To get values out of expressions containing constants `Expr` provides the method -`value` (or `valueOrError`). This will convert the `Expr[T]` into a `Some[T]` (or `T`) when the -expression contains value. Otherwise it will return `None` (or emit an error). -To avoid having incidental val bindings generated by the inlining of the `def` -it is recommended to use an inline parameter. To illustrate this, consider an -implementation of the `power` function that makes use of a statically known exponent: +It is sometimes necessary to analyze the structure of the code or decompose the code into its sub-expressions. +A classic example is an embedded DSL, where a macro knows a set of definitions that it can reinterpret while compiling the code (for instance, to perform optimizations). +In the following example, we extend our previous implementation of `powCode` to look into `x` to perform further optimizations. 
```scala -inline def power(x: Double, inline n: Int) = ${ powerCode('x, 'n) } - -private def powerCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] = - n.value match - case Some(m) => powerCode(x, m) - case None => '{ Math.pow($x, $n.toDouble) } - -private def powerCode(x: Expr[Double], n: Int)(using Quotes): Expr[Double] = - if n == 0 then '{ 1.0 } - else if n == 1 then x - else if n % 2 == 0 then '{ val y = $x * $x; ${ powerCode('y, n / 2) } } - else '{ $x * ${ powerCode(x, n - 1) } } +def fusedPowCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] = + x match + case '{ power($y, $m) } => // we have (y^m)^n + fusedPowCode(y, '{ $n * $m }) // generate code for y^(n*m) + case _ => + '{ power($x, $n) } ``` -## Scope Extrusion -Quotes and splices are duals as far as the PCP is concerned. But there is an -additional restriction that needs to be imposed on splices to guarantee -soundness: code in splices must be free of side effects. The restriction -prevents code like this: +#### Sub-patterns -```scala -var x: Expr[T] = ... -'{ (y: T) => ${ x = 'y; 1 } } -``` - -This code, if it was accepted, would _extrude_ a reference to a quoted variable -`y` from its scope. This would subsequently allow access to a variable outside the -scope where it is defined, which is likely problematic. The code is clearly -phase consistent, so we cannot use PCP to rule it out. Instead, we postulate a -future effect system that can guarantee that splices are pure. In the absence of -such a system we simply demand that spliced expressions are pure by convention, -and allow for undefined compiler behavior if they are not. This is analogous to -the status of pattern guards in Scala, which are also required, but not -verified, to be pure. - -[Multi-Stage Programming](./staging.md) introduces one additional method where -you can expand code at runtime with a method `run`. There is also a problem with -that invocation of `run` in splices. 
Consider the following expression: +In quoted patterns, the `$` binds the sub-expression to an expression `Expr` that can be used in that `case` branch. +The contents of `${..}` in a quote pattern are regular Scala patterns. +For example, we can use the `Expr(_)` pattern within the `${..}` to only match if it is a known value and extract it. ```scala -'{ (x: Int) => ${ run('x); 1 } } +def fusedUnrolledPowCode(x: Expr[Double], n: Int)(using Quotes): Expr[Double] = + x match + case '{ power($y, ${Expr(m)}) } => // we have (y^m)^n + fusedUnrolledPowCode(y, n * m) // generate code for y * ... * y + case _ => // ( n*m times ) + unrolledPowerCode(x, n) ``` -This is again phase correct, but will lead us into trouble. Indeed, evaluating -the splice will reduce the expression `run('x)` to `x`. But then the result +These value extraction sub-patterns can be polymorphic using an instance of `FromExpr`. +In the following example, we show the implementation of `OptionFromExpr` which internally uses the `FromExpr[T]` to extract the value using the `Expr(x)` pattern. ```scala -'{ (x: Int) => ${ x; 1 } } +given OptionFromExpr[T](using Type[T], FromExpr[T]): FromExpr[Option[T]] with + def unapply(x: Expr[Option[T]])(using Quotes): Option[Option[T]] = + x match + case '{ Some( ${Expr(x)} ) } => Some(Some(x)) + case '{ None } => Some(None) + case _ => None ``` -is no longer phase correct. To prevent this soundness hole it seems easiest to -classify `run` as a side-effecting operation. It would thus be prevented from -appearing in splices. In a base language with side effects we would have to do this -anyway: Since `run` runs arbitrary code it can always produce a side effect if -the code it runs produces one. -## Example Expansion -Assume we have two methods, `foreach` that takes an `Expr[Array[T]]` and a -consumer `f`, and `sum` that performs a sum by delegating to `foreach`. 
+#### Closed patterns +Patterns may contain two kinds of references: global references such as the call to the `power` method in `'{ power(...) }`, or references to bindings defined in the pattern such as `x` in `case '{ (x: Int) => x }`. +When extracting an expression from a quote, we need to ensure that we do not extrude any variable from the scope where it is defined. ```scala -object Macros: - - def foreach[T](arr: Expr[Array[T]], f: Expr[T] => Expr[Unit]) - (using Type[T], Quotes): Expr[Unit] = '{ - var i: Int = 0 - while i < ($arr).length do - val element: T = ($arr)(i) - ${f('element)} - i += 1 - } - - def sum(arr: Expr[Array[Int]])(using Quotes): Expr[Int] = '{ - var sum = 0 - ${ foreach(arr, x => '{sum += $x}) } - sum - } - - inline def sum_m(arr: Array[Int]): Int = ${sum('arr)} - -end Macros +'{ (x: Int) => x + 1 } match + case '{ (y: Int) => $z } => + // should not match, otherwise: z = '{ x + 1 } ``` -A call to `sum_m(Array(1, 2, 3))` will first inline `sum_m`: - -```scala -val arr: Array[Int] = Array.apply(1, 2, 3) -${ _root_.Macros.sum('arr) } -``` +In this example, we see that the pattern should not match. +Otherwise, any use of the expression `z` would contain an unbound reference to `x`. +To avoid any such extrusion, we only match on a `${..}` if its expression is closed under the definitions within the pattern. +Therefore, the pattern will not match if the expression is not closed. -then it will call `sum`: +#### HOAS patterns +To allow extracting expressions that may contain extruded references we offer a _higher-order abstract syntax_ (HOAS) pattern `$f(y)` (or `$f(y1,...,yn)`). +This pattern will eta-expand the sub-expression with respect to `y` and bind it to `f`. +The lambda arguments will replace the variables that might have been extruded. 
```scala -val arr: Array[Int] = Array.apply(1, 2, 3) -${ '{ - var sum = 0 - ${ foreach('arr, x => '{sum += $x}) } - sum -} } +'{ ((x: Int) => x + 1).apply(2) } match + case '{ ((y: Int) => $f(y)).apply($z: Int) } => + // f may contain references to `x` (replaced by `$y`) + // f = (y: Expr[Int]) => '{ $y + 1 } + f(z) // generates '{ 2 + 1 } ``` -and cancel the `${'{...}}`: - -```scala -val arr: Array[Int] = Array.apply(1, 2, 3) - -var sum = 0 -${ foreach('arr, x => '{sum += $x}) } -sum -``` -then it will extract `x => '{sum += $x}` into `f`, to have a value: +A HOAS pattern `$x(y1,...,yn)` will only match the expression if it does not contain references to variables defined in the pattern that are not in the set `y1,...,yn`. +In other words, the pattern will match if the expression only contains references to variables defined in the pattern that are in `y1,...,yn`. +Note that the HOAS patterns `$x()` are semantically equivalent to closed patterns `$x`. -```scala -val arr: Array[Int] = Array.apply(1, 2, 3) -var sum = 0 -val f = x => '{sum += $x} -${ _root_.Macros.foreach('arr, 'f)(Type.of[Int]) } -sum -``` +#### Type variables -and then call `foreach`: +Expressions may contain types that are not statically known. +For example, an `Expr[List[Int]]` may contain `list.map(_.toInt)` where `list` is a `List` of some type. +To cover all the possible cases we would need to explicitly match `list` on all possible types (`List[Int]`, `List[Int => Int]`, ...). +This is an infinite set of types and therefore pattern cases. +Even if we would know all possible types that a specific program could use, we may still end up with an unmanageable number of cases. +To overcome this, we introduce type variables in quoted patterns, which will match any type. +In the following example, we show how type variables `t` and `u` match all possible pairs of consecutive calls to `map` on lists. +In the quoted patterns, types named with lower cases are identified as type variables. 
+This follows the same notation as type variables used in normal patterns. ```scala -val arr: Array[Int] = Array.apply(1, 2, 3) +def fuseMapCode(x: Expr[List[Int]]): Expr[List[Int]] = + x match + case '{ ($ls: List[t]).map[u]($f).map[Int]($g) } => + '{ $ls.map($g.compose($f)) } + ... -var sum = 0 -val f = x => '{sum += $x} -${ '{ - var i: Int = 0 - while i < arr.length do - val element: Int = (arr)(i) - sum += element - i += 1 - sum -} } +fuseMapCode('{ List(1.2).map(f).map(g) }) // '{ List(1.2).map(g.compose(f)) } +fuseMapCode('{ List('a').map(h).map(i) }) // '{ List('a').map(i.compose(h)) } ``` +Variables `f` and `g` are inferred to be of type `Expr[t => u]` and `Expr[u => Int]` respectively. +Subsequently, we can infer `$g.compose($f)` to be of type `Expr[t => Int]` which is the type of the argument of `$ls.map(..)`. -and cancel the `${'{...}}` again: +Type variables are abstract types that will be erased; this implies that to reference them in the second quote we need a given `Type[t]` and `Type[u]`. +The quoted pattern will implicitly provide those given types. +At run-time, when the pattern matches, the type of `t` and `u` will be known, and the `Type[t]` and `Type[u]` will contain the precise types in the expression. +As `Expr` is covariant, the statically known type of the expression might not be the actual type. +Type variables can also be used to recover the precise type of the expression. 
```scala -val arr: Array[Int] = Array.apply(1, 2, 3) +def let(x: Expr[Any])(using Quotes): Expr[Any] = + x match + case '{ $x: t } => + '{ val y: t = $x; y } -var sum = 0 -val f = x => '{sum += $x} -var i: Int = 0 -while i < arr.length do - val element: Int = (arr)(i) - sum += element - i += 1 -sum +let('{1}) // will return an `Expr[Any]` that contains an `Expr[Int]` ``` -Finally cleanups and dead code elimination: - +While we can define type variables in the middle of the pattern, their normal form is to define them as a `type` with a lower case name at the start of the pattern. +We use the Scala backquote `` `t` `` naming convention which interprets the string within the backquote as a literal name identifier. +This is typically used when we have names that contain special characters that are not allowed for normal Scala identifiers. +But we use it to explicitly state that this is a reference to that name and not the introduction of a new variable. ```scala -val arr: Array[Int] = Array.apply(1, 2, 3) -var sum = 0 -var i: Int = 0 -while i < arr.length do - val element: Int = arr(i) - sum += element - i += 1 -sum + case '{ type t; $x: `t` } => ``` - -## Find implicits within a macro - -Similarly to the `summonFrom` construct, it is possible to make implicit search available -in a quote context. For this we simply provide `scala.quoted.Expr.summon`: +This is a bit more verbose but has some expressivity advantages such as allowing us to define bounds on the variables and to refer to them several times in any scope of the pattern. 
```scala -import scala.collection.immutable.{ TreeSet, HashSet } -inline def setFor[T]: Set[T] = ${ setForExpr[T] } - -def setForExpr[T: Type](using Quotes): Expr[Set[T]] = - Expr.summon[Ordering[T]] match - case Some(ord) => '{ new TreeSet[T]()($ord) } - case _ => '{ new HashSet[T] } + case '{ type t >: List[Int] <: Seq[Int]; $x: `t` } => + case '{ type t; $x: (`t`, `t`) } => ``` -## Relationship with Transparent Inline -[Inline](./inline.md) documents inlining. The code below introduces a transparent -inline method that can calculate either a value of type `Int` or a value of type -`String`. +#### Type patterns +It is possible to only have a type and no expression of that type. +To be able to inspect a type, we introduce quoted type pattern `case '[..] =>`. +It works the same way as a quoted pattern but is restricted to contain a type. +Type variables can be used in quoted type patterns to extract a type. ```scala -transparent inline def defaultOf(inline str: String) = - ${ defaultOfImpl('str) } - -def defaultOfImpl(strExpr: Expr[String])(using Quotes): Expr[Any] = - strExpr.valueOrError match - case "int" => '{1} - case "string" => '{"a"} - -// in a separate file -val a: Int = defaultOf("int") -val b: String = defaultOf("string") - +def empty[T: Type]: Expr[T] = + Type.of[T] match + case '[String] => '{ "" } + case '[List[t]] => '{ List.empty[t] } + ... ``` -## Defining a macro and using it in a single project +`Type.of[T]` is used to summon the given instance of `Type[T]` in scope, it is equivalent to `summon[Type[T]]`. -It is possible to define macros and use them in the same project as long as the implementation -of the macros does not have run-time dependencies on code in the file where it is used. -It might still have compile-time dependencies on types and quoted code that refers to the use-site file. 
+#### Type testing and casting +It is important to note that instance checks and casts on `Expr`, such as `isInstanceOf[Expr[T]]` and `asInstanceOf[Expr[T]]`, will only check if the instance is of the class `Expr` but will not be able to check the `T` argument. +These cases will issue a warning at compile-time, but if they are ignored, they can result in unexpected behavior. -To provide this functionality Scala 3 provides a transparent compilation mode where files that -try to expand a macro but fail because the macro has not been compiled yet are suspended. -If there are any suspended files when the compilation ends, the compiler will automatically restart -compilation of the suspended files using the output of the previous (partial) compilation as macro classpath. -In case all files are suspended due to cyclic dependencies the compilation will fail with an error. +These operations can be supported correctly in the system. +For a simple type test it is possible to use the `isExprOf[T]` method of `Expr` to check if it is an instance of that type. +Similarly, it is possible to use `asExprOf[T]` to cast an expression to a given type. +These operations use a given `Type[T]` to work around type erasure. -## Pattern matching on quoted expressions -It is possible to deconstruct or extract values out of `Expr` using pattern matching. +## Sub-Expression Transformation -`scala.quoted` contains objects that can help extracting values from `Expr`. - -- `scala.quoted.Expr`/`scala.quoted.Exprs`: matches an expression of a value (resp. list of values) and returns the value (resp. list of values). -- `scala.quoted.Const`/`scala.quoted.Consts`: Same as `Expr`/`Exprs` but only works on primitive values. -- `scala.quoted.Varargs`: matches an explicit sequence of expressions and returns them. These sequences are useful to get individual `Expr[T]` out of a varargs expression of type `Expr[Seq[T]]`. 
- -These could be used in the following way to optimize any call to `sum` that has statically known values. +The system provides a mechanism to transform all sub-expressions of an expression. +This is useful when the sub-expressions we want to transform are deep in the expression. +It is also necessary if the expression contains sub-expressions that cannot be matched using quoted patterns (such as local class definitions). ```scala -inline def sum(inline args: Int*): Int = ${ sumExpr('args) } -private def sumExpr(argsExpr: Expr[Seq[Int]])(using Quotes): Expr[Int] = - argsExpr match - case Varargs(args @ Exprs(argValues)) => - // args is of type Seq[Expr[Int]] - // argValues is of type Seq[Int] - Expr(argValues.sum) // precompute result of sum - case Varargs(argExprs) => // argExprs is of type Seq[Expr[Int]] - val staticSum: Int = argExprs.map(_.value.getOrElse(0)).sum - val dynamicSum: Seq[Expr[Int]] = argExprs.filter(_.value.isEmpty) - dynamicSum.foldLeft(Expr(staticSum))((acc, arg) => '{ $acc + $arg }) - case _ => - '{ $argsExpr.sum } - -sum(1, 2, 3) // gets matched by Varargs - -val xs = List(1, 2, 3) -sum(xs*) // doesn't get matched by Varargs +trait ExprMap: + def transform[T](e: Expr[T])(using Type[T])(using Quotes): Expr[T] + def transformChildren[T](e: Expr[T])(using Type[T])(using Quotes): Expr[T] = + ... ``` -### Quoted patterns - -Quoted pattens allow deconstructing complex code that contains a precise structure, types or methods. -Patterns `'{ ... }` can be placed in any location where Scala expects a pattern. - -For example - -```scala -optimize { - sum(sum(1, a, 2), 3, b) -} // should be optimized to 6 + a + b -``` +Users can extend the `ExprMap` trait and implement the `transform` method. +This interface is flexible and can implement top-down, bottom-up, or other transformations. 
```scala -def sum(args: Int*): Int = args.sum -inline def optimize(inline arg: Int): Int = ${ optimizeExpr('arg) } -private def optimizeExpr(body: Expr[Int])(using Quotes): Expr[Int] = - body match - // Match a call to sum without any arguments - case '{ sum() } => Expr(0) - // Match a call to sum with an argument $n of type Int. - // n will be the Expr[Int] representing the argument. - case '{ sum($n) } => n - // Match a call to sum and extracts all its args in an `Expr[Seq[Int]]` - case '{ sum(${Varargs(args)}: _*) } => sumExpr(args) - case body => body - -private def sumExpr(args1: Seq[Expr[Int]])(using Quotes): Expr[Int] = - def flatSumArgs(arg: Expr[Int]): Seq[Expr[Int]] = arg match - case '{ sum(${Varargs(subArgs)}: _*) } => subArgs.flatMap(flatSumArgs) - case arg => Seq(arg) - val args2 = args1.flatMap(flatSumArgs) - val staticSum: Int = args2.map(_.value.getOrElse(0)).sum - val dynamicSum: Seq[Expr[Int]] = args2.filter(_.value.isEmpty) - dynamicSum.foldLeft(Expr(staticSum))((acc, arg) => '{ $acc + $arg }) +object OptimizeIdentity extends ExprMap: + def transform[T](e: Expr[T])(using Type[T])(using Quotes): Expr[T] = + transformChildren(e) match // bottom-up transformation + case '{ identity($x) } => x + case _ => e ``` -### Recovering precise types using patterns +The `transformChildren` method is implemented as a primitive that knows how to reach all the direct sub-expressions and calls `transform` on each one. +The type passed to `transform` is the expected type of this sub-expression in its expression. +For example while transforming `Some(1)` in `'{ val x: Option[Int] = Some(1); ...}` the type will be `Option[Int]` and not `Some[Int]`. +This implies that we can safely transform `Some(1)` into `None`. -Sometimes it is necessary to get a more precise type for an expression. This can be achieved using the following pattern match. 
+## Staged Implicit Summoning +When summoning implicit arguments using `summon`, we will find the given instances in the current scope. +It is possible to use `summon` to get staged implicit arguments by explicitly staging them first. +In the following example, we can pass an implicit `Ordering[T]` in a macro as an `Expr[Ordering[T]]` to its implementation. +Then we can splice it and give it implicitly in the next stage. ```scala -def f(expr: Expr[Any])(using Quotes) = expr match - case '{ $x: t } => - // If the pattern match succeeds, then there is - // some type `t` such that - // - `x` is bound to a variable of type `Expr[t]` - // - `t` is bound to a new type `t` and a given - // instance `Type[t]` is provided for it - // That is, we have `x: Expr[t]` and `given Type[t]`, - // for some (unknown) type `t`. -``` - -This might be used to then perform an implicit search as in: - -```scala -extension (inline sc: StringContext) - inline def showMe(inline args: Any*): String = ${ showMeExpr('sc, 'args) } - -private def showMeExpr(sc: Expr[StringContext], argsExpr: Expr[Seq[Any]])(using Quotes): Expr[String] = - import quotes.reflect.report - argsExpr match - case Varargs(argExprs) => - val argShowedExprs = argExprs.map { - case '{ $arg: tp } => - Expr.summon[Show[tp]] match - case Some(showExpr) => - '{ $showExpr.show($arg) } - case None => - report.error(s"could not find implicit for ${Type.show[Show[tp]]}", arg); '{???} - } - val newArgsExpr = Varargs(argShowedExprs) - '{ $sc.s($newArgsExpr: _*) } - case _ => - // `new StringContext(...).showMeExpr(args: _*)` not an explicit `showMeExpr"..."` - report.error(s"Args must be explicit", argsExpr) - '{???} - -trait Show[-T]: - def show(x: T): String - -// in a different file -given Show[Boolean] with - def show(b: Boolean) = "boolean!" 
+inline def treeSetFor[T](using ord: Ordering[T]): Set[T] = + ${ setExpr[T](using 'ord) } -println(showMe"${true}") +def setExpr[T:Type](using ord: Expr[Ordering[T]])(using Quotes): Expr[Set[T]] = + '{ given Ordering[T] = $ord; new TreeSet[T]() } ``` -### Open code patterns +We pass it as an implicit `Expr[Ordering[T]]` because there might be intermediate methods that can pass it along implicitly. -Quoted pattern matching also provides higher-order patterns to match open terms. If a quoted term contains a definition, -then the rest of the quote can refer to this definition. +An alternative is to summon implicit values in the scope where the macro is invoked. +Using the `Expr.summon` method we get an optional expression containing the implicit instance. +This provides the ability to search for implicit instances conditionally. ```scala -'{ - val x: Int = 4 - x * x -} -``` - -To match such a term we need to match the definition and the rest of the code, but we need to explicitly state that the rest of the code may refer to this definition. - -```scala -case '{ val y: Int = $x; $body(y): Int } => +def summon[T: Type](using Quotes): Option[Expr[T]] ``` -Here `$x` will match any closed expression while `$body(y)` will match an expression that is closed under `y`. Then -the subexpression of type `Expr[Int]` is bound to `body` as an `Expr[Int => Int]`. The extra argument represents the references to `y`. Usually this expression is used in combination with `Expr.betaReduce` to replace the extra argument. 
- ```scala -inline def eval(inline e: Int): Int = ${ evalExpr('e) } +inline def setFor[T]: Set[T] = + ${ setForExpr[T] } -private def evalExpr(e: Expr[Int])(using Quotes): Expr[Int] = e match - case '{ val y: Int = $x; $body(y): Int } => - // body: Expr[Int => Int] where the argument represents - // references to y - evalExpr(Expr.betaReduce('{$body(${evalExpr(x)})})) - case '{ ($x: Int) * ($y: Int) } => - (x.value, y.value) match - case (Some(a), Some(b)) => Expr(a * b) - case _ => e - case _ => e +def setForExpr[T: Type](using Quotes): Expr[Set[T]] = + Expr.summon[Ordering[T]] match + case Some(ord) => + '{ new TreeSet[T]()($ord) } + case _ => + '{ new HashSet[T] } ``` -```scala -eval { // expands to the code: (16: Int) - val x: Int = 4 - x * x -} -``` +## More details -We can also close over several bindings using `$b(a1, a2, ..., an)`. -To match an actual application we can use braces on the function part `${b}(a1, a2, ..., an)`. +* [Specification](./macros-spec.md) +* Scalable Metaprogramming in Scala 3[^1] -## More details -[More details](./macros-spec.md) +[^1]: [Scalable Metaprogramming in Scala 3](https://infoscience.epfl.ch/record/299370) +[^2]: [Semantics-preserving inlining for metaprogramming](https://dl.acm.org/doi/10.1145/3426426.3428486) +[^3]: Implemented in the Scala 3 Dotty project https://github.com/lampepfl/dotty. sbt library dependency `"org.scala-lang" %% "scala3-staging" % scalaVersion.value` +[^4]: Using the `-Xcheck-macros` compiler flag diff --git a/docs/_docs/reference/metaprogramming/simple-smp.md b/docs/_docs/reference/metaprogramming/simple-smp.md index 2ba0155ad329..61b062f55b87 100644 --- a/docs/_docs/reference/metaprogramming/simple-smp.md +++ b/docs/_docs/reference/metaprogramming/simple-smp.md @@ -23,7 +23,7 @@ replace evaluation contexts with contextual typing rules. While this is more verbose, it makes it easier to set up the meta theory. 
## Syntax -``` +```ebnf Terms t ::= x variable (x: T) => t lambda t t application diff --git a/docs/_docs/reference/metaprogramming/staging.md b/docs/_docs/reference/metaprogramming/staging.md index e74d491402b5..1c154e09f50e 100644 --- a/docs/_docs/reference/metaprogramming/staging.md +++ b/docs/_docs/reference/metaprogramming/staging.md @@ -1,6 +1,6 @@ --- layout: doc-page -title: "Runtime Multi-Stage Programming" +title: "Run-Time Multi-Stage Programming" nightlyOf: https://docs.scala-lang.org/scala3/reference/metaprogramming/staging.html --- @@ -60,7 +60,7 @@ impose the following restrictions on the use of splices. The framework as discussed so far allows code to be staged, i.e. be prepared to be executed at a later stage. To run that code, there is another method in class `Expr` called `run`. Note that `$` and `run` both map from `Expr[T]` -to `T` but only `$` is subject to the [PCP](./macros.md#the-phase-consistency-principle), whereas `run` is just a normal method. +to `T` but only `$` is subject to [Cross-Stage Safety](./macros.md#cross-stage-safety), whereas `run` is just a normal method. `scala.quoted.staging.run` provides a `Quotes` that can be used to show the expression in its scope. On the other hand `scala.quoted.staging.withQuotes` provides a `Quotes` without evaluating the expression. 
diff --git a/docs/_docs/reference/new-types/dependent-function-types-spec.md b/docs/_docs/reference/new-types/dependent-function-types-spec.md index f3237ddf7b9a..f603200b1ae0 100644 --- a/docs/_docs/reference/new-types/dependent-function-types-spec.md +++ b/docs/_docs/reference/new-types/dependent-function-types-spec.md @@ -8,7 +8,7 @@ Initial implementation in [PR #3464](https://github.com/lampepfl/dotty/pull/3464 ## Syntax -``` +```ebnf FunArgTypes ::= InfixType | ‘(’ [ FunArgType {',' FunArgType } ] ‘)’ | ‘(’ TypedFunParam {',' TypedFunParam } ‘)’ diff --git a/docs/_docs/reference/new-types/intersection-types-spec.md b/docs/_docs/reference/new-types/intersection-types-spec.md index 346c57c004f0..8d332fc6ed29 100644 --- a/docs/_docs/reference/new-types/intersection-types-spec.md +++ b/docs/_docs/reference/new-types/intersection-types-spec.md @@ -12,7 +12,7 @@ with the usual precedence and subject to usual resolving rules. Unless shadowed by another definition, it resolves to the type `scala.&`, which acts as a type alias to an internal representation of intersection types. -``` +```ebnf Type ::= ...| InfixType InfixType ::= RefinedType {id [nl] RefinedType} ``` diff --git a/docs/_docs/reference/new-types/type-lambdas-spec.md b/docs/_docs/reference/new-types/type-lambdas-spec.md index 76937e5160f7..7f7053a13ddd 100644 --- a/docs/_docs/reference/new-types/type-lambdas-spec.md +++ b/docs/_docs/reference/new-types/type-lambdas-spec.md @@ -6,7 +6,7 @@ nightlyOf: https://docs.scala-lang.org/scala3/reference/new-types/type-lambdas-s ## Syntax -``` +```ebnf Type ::= ... 
 | TypeParamClause ‘=>>’ Type TypeParamClause ::= ‘[’ TypeParam {‘,’ TypeParam} ‘]’ TypeParam ::= {Annotation} (id [HkTypeParamClause] | ‘_’) TypeBounds diff --git a/docs/_docs/reference/new-types/union-types-spec.md b/docs/_docs/reference/new-types/union-types-spec.md index d250d3f11713..1093631e7c63 100644 --- a/docs/_docs/reference/new-types/union-types-spec.md +++ b/docs/_docs/reference/new-types/union-types-spec.md @@ -72,6 +72,10 @@ a non-union type, for this purpose we define the _join_ of a union type `T1 | `T1`,...,`Tn`. Note that union types might still appear as type arguments in the resulting type, this guarantees that the join is always finite. +The _visible join_ of a union type is its join where all operands of the intersection that +are instances of [transparent](../other-new-features/transparent-traits.md) traits or classes are removed. + + ### Example Given @@ -80,31 +84,50 @@ Given trait C[+T] trait D trait E -class A extends C[A] with D -class B extends C[B] with D with E +transparent trait X +class A extends C[A], D, X +class B extends C[B], D, E, X ``` -The join of `A | B` is `C[A | B] & D` +The join of `A | B` is `C[A | B] & D & X` and the visible join of `A | B` is `C[A | B] & D`. + +## Hard and Soft Union Types + +We distinguish between hard and soft union types. A _hard_ union type is a union type that's explicitly +written in the source. For instance, in +```scala +val x: Int | String = ... +``` +`Int | String` would be a hard union type. A _soft_ union type is a type that arises from type checking +an alternative of expressions. For instance, the type of the expression +```scala +val x = 1 +val y = "abc" +if cond then x else y +``` +is the soft union type `Int | String`. Similarly for match expressions. The type of +```scala +x match + case 1 => x + case 2 => "abc" + case 3 => List(1, 2, 3) +``` +is the soft union type `Int | "abc" | List[Int]`. 
+ ## Type inference When inferring the result type of a definition (`val`, `var`, or `def`) and the -type we are about to infer is a union type, then we replace it by its join. +type we are about to infer is a soft union type, then we replace it by its visible join, +provided it is not empty. Similarly, when instantiating a type argument, if the corresponding type parameter is not upper-bounded by a union type and the type we are about to -instantiate is a union type, we replace it by its join. This mirrors the +instantiate is a soft union type, we replace it by its visible join, provided it is not empty. +This mirrors the treatment of singleton types which are also widened to their underlying type unless explicitly specified. The motivation is the same: inferring types which are "too precise" can lead to unintuitive typechecking issues later on. -**Note:** Since this behavior limits the usability of union types, it might -be changed in the future. For example by not widening unions that have been -explicitly written down by the user and not inferred, or by not widening a type -argument when the corresponding type parameter is covariant. - -See [PR #2330](https://github.com/lampepfl/dotty/pull/2330) and -[Issue #4867](https://github.com/lampepfl/dotty/issues/4867) for further discussions. - ### Example ```scala diff --git a/docs/_docs/reference/new-types/union-types.md b/docs/_docs/reference/new-types/union-types.md index 76c0ac6e674c..3729cbf09848 100644 --- a/docs/_docs/reference/new-types/union-types.md +++ b/docs/_docs/reference/new-types/union-types.md @@ -8,8 +8,9 @@ A union type `A | B` includes all values of both types. ```scala -case class UserName(name: String) -case class Password(hash: Hash) +trait ID +case class UserName(name: String) extends ID +case class Password(hash: Hash) extends ID def help(id: UserName | Password) = val user = id match @@ -22,7 +23,10 @@ Union types are duals of intersection types. 
`|` is _commutative_: `A | B` is the same type as `B | A`. The compiler will assign a union type to an expression only if such a -type is explicitly given. This can be seen in the following [REPL](https://docs.scala-lang.org/overviews/repl/overview.html) transcript: +type is explicitly given or if the common supertype of all alternatives is [transparent](../other-new-features/transparent-traits.md). + + +This can be seen in the following [REPL](https://docs.scala-lang.org/overviews/repl/overview.html) transcript: ```scala scala> val password = Password(123) @@ -32,15 +36,36 @@ scala> val name = UserName("Eve") val name: UserName = UserName(Eve) scala> if true then name else password -val res2: Object = UserName(Eve) +val res1: ID = UserName(Eve) scala> val either: Password | UserName = if true then name else password -val either: Password | UserName = UserName(Eve) +val either: UserName | Password = UserName(Eve) ``` - -The type of `res2` is `Object & Product`, which is a supertype of -`UserName` and `Password`, but not the least supertype `Password | -UserName`. If we want the least supertype, we have to give it +The type of `res1` is `ID`, which is a supertype of +`UserName` and `Password`, but not the least supertype `UserName | Password`. +If we want the least supertype, we have to give it explicitly, as is done for the type of `either`. +The inference behavior changes if the common supertrait `ID` is declared `transparent`: +```scala +transparent trait ID +``` +In that case the union type is not widened. +```scala +scala> if true then name else password +val res2: UserName | Password = UserName(Eve) +``` +The more precise union type is also inferred if `UserName` and `Password` are declared without an explicit +parent, since in that case their implied superclass is `Object`, which is among the classes that are +assumed to be transparent. See [Transparent Traits and Classes](../other-new-features/transparent-traits.md) +for a list of such classes. 
+```scala +case class UserName(name: String) +case class Password(hash: Hash) + +scala> if true then UserName("Eve") else Password(123) +val res3: UserName | Password = UserName(Eve) +``` + + [More details](./union-types-spec.md) diff --git a/docs/_docs/reference/other-new-features/creator-applications.md b/docs/_docs/reference/other-new-features/creator-applications.md index 81f09d897955..8b1de02b2f25 100644 --- a/docs/_docs/reference/other-new-features/creator-applications.md +++ b/docs/_docs/reference/other-new-features/creator-applications.md @@ -47,8 +47,12 @@ be selected with `apply` (or be applied to arguments, in which case the `apply` inserted). Constructor proxies are also not allowed to shadow normal definitions. That is, -if an identifier resolves to a constructor proxy, and the same identifier is also -defined or imported in some other scope, an ambiguity is reported. +an ambiguity is reported, if + + - an identifier resolves to a constructor proxy, + - the same identifier is also defined or imported in some other scope, + - the other reference can be applied to a (possibly empty) parameter list. That + is, it refers either to a method or to a value containing an apply method as member. ## Motivation diff --git a/docs/_docs/reference/other-new-features/experimental-defs.md b/docs/_docs/reference/other-new-features/experimental-defs.md index 225b61161652..88815ad1e136 100644 --- a/docs/_docs/reference/other-new-features/experimental-defs.md +++ b/docs/_docs/reference/other-new-features/experimental-defs.md @@ -216,7 +216,7 @@ Experimental definitions can only be referenced in an experimental scope. Experi
Example 1 - + ```scala import scala.annotation.experimental @@ -242,7 +242,7 @@ Experimental definitions can only be referenced in an experimental scope. Experi } } ``` - +
5. Annotations of an experimental definition are in experimental scopes. Examples: @@ -270,13 +270,6 @@ Can use the `-Yno-experimental` compiler flag to disable it and run as a proper In any other situation, a reference to an experimental definition will cause a compilation error. -## Experimental inheritance - -All subclasses of an experimental `class` or `trait` must be marked as [`@experimental`](https://scala-lang.org/api/3.x/scala/annotation/experimental.html) even if they are in an experimental scope. -Anonymous classes and SAMs of experimental classes are considered experimental. - -We require explicit annotations to make sure we do not have completion or cycles issues with nested classes. This restriction could be relaxed in the future. - ## Experimental overriding For an overriding member `M` and overridden member `O`, if `O` is non-experimental then `M` must be non-experimental. diff --git a/docs/_docs/reference/other-new-features/export.md b/docs/_docs/reference/other-new-features/export.md index 41104a54e4a6..e8482cb343d9 100644 --- a/docs/_docs/reference/other-new-features/export.md +++ b/docs/_docs/reference/other-new-features/export.md @@ -176,7 +176,7 @@ extension (x: String) ## Syntax changes: -``` +```ebnf TemplateStat ::= ... | Export TopStat ::= ... diff --git a/docs/_docs/reference/other-new-features/indentation.md b/docs/_docs/reference/other-new-features/indentation.md index f60d2d462c82..9963d1ee7577 100644 --- a/docs/_docs/reference/other-new-features/indentation.md +++ b/docs/_docs/reference/other-new-features/indentation.md @@ -100,7 +100,7 @@ There are two rules: - An `` is finally inserted in front of a comma that follows a statement sequence starting with an `` if the indented region is itself enclosed in parentheses. -It is an error if the indentation width of the token following an `` does not match the indentation of some previous line in the enclosing indentation region. For instance, the following would be rejected. 
+It is generally an error if the indentation width of the token following an `` does not match the indentation of some previous line in the enclosing indentation region. For instance, the following would be rejected. ```scala if x < 0 then @@ -109,6 +109,19 @@ if x < 0 then x ``` +However, there is one exception to this rule: If the next line starts with a '`.`' _and_ the indentation +width is different from the indentation widths of the two neighboring regions by more than a single space, the line is accepted. For instance, the following is OK: + +```scala +xs.map: x => + x + 1 + .filter: x => + x > 0 +``` +Here, the line starting with `.filter` does not have an indentation level matching a previous line, +but it is still accepted since it starts with a '`.`' and differs in at least two spaces from the +indentation levels of both the region that is closed and the next outer region. + Indentation tokens are only inserted in regions where newline statement separators are also inferred: at the top-level, inside braces `{...}`, but not inside parentheses `(...)`, patterns or types. @@ -174,18 +187,72 @@ The syntax changes allowing this are as follows: Define for an arbitrary sequence of tokens or non-terminals `TS`: -``` +```ebnf :<<< TS >>> ::= ‘{’ TS ‘}’ | ``` Then the grammar changes as follows: -``` +```ebnf TemplateBody ::= :<<< [SelfType] TemplateStat {semi TemplateStat} >>> EnumBody ::= :<<< [SelfType] EnumStat {semi EnumStat} >>> Refinement ::= :<<< [RefineDcl] {semi [RefineDcl]} >>> Packaging ::= ‘package’ QualId :<<< TopStats >>> ``` +## Optional Braces for Method Arguments + +Starting with Scala 3.3, a `` token is also recognized where a function argument would be expected. 
Examples: + +```scala +times(10): + println("ah") + println("ha") +``` + +or + +```scala +credentials `++`: + val file = Path.userHome / ".credentials" + if file.exists + then Seq(Credentials(file)) + else Seq() +``` + +or + +```scala +xs.map: + x => + val y = x - 1 + y * y +``` +What's more, a `:` in these settings can also be followed on the same line by the parameter part and arrow of a lambda. So the last example could be compressed to this: + +```scala +xs.map: x => + val y = x - 1 + y * y +``` +and the following would also be legal: +```scala +xs.foldLeft(0): (x, y) => + x + y +``` + +The grammar changes for optional braces around arguments are as follows. + +```ebnf +SimpleExpr ::= ... + | SimpleExpr ColonArgument +InfixExpr ::= ... + | InfixExpr id ColonArgument +ColonArgument ::= colon [LambdaStart] + indent (CaseClauses | Block) outdent +LambdaStart ::= FunParams (‘=>’ | ‘?=>’) + | HkTypeParamClause ‘=>’ +``` + ## Spaces vs Tabs Indentation prefixes can consist of spaces and/or tabs. Indentation widths are the indentation prefixes themselves, ordered by the string prefix relation. So, so for instance "2 tabs, followed by 4 spaces" is strictly less than "2 tabs, followed by 5 spaces", but "2 tabs, followed by 4 spaces" is incomparable to "6 tabs" or to "4 spaces, followed by 2 tabs". It is an error if the indentation width of some line is incomparable with the indentation width of the region that's current at that point. To avoid such errors, it is a good idea not to mix spaces and tabs in the same source file. @@ -207,7 +274,8 @@ Indentation can be mixed freely with braces `{...}`, as well as brackets `[...]` For instance, consider: ```scala { - val x = f(x: Int, y => + val x = 4 + f(x: Int, y => x * ( y + 1 ) + @@ -216,13 +284,13 @@ For instance, consider: ) } ``` - - Here, the indentation width of the region enclosed by the braces is 3 (i.e. the indentation width of the + - Here, the indentation width of the region enclosed by the braces is 2 (i.e. 
the indentation width of the statement starting with `val`). - - The indentation width of the region in parentheses that follows `f` is also 3, since the opening + - The indentation width of the region in parentheses that follows `f` is also 2, since the opening parenthesis is not at the end of a line. - - The indentation width of the region in parentheses around `y + 1` is 9 + - The indentation width of the region in parentheses around `y + 1` is 6 (i.e. the indentation width of `y + 1`). - - Finally, the indentation width of the last region in parentheses starting with `(x` is 6 (i.e. the indentation width of the indented region following the `=>`. + - Finally, the indentation width of the last region in parentheses starting with `(x` is 4 (i.e. the indentation width of the indented region following the `=>`. ## Special Treatment of Case Clauses @@ -376,7 +444,7 @@ If none of these criteria apply, it's often better to not use an end marker sinc ### Syntax -``` +```ebnf EndMarker ::= ‘end’ EndMarkerTag -- when followed by EOL EndMarkerTag ::= id | ‘if’ | ‘while’ | ‘for’ | ‘match’ | ‘try’ | ‘new’ | ‘this’ | ‘given’ | ‘extension’ | ‘val’ @@ -448,62 +516,3 @@ indented regions where possible. When invoked with options `-rewrite -no-indent` The `-indent` option only works on [new-style syntax](./control-syntax.md). So to go from old-style syntax to new-style indented code one has to invoke the compiler twice, first with options `-rewrite -new-syntax`, then again with options `-rewrite -indent`. To go in the opposite direction, from indented code to old-style syntax, it's `-rewrite -no-indent`, followed by `-rewrite -old-syntax`. -## Variant: Indentation Marker `:` for Arguments - -Generally, the possible indentation regions coincide with those regions where braces `{...}` are also legal, no matter whether the braces enclose an expression or a set of definitions. 
There is one exception, though: Arguments to functions can be enclosed in braces but they cannot be simply indented instead. Making indentation always significant for function arguments would be too restrictive and fragile. - -To allow such arguments to be written without braces, a variant of the indentation scheme is implemented under language import -```scala -import language.experimental.fewerBraces -``` -In this variant, a `` token is also recognized where function argument would be expected. Examples: - -```scala -times(10): - println("ah") - println("ha") -``` - -or - -```scala -credentials `++`: - val file = Path.userHome / ".credentials" - if file.exists - then Seq(Credentials(file)) - else Seq() -``` - -or - -```scala -xs.map: - x => - val y = x - 1 - y * y -``` -What's more, a `:` in these settings can also be followed on the same line by the parameter part and arrow of a lambda. So the last example could be compressed to this: - -```scala -xs.map: x => - val y = x - 1 - y * y -``` -and the following would also be legal: -```scala -xs.foldLeft(0): (x, y) => - x + y -``` - -The grammar changes for this variant are as follows. - -``` -SimpleExpr ::= ... - | SimpleExpr ColonArgument -InfixExpr ::= ... - | InfixExpr id ColonArgument -ColonArgument ::= colon [LambdaStart] - indent (CaseClauses | Block) outdent -LambdaStart ::= FunParams (‘=>’ | ‘?=>’) - | HkTypeParamClause ‘=>’ -``` diff --git a/docs/_docs/reference/other-new-features/opaques-details.md b/docs/_docs/reference/other-new-features/opaques-details.md index 87e56e240481..d285ec8e8325 100644 --- a/docs/_docs/reference/other-new-features/opaques-details.md +++ b/docs/_docs/reference/other-new-features/opaques-details.md @@ -6,7 +6,7 @@ nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/opaqu ## Syntax -``` +```ebnf Modifier ::= ... 
| ‘opaque’ ``` diff --git a/docs/_docs/reference/other-new-features/open-classes.md b/docs/_docs/reference/other-new-features/open-classes.md index 764c234df599..10af6ead669e 100644 --- a/docs/_docs/reference/other-new-features/open-classes.md +++ b/docs/_docs/reference/other-new-features/open-classes.md @@ -77,4 +77,4 @@ A class that is neither `abstract` nor `open` is similar to a `sealed` class: it ## Migration -`open` is a new modifier in Scala 3. To allow cross compilation between Scala 2.13 and Scala 3.0 without warnings, the feature warning for ad-hoc extensions is produced only under `-source future`. It will be produced by default from Scala 3.1 on. +`open` is a new modifier in Scala 3. To allow cross compilation between Scala 2.13 and Scala 3.0 without warnings, the feature warning for ad-hoc extensions is produced only under `-source future`. It will be produced by default [from Scala 3.4 on](https://github.com/lampepfl/dotty/issues/16334). diff --git a/docs/_docs/reference/other-new-features/transparent-traits.md b/docs/_docs/reference/other-new-features/transparent-traits.md index 699ce0b9ddd8..b930ffbfde00 100644 --- a/docs/_docs/reference/other-new-features/transparent-traits.md +++ b/docs/_docs/reference/other-new-features/transparent-traits.md @@ -1,6 +1,6 @@ --- layout: doc-page -title: "Transparent Traits" +title: "Transparent Traits and Classes" nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/transparent-traits.html --- @@ -20,12 +20,13 @@ val x = Set(if condition then Val else Var) Here, the inferred type of `x` is `Set[Kind & Product & Serializable]` whereas one would have hoped it to be `Set[Kind]`. The reasoning for this particular type to be inferred is as follows: -- The type of the conditional above is the [union type](../new-types/union-types.md) `Val | Var`. -- A union type is widened in type inference to the least supertype that is not a union type. 
- In the example, this type is `Kind & Product & Serializable` since all three traits are traits of both `Val` and `Var`. +- The type of the conditional above is the [union type](../new-types/union-types.md) `Val | Var`. This union type is treated as "soft", which means it was not explicitly written in the source program, but came from forming an upper bound of the types of +some alternatives. +- A soft union type is widened in type inference to the least product of class or trait types that is a supertype of the union type. + In the example, this type is `Kind & Product & Serializable` since all three traits are super-traits of both `Val` and `Var`. So that type becomes the inferred element type of the set. -Scala 3 allows one to mark a mixin trait as `transparent`, which means that it can be suppressed in type inference. Here's an example that follows the lines of the code above, but now with a new transparent trait `S` instead of `Product`: +Scala 3 allows one to mark a trait or class as `transparent`, which means that it can be suppressed in type inference. Here's an example that follows the lines of the code above, but now with a new transparent trait `S` instead of `Product`: ```scala transparent trait S @@ -38,13 +39,40 @@ val x = Set(if condition then Val else Var) Now `x` has inferred type `Set[Kind]`. The common transparent trait `S` does not appear in the inferred type. -## Transparent Traits +In the previous example, one could also declare `Kind` as `transparent`: +```scala +transparent trait Kind +``` +The widened union type of `if condition then Val else Var` would then +_only_ contain the transparent traits `Kind` and `S`. In this case, +the widening is not performed at all, so `x` would have type `Set[Val | Var]`. + +The root classes and traits `Any`, `AnyVal`, `Object`, and `Matchable` are +considered to be transparent. 
This means that an expression such +as +```scala +if condition then 1 else "hello" +``` +will have type `Int | String` instead of the widened type `Any`. + -The traits [`scala.Product`](https://scala-lang.org/api/3.x/scala/Product.html), [`java.io.Serializable`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/io/Serializable.html) and [`java.lang.Comparable`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/lang/Comparable.html) -are treated automatically as transparent. Other traits are turned into transparent traits using the modifier `transparent`. Scala 2 traits can also be made transparent -by adding a [`@transparentTrait`](https://scala-lang.org/api/3.x/scala/annotation/transparentTrait.html) annotation. This annotation is defined in [`scala.annotation`](https://scala-lang.org/api/3.x/scala/annotation.html). It will be deprecated and phased out once Scala 2/3 interoperability is no longer needed. -Typically, transparent traits are traits +## Which Traits and Classes Are Transparent? + +Traits and classes are declared transparent by adding the modifier `transparent`. Scala 2 traits and classes can also be declared transparent by adding a [`@transparentTrait`](https://scala-lang.org/api/3.x/scala/annotation/transparentTrait.html) annotation. This annotation is defined in [`scala.annotation`](https://scala-lang.org/api/3.x/scala/annotation.html). It will be deprecated and phased out once Scala 2/3 interoperability is no longer needed. + +The following classes and traits are automatically treated as transparent: +```scala + scala.Any + scala.AnyVal + scala.Matchable + scala.Product + java.lang.Object + java.lang.Comparable + java.io.Serializable +``` + +Typically, transparent types other than the root classes are traits that influence the implementation of inheriting classes and traits that are not usually used as types by themselves. 
Two examples from the standard collection library are: - [`IterableOps`](https://scala-lang.org/api/3.x/scala/collection/IterableOps.html), which provides method implementations for an [`Iterable`](https://scala-lang.org/api/3.x/scala/collection/Iterable.html). @@ -55,7 +83,10 @@ declared transparent. ## Rules for Inference -Transparent traits can be given as explicit types as usual. But they are often elided when types are inferred. Roughly, the rules for type inference say that transparent traits are dropped from intersections where possible. +Transparent traits and classes can be given as explicit types as usual. But they are often elided when types are inferred. Roughly, the rules for type inference imply the following. + + - Transparent traits are dropped from intersections where possible. + - Union types are not widened if widening would result in only transparent supertypes. The precise rules are as follows: @@ -63,8 +94,8 @@ The precise rules are as follows: - where that type is not higher-kinded, - and where `B` is its known upper bound or `Any` if none exists: - If the type inferred so far is of the form `T1 & ... & Tn` where - `n >= 1`, replace the maximal number of transparent `Ti`s by `Any`, while ensuring that + `n >= 1`, replace the maximal number of transparent traits `Ti`s by `Any`, while ensuring that the resulting type is still a subtype of the bound `B`. -- However, do not perform this widening if all transparent traits `Ti` can get replaced in that way. +- However, do not perform this widening if all types `Ti` can get replaced in that way. This clause ensures that a single transparent trait instance such as [`Product`](https://scala-lang.org/api/3.x/scala/Product.html) is not widened to [`Any`](https://scala-lang.org/api/3.x/scala/Any.html). Transparent trait instances are only dropped when they appear in conjunction with some other type. 
-The last clause ensures that a single transparent trait instance such as [`Product`](https://scala-lang.org/api/3.x/scala/Product.html) is not widened to [`Any`](https://scala-lang.org/api/3.x/scala/Any.html). Transparent trait instances are only dropped when they appear in conjunction with some other type. +- If the original type was a is union type that got widened in a previous step to a product consisting only of transparent traits and classes, keep the original union type instead of its widened form. \ No newline at end of file diff --git a/docs/_docs/reference/syntax.md b/docs/_docs/reference/syntax.md index e11629c8eaf9..a705c5a3fd79 100644 --- a/docs/_docs/reference/syntax.md +++ b/docs/_docs/reference/syntax.md @@ -21,51 +21,48 @@ productions map to AST nodes. The following description of Scala tokens uses literal characters `‘c’` when referring to the ASCII fragment `\u0000` – `\u007F`. -_Unicode escapes_ are used to represent the [Unicode character](https://www.w3.org/International/articles/definitions-characters/) with the given -hexadecimal code: - -``` -UnicodeEscape ::= ‘\’ ‘u’ {‘u’} hexDigit hexDigit hexDigit hexDigit -hexDigit ::= ‘0’ | … | ‘9’ | ‘A’ | … | ‘F’ | ‘a’ | … | ‘f’ -``` - Informal descriptions are typeset as `“some comment”`. ## Lexical Syntax -The lexical syntax of Scala is given by the following grammar in EBNF -form. +The lexical syntax of Scala is given by the following grammar in EBNF form: -``` +```ebnf whiteSpace ::= ‘\u0020’ | ‘\u0009’ | ‘\u000D’ | ‘\u000A’ -upper ::= ‘A’ | … | ‘Z’ | ‘\$’ | ‘_’ “… and Unicode category Lu” -lower ::= ‘a’ | … | ‘z’ “… and Unicode category Ll” -letter ::= upper | lower “… and Unicode categories Lo, Lt, Nl” -digit ::= ‘0’ | … | ‘9’ +upper ::= ‘A’ | ... | ‘Z’ | ‘$’ and any character in Unicode categories Lu, Lt or Nl, + and any character in Unicode categories Lo and Lm that doesn't have + contributory property Other_Lowercase +lower ::= ‘a’ | ... 
| ‘z’ | ‘_’ and any character in Unicode category Ll, + and any character in Unicode categories Lo or Lm that has contributory + property Other_Lowercase +letter ::= upper | lower +digit ::= ‘0’ | ... | ‘9’ paren ::= ‘(’ | ‘)’ | ‘[’ | ‘]’ | ‘{’ | ‘}’ delim ::= ‘`’ | ‘'’ | ‘"’ | ‘.’ | ‘;’ | ‘,’ opchar ::= ‘!’ | ‘#’ | ‘%’ | ‘&’ | ‘*’ | ‘+’ | ‘-’ | ‘/’ | ‘:’ | ‘<’ | ‘=’ | ‘>’ | ‘?’ | ‘@’ | ‘\’ | ‘^’ | ‘|’ | ‘~’ - “… and Unicode categories Sm, So” -printableChar ::= “all characters in [\u0020, \u007E] inclusive” + and any character in Unicode categories Sm or So +printableChar ::= all characters in [\u0020, \u007E] inclusive +UnicodeEscape ::= ‘\’ ‘u’ {‘u’} hexDigit hexDigit hexDigit hexDigit +hexDigit ::= ‘0’ | ... | ‘9’ | ‘A’ | ... | ‘F’ | ‘a’ | ... | ‘f’ charEscapeSeq ::= ‘\’ (‘b’ | ‘t’ | ‘n’ | ‘f’ | ‘r’ | ‘"’ | ‘'’ | ‘\’) +escapeSeq ::= UnicodeEscape | charEscapeSeq op ::= opchar {opchar} varid ::= lower idrest -alphaid ::= upper idrest - | varid +boundvarid ::= varid + | ‘`’ varid ‘`’ plainid ::= alphaid | op id ::= plainid - | ‘`’ { charNoBackQuoteOrNewline | UnicodeEscape | charEscapeSeq } ‘`’ + | ‘`’ { charNoBackQuoteOrNewline | escapeSeq } ‘`’ idrest ::= {letter | digit} [‘_’ op] quoteId ::= ‘'’ alphaid spliceId ::= ‘$’ alphaid ; integerLiteral ::= (decimalNumeral | hexNumeral) [‘L’ | ‘l’] -decimalNumeral ::= ‘0’ | nonZeroDigit [{digit | ‘_’} digit] +decimalNumeral ::= ‘0’ | digit [{digit | ‘_’} digit] hexNumeral ::= ‘0’ (‘x’ | ‘X’) hexDigit [{hexDigit | ‘_’} hexDigit] -nonZeroDigit ::= ‘1’ | … | ‘9’ floatingPointLiteral ::= [decimalNumeral] ‘.’ digit [{digit | ‘_’} digit] [exponentPart] [floatType] @@ -76,25 +73,25 @@ floatType ::= ‘F’ | ‘f’ | ‘D’ | ‘d’ booleanLiteral ::= ‘true’ | ‘false’ -characterLiteral ::= ‘'’ (printableChar | charEscapeSeq) ‘'’ +characterLiteral ::= ‘'’ (charNoQuoteOrNewline | escapeSeq) ‘'’ stringLiteral ::= ‘"’ {stringElement} ‘"’ | ‘"""’ multiLineChars ‘"""’ -stringElement ::= printableChar \ (‘"’ | ‘\’) - | UnicodeEscape - | 
charEscapeSeq -multiLineChars ::= {[‘"’] [‘"’] char \ ‘"’} {‘"’} -processedStringLiteral - ::= alphaid ‘"’ {[‘\’] processedStringPart | ‘\\’ | ‘\"’} ‘"’ - | alphaid ‘"""’ {[‘"’] [‘"’] char \ (‘"’ | ‘$’) | escape} {‘"’} ‘"""’ -processedStringPart +stringElement ::= charNoDoubleQuoteOrNewline + | escapeSeq +multiLineChars ::= {[‘"’] [‘"’] charNoDoubleQuote} {‘"’} + +interpolatedString + ::= alphaid ‘"’ {[‘\’] interpolatedStringPart | ‘\\’ | ‘\"’} ‘"’ + | alphaid ‘"""’ {[‘"’] [‘"’] char \ (‘"’ | ‘\$’) | escape} {‘"’} ‘"""’ +interpolatedStringPart ::= printableChar \ (‘"’ | ‘$’ | ‘\’) | escape -escape ::= ‘$$’ - | ‘$’ letter { letter | digit } - | ‘{’ Block [‘;’ whiteSpace stringFormat whiteSpace] ‘}’ -stringFormat ::= {printableChar \ (‘"’ | ‘}’ | ‘ ’ | ‘\t’ | ‘\n’)} - -symbolLiteral ::= ‘'’ plainid // until 2.13 +escape ::= ‘\$\$’ + | ‘\$"’ + | ‘\$’ alphaid + | ‘\$’ BlockExpr +alphaid ::= upper idrest + | varid comment ::= ‘/*’ “any sequence of characters; nested comments are allowed” ‘*/’ | ‘//’ “any sequence of characters up to end of line” @@ -105,7 +102,10 @@ semi ::= ‘;’ | nl {nl} ## Optional Braces -The lexical analyzer also inserts `indent` and `outdent` tokens that represent regions of indented code [at certain points](./other-new-features/indentation.md). +The principle of optional braces is that any keyword that can be followed by `{` can also be followed by an indented block, without needing an intervening `:`. +(Allowing an optional `:` would be counterproductive since it would introduce several ways to do the same thing.) + +The lexical analyzer inserts `indent` and `outdent` tokens that represent regions of indented code [at certain points](./other-new-features/indentation.md). In the context-free productions below we use the notation `<<< ts >>>` to indicate a token sequence `ts` that is either enclosed in a pair of braces `{ ts }` or that constitutes an indented region `indent ts outdent`. 
Analogously, the @@ -160,7 +160,7 @@ SimpleLiteral ::= [‘-’] integerLiteral | characterLiteral | stringLiteral Literal ::= SimpleLiteral - | processedStringLiteral + | interpolatedStringLiteral | symbolLiteral | ‘null’ @@ -249,6 +249,7 @@ Catches ::= ‘catch’ (Expr | ExprCaseClause) PostfixExpr ::= InfixExpr [id] -- only if language.postfixOperators is enabled InfixExpr ::= PrefixExpr | InfixExpr id [nl] InfixExpr + | InfixExpr id ColonArgument | InfixExpr MatchClause MatchClause ::= ‘match’ <<< CaseClauses >>> PrefixExpr ::= [PrefixOperator] SimpleExpr @@ -267,6 +268,11 @@ SimpleExpr ::= SimpleRef | SimpleExpr ‘.’ MatchClause | SimpleExpr TypeArgs | SimpleExpr ArgumentExprs + | SimpleExpr ColonArgument +ColonArgument ::= colon [LambdaStart] + indent (CaseClauses | Block) outdent +LambdaStart ::= FunParams (‘=>’ | ‘?=>’) + | HkTypeParamClause ‘=>’ Quoted ::= ‘'’ ‘{’ Block ‘}’ | ‘'’ ‘[’ Type ‘]’ ExprSplice ::= spliceId -- if inside quoted block @@ -306,7 +312,10 @@ TypeCaseClauses ::= TypeCaseClause { TypeCaseClause } TypeCaseClause ::= ‘case’ (InfixType | ‘_’) ‘=>’ Type [semi] Pattern ::= Pattern1 { ‘|’ Pattern1 } -Pattern1 ::= Pattern2 [‘:’ RefinedType] +Pattern1 ::= PatVar ‘:’ RefinedType + | [‘-’] integerLiteral ‘:’ RefinedType + | [‘-’] floatingPointLiteral ‘:’ RefinedType + | Pattern2 Pattern2 ::= [id ‘@’] InfixPattern [‘*’] InfixPattern ::= SimplePattern { id [nl] SimplePattern } SimplePattern ::= PatVar @@ -329,9 +338,6 @@ ArgumentPatterns ::= ‘(’ [Patterns] ‘)’ ClsTypeParamClause::= ‘[’ ClsTypeParam {‘,’ ClsTypeParam} ‘]’ ClsTypeParam ::= {Annotation} [‘+’ | ‘-’] id [HkTypeParamClause] TypeParamBounds -DefTypeParamClause::= ‘[’ DefTypeParam {‘,’ DefTypeParam} ‘]’ -DefTypeParam ::= {Annotation} id [HkTypeParamClause] TypeParamBounds - TypTypeParamClause::= ‘[’ TypTypeParam {‘,’ TypTypeParam} ‘]’ TypTypeParam ::= {Annotation} id [HkTypeParamClause] TypeBounds @@ -343,13 +349,20 @@ ClsParamClause ::= [nl] ‘(’ ClsParams ‘)’ | [nl] ‘(’ ‘using’ (ClsParams | 
FunArgTypes) ‘)’ ClsParams ::= ClsParam {‘,’ ClsParam} ClsParam ::= {Annotation} [{Modifier} (‘val’ | ‘var’) | ‘inline’] Param -Param ::= id ‘:’ ParamType [‘=’ Expr] -DefParamClauses ::= {DefParamClause} [[nl] ‘(’ [‘implicit’] DefParams ‘)’] -DefParamClause ::= [nl] ‘(’ DefParams ‘)’ | UsingParamClause -UsingParamClause ::= [nl] ‘(’ ‘using’ (DefParams | FunArgTypes) ‘)’ -DefParams ::= DefParam {‘,’ DefParam} -DefParam ::= {Annotation} [‘inline’] Param +TypelessClauses ::= TypelessClause {TypelessClause} +TypelessClause ::= DefTermParamClause + | UsingParamClause + +DefTypeParamClause::= [nl] ‘[’ DefTypeParam {‘,’ DefTypeParam} ‘]’ +DefTypeParam ::= {Annotation} id [HkTypeParamClause] TypeParamBounds +DefTermParamClause::= [nl] ‘(’ [DefTermParams] ‘)’ +UsingParamClause ::= [nl] ‘(’ ‘using’ (DefTermParams | FunArgTypes) ‘)’ +DefImplicitClause ::= [nl] ‘(’ ‘implicit’ DefTermParams ‘)’ + +DefTermParams ::= DefTermParam {‘,’ DefTermParam} +DefTermParam ::= {Annotation} [‘inline’] Param +Param ::= id ‘:’ ParamType [‘=’ Expr] ``` ### Bindings and Imports @@ -400,8 +413,8 @@ Dcl ::= RefineDcl ValDcl ::= ids ‘:’ Type VarDcl ::= ids ‘:’ Type DefDcl ::= DefSig ‘:’ Type -DefSig ::= id [DefTypeParamClause] DefParamClauses -TypeDcl ::= id [TypeParamClause] {FunParamClause} TypeBounds [‘=’ Type] +DefSig ::= id [DefTypeParamClause] [TypelessClauses] [DefImplicitClause] +TypeDcl ::= id [TypeParamClause] {FunParamClause} TypeBounds Def ::= ‘val’ PatDef | ‘var’ PatDef @@ -411,7 +424,7 @@ Def ::= ‘val’ PatDef PatDef ::= ids [‘:’ Type] ‘=’ Expr | Pattern2 [‘:’ Type] ‘=’ Expr DefDef ::= DefSig [‘:’ Type] ‘=’ Expr - | ‘this’ DefParamClause DefParamClauses ‘=’ ConstrExpr + | ‘this’ TypelessClauses [DefImplicitClause] ‘=’ ConstrExpr TmplDef ::= ([‘case’] ‘class’ | ‘trait’) ClassDef | [‘case’] ‘object’ ObjectDef @@ -423,10 +436,10 @@ ConstrMods ::= {Annotation} [AccessModifier] ObjectDef ::= id [Template] EnumDef ::= id ClassConstr InheritClauses EnumBody GivenDef ::= [GivenSig] (AnnotType 
[‘=’ Expr] | StructuralInstance) -GivenSig ::= [id] [DefTypeParamClause] {UsingParamClause} ‘:’ -- one of `id`, `DefParamClause`, `UsingParamClause` must be present +GivenSig ::= [id] [DefTypeParamClause] {UsingParamClause} ‘:’ -- one of `id`, `DefTypeParamClause`, `UsingParamClause` must be present StructuralInstance ::= ConstrApp {‘with’ ConstrApp} [‘with’ WithTemplateBody] Extension ::= ‘extension’ [DefTypeParamClause] {UsingParamClause} - ‘(’ DefParam ‘)’ {UsingParamClause} ExtMethods + ‘(’ DefTermParam ‘)’ {UsingParamClause} ExtMethods ExtMethods ::= ExtMethod | [nl] <<< ExtMethod {semi ExtMethod} >>> ExtMethod ::= {Annotation [nl]} {Modifier} ‘def’ DefDef | Export diff --git a/docs/_layouts/base.html b/docs/_layouts/base.html index 62823d08c751..bc8b3ab26bb0 100644 --- a/docs/_layouts/base.html +++ b/docs/_layouts/base.html @@ -1,9 +1,6 @@ ---- -extraCSS: - - css/color-brewer.css ---- + {{ content }} diff --git a/docs/_layouts/blog-page.html b/docs/_layouts/blog-page.html index c5d0fe8875e7..7d1a7439f68a 100644 --- a/docs/_layouts/blog-page.html +++ b/docs/_layouts/blog-page.html @@ -5,14 +5,25 @@

{{ page.title }}

{% if page.subTitle %}
@@ -22,14 +33,4 @@

{{ page.title }}

{{ content }} - - {% if page.author and page.authorImg %} -
-
- - - {{ page.author }} - -
- {% endif %}
diff --git a/docs/_spec/01-lexical-syntax.md b/docs/_spec/01-lexical-syntax.md new file mode 100644 index 000000000000..de11de10402f --- /dev/null +++ b/docs/_spec/01-lexical-syntax.md @@ -0,0 +1,567 @@ +--- +title: Lexical Syntax +layout: default +chapter: 1 +--- + +# Lexical Syntax + +Scala source code consists of Unicode text. + +The program text is tokenized as described in this chapter. +See the last section for special support for XML literals, which are parsed in _XML mode_. + +To construct tokens, characters are distinguished according to the following classes (Unicode general category given in parentheses): + +1. Whitespace characters. `\u0020 | \u0009 | \u000D | \u000A`. +1. Letters, which include lower case letters (`Ll`), upper case letters (`Lu`), title case letters (`Lt`), other letters (`Lo`), modifier letters (`Lm`), letter numerals (`Nl`) and the two characters `\u0024 ‘$’` and `\u005F ‘_’`. +1. Digits `‘0’ | ... | ‘9’`. +1. Parentheses `‘(’ | ‘)’ | ‘[’ | ‘]’ | ‘{’ | ‘}’ `. +1. Delimiter characters ``‘`’ | ‘'’ | ‘"’ | ‘.’ | ‘;’ | ‘,’ ``. +1. Operator characters. These consist of all printable ASCII characters (`\u0020` - `\u007E`) that are in none of the sets above, mathematical symbols (`Sm`) and other symbols (`So`). + +## Optional Braces + +The principle of optional braces is that any keyword that can be followed by `{` can also be followed by an indented block, without needing an intervening `:`. +(Allowing an optional `:` would be counterproductive since it would introduce several ways to do the same thing.) + +The lexical analyzer inserts `indent` and `outdent` tokens that represent regions of indented code [at certain points](./other-new-features/indentation.md). 
+ +´\color{red}{\text{TODO SCALA3: Port soft-modifier.md and link it here.}}´ + +In the context-free productions below we use the notation `<<< ts >>>` to indicate a token sequence `ts` that is either enclosed in a pair of braces `{ ts }` or that constitutes an indented region `indent ts outdent`. +Analogously, the notation `:<<< ts >>>` indicates a token sequence `ts` that is either enclosed in a pair of braces `{ ts }` or that constitutes an indented region `indent ts outdent` that follows a `colon` token. + +A `colon` token reads as the standard colon "`:`" but is generated instead of it where `colon` is legal according to the context free syntax, but only if the previous token is an alphanumeric identifier, a backticked identifier, or one of the tokens `this`, `super`, `new`, "`)`", and "`]`". + +``` +colon ::= ':' -- with side conditions explained above + <<< ts >>> ::= ‘{’ ts ‘}’ + | indent ts outdent +:<<< ts >>> ::= [nl] ‘{’ ts ‘}’ + | colon indent ts outdent +``` + +## Identifiers + +```ebnf +op ::= opchar {opchar} +varid ::= lower idrest +boundvarid ::= varid + | ‘`’ varid ‘`’ +alphaid ::= upper idrest + | varid +plainid ::= alphaid + | op +id ::= plainid + | ‘`’ { charNoBackQuoteOrNewline | escapeSeq } ‘`’ +idrest ::= {letter | digit} [‘_’ op] +escapeSeq ::= UnicodeEscape | charEscapeSeq +UnicodeEscape ::= ‘\’ ‘u’ {‘u’} hexDigit hexDigit hexDigit hexDigit +hexDigit ::= ‘0’ | ... | ‘9’ | ‘A’ | ... | ‘F’ | ‘a’ | ... | ‘f’ +``` + +There are three ways to form an identifier. +First, an identifier can start with a letter, followed by an arbitrary sequence of letters and digits. +This may be followed by underscore `‘_‘` characters and another string composed of either letters and digits or of operator characters. +Second, an identifier can start with an operator character followed by an arbitrary sequence of operator characters. +The preceding two forms are called _plain_ identifiers. 
+Finally, an identifier may also be formed by an arbitrary string between backquotes (host systems may impose some restrictions on which strings are legal for identifiers). +The identifier then is composed of all characters excluding the backquotes themselves. + +As usual, the longest match rule applies. +For instance, the string + +```scala +big_bob++=`def` +``` + +decomposes into the three identifiers `big_bob`, `++=`, and +`def`. + +The rules for pattern matching further distinguish between _variable identifiers_, which start with a lower case letter or `_`, and _constant identifiers_, which do not. + +For this purpose, lower case letters include not only a-z, but also all characters in Unicode category Ll (lowercase letter), as well as all letters that have contributory property Other_Lowercase, except characters in category Nl (letter numerals), which are never taken as lower case. + +The following are examples of variable identifiers: + +> ```scala +> x maxIndex p2p empty_? +> `yield` αρετη _y dot_product_* +> __system _MAX_LEN_ +> ªpple ʰelper +> ``` + +Some examples of constant identifiers are + +> ```scala +> + Object $reserved Džul ǂnûm +> ⅰ_ⅲ Ⅰ_Ⅲ ↁelerious ǃqhàà ʹthatsaletter +> ``` + +The ‘$’ character is reserved for compiler-synthesized identifiers. +User programs should not define identifiers that contain ‘$’ characters. + +### Regular keywords + +The following names are reserved words instead of being members of the syntactic class `id` of lexical identifiers. + +```scala +abstract case catch class def do else +enum export extends false final finally for +given if implicit import lazy match new +null object override package private protected return +sealed super then throw trait true try +type val var while with yield +: = <- => <: >: # +@ =>> ?=> +``` + +### Soft keywords + +Additionally, the following soft keywords are reserved only in some situations. 
+ +´\color{red}{\text{TODO SCALA3: Port soft-modifier.md and link it here.}}´ + +``` +as derives end extension infix inline opaque open transparent using | * + - +``` + + + +> When one needs to access Java identifiers that are reserved words in Scala, use backquote-enclosed strings. +> For instance, the statement `Thread.yield()` is illegal, since `yield` is a reserved word in Scala. +> However, here's a work-around: `` Thread.`yield`() `` + +## Newline Characters + +```ebnf +semi ::= ‘;’ | nl {nl} +``` + +Scala is a line-oriented language where statements may be terminated by semi-colons or newlines. +A newline in a Scala source text is treated as the special token “nl” if the three following criteria are satisfied: + +1. The token immediately preceding the newline can terminate a statement. +1. The token immediately following the newline can begin a statement. +1. The token appears in a region where newlines are enabled. + +The tokens that can terminate a statement are: literals, identifiers and the following delimiters and reserved words: + +```scala +this null true false return type +_ ) ] } +``` + +The tokens that can begin a statement are all Scala tokens _except_ the following delimiters and reserved words: + +```scala +catch else extends finally forSome match +with yield , . ; : = => <- <: <% +>: # [ ) ] } +``` + +A `case` token can begin a statement only if followed by a +`class` or `object` token. + +Newlines are enabled in: + +1. all of a Scala source file, except for nested regions where newlines are disabled, and +1. the interval between matching `{` and `}` brace tokens, except for nested regions where newlines are disabled. + +Newlines are disabled in: + +1. the interval between matching `(` and `)` parenthesis tokens, except for nested regions where newlines are enabled, and +1. the interval between matching `[` and `]` bracket tokens, except for nested regions where newlines are enabled. +1. 
The interval between a `case` token and its matching `=>` token, except for nested regions where newlines are enabled. +1. Any regions analyzed in [XML mode](#xml-mode). + +Note that the brace characters of `{...}` escapes in XML and string literals are not tokens, and therefore do not enclose a region where newlines are enabled. + +Normally, only a single `nl` token is inserted between two consecutive non-newline tokens which are on different lines, even if there are multiple lines between the two tokens. +However, if two tokens are separated by at least one completely blank line (i.e a line which contains no printable characters), then two `nl` tokens are inserted. + +The Scala grammar (given in full [here](13-syntax-summary.html)) contains productions where optional `nl` tokens, but not semicolons, are accepted. +This has the effect that a new line in one of these positions does not terminate an expression or statement. +These positions can be summarized as follows: + +Multiple newline tokens are accepted in the following places (note that a semicolon in place of the newline would be illegal in every one of these cases): + +- between the condition of a [conditional expression](06-expressions.html#conditional-expressions) or [while loop](06-expressions.html#while-loop-expressions) and the next following expression, +- between the enumerators of a [for-comprehension](06-expressions.html#for-comprehensions-and-for-loops) and the next following expression, and +- after the initial `type` keyword in a [type definition or declaration](04-basic-declarations-and-definitions.html#type-declarations-and-type-aliases). 
+ +A single new line token is accepted + +- in front of an opening brace ‘{’, if that brace is a legal continuation of the current statement or expression, +- after an [infix operator](06-expressions.html#prefix,-infix,-and-postfix-operations), if the first token on the next line can start an expression, +- in front of a [parameter clause](04-basic-declarations-and-definitions.html#function-declarations-and-definitions), and +- after an [annotation](11-annotations.html#user-defined-annotations). + +> The newline tokens between the two lines are not treated as statement separators. +> +> ```scala +> if (x > 0) +> x = x - 1 +> +> while (x > 0) +> x = x / 2 +> +> for (x <- 1 to 10) +> println(x) +> +> type +> IntList = List[Int] +> ``` + + + +> ```scala +> new Iterator[Int] +> { +> private var x = 0 +> def hasNext = true +> def next = { x += 1; x } +> } +> ``` +> +> With an additional newline character, the same code is interpreted as an object creation followed by a local block: +> +> ```scala +> new Iterator[Int] +> +> { +> private var x = 0 +> def hasNext = true +> def next = { x += 1; x } +> } +> ``` + + + +> ```scala +> x < 0 || +> x > 10 +> ``` +> +> With an additional newline character, the same code is interpreted as two expressions: +> +> ```scala +> x < 0 || +> +> x > 10 +> ``` + + + +> ```scala +> def func(x: Int) +> (y: Int) = x + y +> ``` +> +> With an additional newline character, the same code is interpreted as an abstract method definition and a syntactically illegal statement: +> +> ```scala +> def func(x: Int) +> +> (y: Int) = x + y +> ``` + + + +> ```scala +> @serializable +> protected class Data { ... } +> ``` +> +> With an additional newline character, the same code is interpreted as an attribute and a separate statement (which is syntactically illegal). +> +> ```scala +> @serializable +> +> protected class Data { ... } +> ``` + +## Literals + +There are literals for integer numbers, floating point numbers, characters, booleans, strings. 
+The syntax of these literals is in each case as in Java. + + + +```ebnf +Literal ::= [‘-’] integerLiteral + | [‘-’] floatingPointLiteral + | booleanLiteral + | characterLiteral + | stringLiteral + | interpolatedString + | ‘null’ +``` + +### Integer Literals + +```ebnf +integerLiteral ::= (decimalNumeral | hexNumeral) + [‘L’ | ‘l’] +decimalNumeral ::= ‘0’ | digit [{digit | ‘_’} digit] +hexNumeral ::= ‘0’ (‘x’ | ‘X’) hexDigit [{hexDigit | ‘_’} hexDigit] +``` + +Values of type `Int` are all integer numbers between $-2\^{31}$ and $2\^{31}-1$, inclusive. +Values of type `Long` are all integer numbers between $-2\^{63}$ and +$2\^{63}-1$, inclusive. +A compile-time error occurs if an integer literal denotes a number outside these ranges. + +Integer literals are usually of type `Int`, or of type `Long` when followed by a `L` or `l` suffix. +(Lowercase `l` is deprecated for reasons of legibility.) + +However, if the expected type [_pt_](06-expressions.html#expression-typing) of a literal in an expression is either `Byte`, `Short`, or `Char` and the integer number fits in the numeric range defined by the type, then the number is converted to type _pt_ and the literal's type is _pt_. +The numeric ranges given by these types are: + +| | | +|----------------|--------------------------| +|`Byte` | ´-2\^7´ to ´2\^7-1´ | +|`Short` | ´-2\^{15}´ to ´2\^{15}-1´| +|`Char` | ´0´ to ´2\^{16}-1´ | + +The digits of a numeric literal may be separated by arbitrarily many underscores for purposes of legibility. 
+ +> ```scala +> 0 21_000 0x7F -42L 0xFFFF_FFFF +> ``` + +### Floating Point Literals + +```ebnf +floatingPointLiteral + ::= [decimalNumeral] ‘.’ digit [{digit | ‘_’} digit] [exponentPart] [floatType] + | decimalNumeral exponentPart [floatType] + | decimalNumeral floatType +exponentPart ::= (‘E’ | ‘e’) [‘+’ | ‘-’] digit [{digit | ‘_’} digit] +``` + +Floating point literals are of type `Float` when followed by a floating point type suffix `F` or `f`, and are of type `Double` otherwise. +The type `Float` consists of all IEEE 754 32-bit single-precision binary floating point values, whereas the type `Double` consists of all IEEE 754 64-bit double-precision binary floating point values. + +If a floating point literal in a program is followed by a token starting with a letter, there must be at least one intervening whitespace character between the two tokens. + +> ```scala +> 0.0 1e30f 3.14159f 1.0e-100 .1 +> ``` + + + +> The phrase `1.toString` parses as three different tokens: the integer literal `1`, a `.`, and the identifier `toString`. + + + +> `1.` is not a valid floating point literal because the mandatory digit after the `.` is missing. + +### Boolean Literals + +```ebnf +booleanLiteral ::= ‘true’ | ‘false’ +``` + +The boolean literals `true` and `false` are members of type `Boolean`. + +### Character Literals + +```ebnf +characterLiteral ::= ‘'’ (charNoQuoteOrNewline | escapeSeq) ‘'’ +``` + +A character literal is a single character enclosed in quotes. +The character can be any Unicode character except the single quote delimiter or `\u000A` (LF) or `\u000D` (CR); or any Unicode character represented by an +[escape sequence](#escape-sequences). + +> ```scala +> 'a' '\u0041' '\n' '\t' +> ``` + +### String Literals + +```ebnf +stringLiteral ::= ‘"’ {stringElement} ‘"’ +stringElement ::= charNoDoubleQuoteOrNewline | escapeSeq +``` + +A string literal is a sequence of characters in double quotes. 
+The characters can be any Unicode character except the double quote delimiter or `\u000A` (LF) or `\u000D` (CR); or any Unicode character represented by an [escape sequence](#escape-sequences). + +If the string literal contains a double quote character, it must be escaped using +`"\""`. + +The value of a string literal is an instance of class `String`. + +> ```scala +> "Hello, world!\n" +> "\"Hello,\" replied the world." +> ``` + +#### Multi-Line String Literals + +```ebnf +stringLiteral ::= ‘"""’ multiLineChars ‘"""’ +multiLineChars ::= {[‘"’] [‘"’] charNoDoubleQuote} {‘"’} +``` + +A multi-line string literal is a sequence of characters enclosed in triple quotes `""" ... """`. +The sequence of characters is arbitrary, except that it may contain three or more consecutive quote characters only at the very end. +Characters must not necessarily be printable; newlines or other control characters are also permitted. +[Escape sequences](#escape-sequences) are not processed, except for Unicode escapes (this is deprecated since 2.13.2). + +> ```scala +> """the present string +> spans three +> lines.""" +> ``` +> +> This would produce the string: +> +> ```scala +> the present string +> spans three +> lines. +> ``` +> +> The Scala library contains a utility method `stripMargin` which can be used to strip leading whitespace from multi-line strings. +> The expression +> +> ```scala +> """the present string +> |spans three +> |lines.""".stripMargin +> ``` +> +> evaluates to +> +> ```scala +> the present string +> spans three +> lines. +> ``` +> +> Method `stripMargin` is defined in class +> [scala.collection.StringOps](https://www.scala-lang.org/api/current/scala/collection/StringOps.html#stripMargin:String). 
+ +#### Interpolated string + +```ebnf +interpolatedString ::= alphaid ‘"’ {[‘\’] interpolatedStringPart | ‘\\’ | ‘\"’} ‘"’ + | alphaid ‘"""’ {[‘"’] [‘"’] char \ (‘"’ | ‘$’) | escape} {‘"’} ‘"""’ +interpolatedStringPart ::= printableChar \ (‘"’ | ‘$’ | ‘\’) | escape +escape ::= ‘$$’ + | ‘$"’ + | ‘$’ alphaid + | ‘$’ BlockExpr +alphaid ::= upper idrest + | varid + +``` + +An interpolated string consists of an identifier starting with a letter immediately followed by a string literal. +There may be no whitespace characters or comments between the leading identifier and the opening quote `"` of the string. +The string literal in an interpolated string can be standard (single quote) or multi-line (triple quote). + +Inside an interpolated string none of the usual escape characters are interpreted no matter whether the string literal is normal (enclosed in single quotes) or multi-line (enclosed in triple quotes). +Note that the sequence `\"` does not close a normal string literal (enclosed in single quotes). + +There are three forms of dollar sign escape. +The most general form encloses an expression in `${` and `}`, i.e. `${expr}`. +The expression enclosed in the braces that follow the leading `$` character is of syntactical category BlockExpr. +Hence, it can contain multiple statements, and newlines are significant. +Single ‘$’-signs are not permitted in isolation in an interpolated string. +A single ‘$’-sign can still be obtained by doubling the ‘$’ character: ‘$$’. +A single ‘"’-sign can be obtained by the sequence ‘\$"’. + +The simpler form consists of a ‘$’-sign followed by an identifier starting with a letter and followed only by letters, digits, and underscore characters, e.g., `$id`. +The simpler form is expanded by putting braces around the identifier, e.g., `$id` is equivalent to `${id}`. +In the following, unless we explicitly state otherwise, we assume that this expansion has already been performed. + +The expanded expression is type checked normally. 
+Usually, `StringContext` will resolve to the default implementation in the scala package, but it could also be user-defined. +Note that new interpolators can also be added through implicit conversion of the built-in `scala.StringContext`. + +One could write an extension +```scala +implicit class StringInterpolation(s: StringContext) { + def id(args: Any*) = ??? +} +``` + +### Escape Sequences + +The following character escape sequences are recognized in character and string literals. + +| charEscapeSeq | unicode | name | char | +|---------------|----------|-----------------|--------| +| `‘\‘ ‘b‘` | `\u0008` | backspace | `BS` | +| `‘\‘ ‘t‘` | `\u0009` | horizontal tab | `HT` | +| `‘\‘ ‘n‘` | `\u000a` | linefeed | `LF` | +| `‘\‘ ‘f‘` | `\u000c` | form feed | `FF` | +| `‘\‘ ‘r‘` | `\u000d` | carriage return | `CR` | +| `‘\‘ ‘"‘` | `\u0022` | double quote | `"` | +| `‘\‘ ‘'‘` | `\u0027` | single quote | `'` | +| `‘\‘ ‘\‘` | `\u005c` | backslash | `\` | + +In addition, Unicode escape sequences of the form `\uxxxx`, where each `x` is a hex digit are recognized in character and string literals. + +It is a compile time error if a backslash character in a character or string literal does not start a valid escape sequence. + +## Whitespace and Comments + +Tokens may be separated by whitespace characters and/or comments. +Comments come in two forms: + +A single-line comment is a sequence of characters which starts with `//` and extends to the end of the line. + +A multi-line comment is a sequence of characters between `/*` and `*/`. +Multi-line comments may be nested, but are required to be properly nested. +Therefore, a comment like `/* /* */` will be rejected as having an unterminated comment. + +## Trailing Commas in Multi-line Expressions + +If a comma (`,`) is followed immediately, ignoring whitespace, by a newline and a closing parenthesis (`)`), bracket (`]`), or brace (`}`), then the comma is treated as a "trailing comma" and is ignored. 
+For example: + +```scala +foo( + 23, + "bar", + true, +) +``` + +## XML mode + +In order to allow literal inclusion of XML fragments, lexical analysis switches from Scala mode to XML mode when encountering an opening angle bracket ‘<’ in the following circumstance: +The ‘<’ must be preceded either by whitespace, an opening parenthesis or an opening brace and immediately followed by a character starting an XML name. + +```ebnf + ( whitespace | ‘(’ | ‘{’ ) ‘<’ (XNameStart | ‘!’ | ‘?’) + + XNameStart ::= ‘_’ | BaseChar | Ideographic // as in W3C XML, but without ‘:’ +``` + +The scanner switches from XML mode to Scala mode if either + +- the XML expression or the XML pattern started by the initial ‘<’ has been successfully parsed, or if +- the parser encounters an embedded Scala expression or pattern and forces the Scanner back to normal mode, until the Scala expression or pattern is successfully parsed. +In this case, since code and XML fragments can be nested, the parser has to maintain a stack that reflects the nesting of XML and Scala expressions adequately. + +Note that no Scala tokens are constructed in XML mode, and that comments are interpreted as text. + +> The following value definition uses an XML literal with two embedded Scala expressions: +> +> ```scala +> val b = +> The Scala Language Specification +> {scalaBook.version} +> {scalaBook.authors.mkList("", ", ", "")} +> +> ``` diff --git a/docs/_spec/02-identifiers-names-and-scopes.md b/docs/_spec/02-identifiers-names-and-scopes.md new file mode 100644 index 000000000000..2b34ae8844cf --- /dev/null +++ b/docs/_spec/02-identifiers-names-and-scopes.md @@ -0,0 +1,158 @@ +--- +title: Identifiers, Names & Scopes +layout: default +chapter: 2 +--- + +# Identifiers, Names and Scopes + +Names in Scala identify types, values, methods, and classes which are collectively called _entities_. 
+Names are introduced by local +[definitions and declarations](04-basic-declarations-and-definitions.html#basic-declarations-and-definitions), +[inheritance](05-classes-and-objects.html#class-members), +[import clauses](04-basic-declarations-and-definitions.html#import-clauses), or +[package clauses](09-top-level-definitions.html#packagings) +which are collectively called _bindings_. + +Bindings of different kinds have precedence defined on them: + +1. Definitions and declarations that are local, inherited, or made available by a package clause and also defined in the same compilation unit as the reference to them, have the highest precedence. +1. Explicit imports have the next highest precedence. +1. Wildcard imports have the next highest precedence. +1. Definitions made available by a package clause, but not also defined in the same compilation unit as the reference to them, as well as imports which are supplied by the compiler but not explicitly written in source code, have the lowest precedence. + +There are two different name spaces, one for [types](03-types.html#types) and one for [terms](06-expressions.html#expressions). +The same name may designate a type and a term, depending on the context where the name is used. + +A binding has a _scope_ in which the entity defined by a single +name can be accessed using a simple name. +Scopes are nested. +A binding in some inner scope _shadows_ bindings of lower precedence in the same scope as well as bindings of the same or lower precedence in outer scopes. + +Note that shadowing is only a partial order. +In the following example, neither binding of `x` shadows the other. +Consequently, the reference to `x` in the last line of the block is ambiguous. 
+ +```scala +val x = 1 +locally { + import p.X.x + x +} +``` + +A reference to an unqualified (type- or term-) identifier ´x´ is bound by the unique binding, which + +- defines an entity with name ´x´ in the same namespace as the identifier, and +- shadows all other bindings that define entities with name ´x´ in that namespace. + +It is an error if no such binding exists. +If ´x´ is bound by an import clause, then the simple name ´x´ is taken to be equivalent to the qualified name to which ´x´ is mapped by the import clause. +If ´x´ is bound by a definition or declaration, then ´x´ refers to the entity introduced by that binding. +In that case, the type of ´x´ is the type of the referenced entity. + +A reference to a qualified (type- or term-) identifier ´e.x´ refers to the member of the type ´T´ of ´e´ which has the name ´x´ in the same namespace as the identifier. +It is an error if ´T´ is not a [value type](03-types.html#value-types). +The type of ´e.x´ is the member type of the referenced entity in ´T´. + +Binding precedence implies that the way source is bundled in files affects name resolution. +In particular, imported names have higher precedence than names, defined in other files, that might otherwise be visible because they are defined in either the current package or an enclosing package. + +Note that a package definition is taken as lowest precedence, since packages are open and can be defined across arbitrary compilation units. + +```scala +package util { + import scala.util + class Random + object Test extends App { + println(new util.Random) // scala.util.Random + } +} +``` + +The compiler supplies imports in a preamble to every source file. 
+This preamble conceptually has the following form, where braces indicate nested scopes: + +```scala +import java.lang._ +{ + import scala._ + { + import Predef._ + { /* source */ } + } +} +``` + +These imports are taken as lowest precedence, so that they are always shadowed by user code, which may contain competing imports and definitions. +They also increase the nesting depth as shown, so that later imports shadow earlier ones. + +As a convenience, multiple bindings of a type identifier to the same underlying type is permitted. +This is possible when import clauses introduce a binding of a member type alias with the same binding precedence, typically through wildcard imports. +This allows redundant type aliases to be imported without introducing an ambiguity. + +```scala +object X { type T = annotation.tailrec } +object Y { type T = annotation.tailrec } +object Z { + import X._, Y._, annotation.{tailrec => T} // OK, all T mean tailrec + @T def f: Int = { f ; 42 } // error, f is not tail recursive +} +``` + +Similarly, imported aliases of names introduced by package statements are allowed, even though the names are strictly ambiguous: + +```scala +// c.scala +package p { class C } + +// xy.scala +import p._ +package p { class X extends C } +package q { class Y extends C } +``` + +The reference to `C` in the definition of `X` is strictly ambiguous because `C` is available by virtue of the package clause in a different file, and can't shadow the imported name. +But because the references are the same, the definition is taken as though it did shadow the import. + +###### Example + +Assume the following two definitions of objects named `X` in packages `p` and `q` in separate compilation units. + +```scala +package p { + object X { val x = 1; val y = 2 } +} + +package q { + object X { val x = true; val y = false } +} +``` + +The following program illustrates different kinds of bindings and precedences between them. 
+ +```scala +package p { // `X' bound by package clause +import Console._ // `println' bound by wildcard import +object Y { + println(s"L4: $X") // `X' refers to `p.X' here + locally { + import q._ // `X' bound by wildcard import + println(s"L7: $X") // `X' refers to `q.X' here + import X._ // `x' and `y' bound by wildcard import + println(s"L9: $x") // `x' refers to `q.X.x' here + locally { + val x = 3 // `x' bound by local definition + println(s"L12: $x") // `x' refers to constant `3' here + locally { + import q.X._ // `x' and `y' bound by wildcard import +// println(s"L15: $x") // reference to `x' is ambiguous here + import X.y // `y' bound by explicit import + println(s"L17: $y") // `y' refers to `q.X.y' here + locally { + val x = "abc" // `x' bound by local definition + import p.X._ // `x' and `y' bound by wildcard import +// println(s"L21: $y") // reference to `y' is ambiguous here + println(s"L22: $x") // `x' refers to string "abc" here +}}}}}} +``` diff --git a/docs/_spec/03-types.md b/docs/_spec/03-types.md new file mode 100644 index 000000000000..bbaac5de03a0 --- /dev/null +++ b/docs/_spec/03-types.md @@ -0,0 +1,853 @@ +--- +title: Types +layout: default +chapter: 3 +--- + +# Types + +```ebnf + Type ::= FunctionArgTypes ‘=>’ Type + | TypeLambdaParams ‘=>>’ Type + | InfixType + FunctionArgTypes ::= InfixType + | ‘(’ [ ParamType {‘,’ ParamType } ] ‘)’ + TypeLambdaParams ::= ‘[’ TypeLambdaParam {‘,’ TypeLambdaParam} ‘]’ + TypeLambdaParam ::= {Annotation} (id | ‘_’) [TypeParamClause] [‘>:’ Type] [‘<:’ Type] + InfixType ::= CompoundType {id [nl] CompoundType} + CompoundType ::= AnnotType {‘with’ AnnotType} [Refinement] + | Refinement + AnnotType ::= SimpleType {Annotation} + SimpleType ::= SimpleType TypeArgs + | SimpleType ‘#’ id + | StableId + | Path ‘.’ ‘type’ + | Literal + | ‘(’ Types ‘)’ + TypeArgs ::= ‘[’ Types ‘]’ + Types ::= Type {‘,’ Type} +``` + +We distinguish between proper types and type constructors, which take type parameters and yield types. 
+All types have a _kind_, either the kind of proper types or a _higher kind_. +A subset of proper types called _value types_ represents sets of (first-class) values. +Types are either _concrete_ or _abstract_. + +Every concrete value type can be represented as a _class type_, i.e. a [type designator](#type-designators) that refers to a [class or a trait](05-classes-and-objects.html#class-definitions) [^1], or as a [compound type](#compound-types) representing an intersection of types, possibly with a [refinement](#compound-types) that further constrains the types of its members. + + +Abstract types are introduced by [type parameters](04-basic-declarations-and-definitions.html#type-parameters) and [abstract type bindings](04-basic-declarations-and-definitions.html#type-declarations-and-type-aliases). +Parentheses in types can be used for grouping. + +[^1]: We assume that objects and packages also implicitly + define a class (of the same name as the object or package, but + inaccessible to user programs). + +Non-value types capture properties of identifiers that [are not values](#non-value-types). +For example, a [type constructor](#type-constructors) does not directly specify a type of values. +However, when a type constructor is applied to the correct type arguments, it yields a proper type, which may be a value type. + +Non-value types are expressed indirectly in Scala. +E.g., a method type is described by writing down a method signature, which in itself is not a real type, although it gives rise to a corresponding [method type](#method-types). +Type constructors are another example, as one can write `type Swap[m[_, _], a,b] = m[b, a]`, but there is no syntax to write the corresponding anonymous type function directly. + +`AnyKind` is the super type of all types in the Scala type system. +It has all possible kinds to encode [kind polymorphism](#kind-polymorphism). +As such, it is neither a value type nor a type constructor. 
+ +## Paths + +```ebnf +Path ::= StableId + | [id ‘.’] this +StableId ::= id + | Path ‘.’ id + | [id ‘.’] ‘super’ [ClassQualifier] ‘.’ id +ClassQualifier ::= ‘[’ id ‘]’ +``` + +Paths are not types themselves, but they can be a part of named types and in that function form a central role in Scala's type system. + +A path is one of the following. + +- The empty path ε (which cannot be written explicitly in user programs). +- ´C.´`this`, where ´C´ references a class. + The path `this` is taken as a shorthand for ´C.´`this` where ´C´ is the name of the class directly enclosing the reference. +- ´p.x´ where ´p´ is a path and ´x´ is a stable member of ´p´. + _Stable members_ are packages or members introduced by object definitions or by value definitions of [non-volatile types](#volatile-types). +- ´C.´`super`´.x´ or ´C.´`super`´[M].x´ + where ´C´ references a class and ´x´ references a stable member of the super class or designated parent class ´M´ of ´C´. + The prefix `super` is taken as a shorthand for ´C.´`super` where ´C´ is the name of the class directly enclosing the reference. + +A _stable identifier_ is a path which ends in an identifier. + +## Value Types + +Every value in Scala has a type which is of one of the following forms. + +### Singleton Types + +```ebnf +SimpleType ::= Path ‘.’ ‘type’ +``` + +A _singleton type_ is of the form ´p.´`type`. +Where ´p´ is a path pointing to a value which [conforms](06-expressions.html#expression-typing) to `scala.AnyRef`, the type denotes the set of values consisting of `null` and the value denoted by ´p´ (i.e., the value ´v´ for which `v eq p`). +Where the path does not conform to `scala.AnyRef` the type denotes the set consisting of only the value denoted by ´p´. + + + +### Literal Types + +```ebnf +SimpleType ::= Literal +``` + +A literal type `lit` is a special kind of singleton type which denotes the single literal value `lit`. 
+Thus, the type ascription `1: 1` gives the most precise type to the literal value `1`: the literal type `1`. + +At run time, an expression `e` is considered to have literal type `lit` if `e == lit`. +Concretely, the result of `e.isInstanceOf[lit]` and `e match { case _ : lit => }` is determined by evaluating `e == lit`. + +Literal types are available for all types for which there is dedicated syntax except `Unit`. +This includes the numeric types (other than `Byte` and `Short` which don't currently have syntax), `Boolean`, `Char` and `String`. + +### Stable Types +A _stable type_ is a singleton type, a literal type, or a type that is declared to be a subtype of trait `scala.Singleton`. + +### Type Projection + +```ebnf +SimpleType ::= SimpleType ‘#’ id +``` + +A _type projection_ ´T´#´x´ references the type member named ´x´ of type ´T´. + + + +### Type Designators + +```ebnf +SimpleType ::= StableId +``` + +A _type designator_ refers to a named value type. +It can be simple or qualified. +All such type designators are shorthands for type projections. + +Specifically, the unqualified type name ´t´ where ´t´ is bound in some class, object, or package ´C´ is taken as a shorthand for +´C.´`this.type#`´t´. +If ´t´ is not bound in a class, object, or package, then ´t´ is taken as a shorthand for ε`.type#`´t´. + +A qualified type designator has the form `p.t` where `p` is a [path](#paths) and _t_ is a type name. +Such a type designator is equivalent to the type projection `p.type#t`. + +###### Example + +Some type designators and their expansions are listed below. 
+We assume a local type parameter ´t´, a value `maintable` with a type member `Node` and the standard class `scala.Int`, + +| Designator | Expansion | +|-------------------- | --------------------------| +|t | ε.type#t | +|Int | scala.type#Int | +|scala.Int | scala.type#Int | +|data.maintable.Node | data.maintable.type#Node | + +### Parameterized Types + +```ebnf +SimpleType ::= SimpleType TypeArgs +TypeArgs ::= ‘[’ Types ‘]’ +``` + +A _parameterized type_ ´T[ T_1, ..., T_n ]´ consists of a type designator ´T´ and type arguments ´T_1, ..., T_n´ where ´n \geq 1´. +´T´ must refer to a type constructor which takes ´n´ type parameters ´a_1, ..., a_n´. + + +Say the type parameters have lower bounds ´L_1, ..., L_n´ and upper bounds ´U_1, ..., U_n´. +The parameterized type is well-formed if each type argument _conforms to its bounds_, i.e. ´\sigma L_i <: T_i <: \sigma U_i´ where ´\sigma´ is the substitution ´[ a_1 := T_1, ..., a_n := T_n ]´. + +#### Example Parameterized Types + +Given the partial type definitions: + +```scala +class TreeMap[A <: Comparable[A], B] { ... } +class List[A] { ... } +class I extends Comparable[I] { ... } + +class F[M[A], X] { ... } +class S[K <: String] { ... } +class G[M[Z <: I], I] { ... 
} +``` + +the following parameterized types are well-formed: + +```scala +TreeMap[I, String] +List[I] +List[List[Boolean]] + +F[List, Int] +G[S, String] +``` + +and the following types are ill-formed: + +```scala +TreeMap[I] // illegal: wrong number of parameters +TreeMap[List[I], Int] // illegal: type parameter not within bound + +F[Int, Boolean] // illegal: Int is not a type constructor +F[TreeMap, Int] // illegal: TreeMap takes two parameters, + // F expects a constructor taking one +G[S, Int] // illegal: S constrains its parameter to + // conform to String, + // G expects type constructor with a parameter + // that conforms to Int +``` + +#### Wildcard Type Argument + + +```ebnf +WildcardType ::= ‘_’ TypeBounds +``` + +A _wildcard type argument_ is of the form `_´\;´>:´\,L\,´<:´\,U´`. +A wildcard type must appear as a type argument of a parameterized type. +The parameterized type to which the wildcard type is applied cannot be an abstract type constructor. + +Both bound clauses may be omitted. +If both bounds are omitted, the real bounds are inferred from the bounds of the corresponding type parameter in the target type constructor. +Otherwise, if a lower bound clause `>:´\,L´` is missing, `>:´\,´scala.Nothing` is assumed. +Otherwise, if an upper bound clause `<:´\,U´` is missing, `<:´\,´scala.Any` is assumed. + +Given the [above type definitions](#example-parameterized-types), the following types are well-formed: + +```scala +List[_] // inferred as List[_ >: Nothing <: Any] +List[_ <: java.lang.Number] +S[_ <: String] +F[_, Boolean] +``` + +and the following code contains an ill-formed type: + +```scala +trait H[F[A]]: + def f: F[_] // illegal : an abstract type constructor + // cannot be applied to wildcard arguments. +``` + +Wildcard types may also appear as parts of [infix types](#infix-types), [function types](#function-types), or [tuple types](#tuple-types). +Their expansion is then the expansion in the equivalent parameterized type. 
+ +##### Simplification Rules + +Let ´T[T_1, ..., T_n]´ be a parameterized type. +Then, applying a wildcard type argument ´t´ of the form ´\\_ >: L <: U´ at the ´i´'th position obeys the following equivalences: + +- If the type parameter ´T_i´ is declared covariant, then ´t \equiv U´ +- If the type parameter ´T_i´ is declared contravariant, then ´t \equiv L´ + +### Tuple Types + +```ebnf +SimpleType ::= ‘(’ Types ‘)’ +``` + +A _tuple type_ ´(T_1, ..., T_n)´ where ´n \geq 2´ is an alias for the type `´T_1´ *: ... *: ´T_n´ *: scala.EmptyTuple`. + +Notes: +- `(´T´)` is just the type ´T´, and not `´T´ *: scala.EmptyTuple`. +- `()` is not a valid type, and not `scala.EmptyTuple`. + +If ´n \leq 22´, the type `´T_1´ *: ... *: ´T_n´ *: scala.EmptyTuple` is both a subtype and a supertype of tuple class `scala.Tuple´_n´[´T_1´, ..., ´T_n´]`. + +Tuple classes are case classes whose fields can be accessed using selectors `_1`, ..., `_n`. +Their functionality is abstracted in the corresponding `scala.Product_´n´` trait. +The _n_-ary tuple class and product trait are defined at least as follows in the standard Scala library (they might also add other methods and implement other traits). + +```scala +case class Tuple´_n´[+´T_1´, ..., +´T_n´](_1: ´T_1´, ..., _n: ´T_n´) +extends Product´_n´[´T_1´, ..., ´T_n´] + +trait Product´_n´[+´T_1´, ..., +´T_n´] extends Product: + override def productArity = ´n´ + def _1: ´T_1´ + ... + def _n: ´T_n´ +``` + +### Annotated Types + +```ebnf +AnnotType ::= SimpleType {Annotation} +``` + +An _annotated type_ ´T´ ´a_1, ..., a_n´ attaches [annotations](11-annotations.html#user-defined-annotations) ´a_1, ..., a_n´ to the type ´T´. 
+ +###### Example + +The following type adds the `@suspendable` annotation to the type `String`: + +```scala +String @suspendable +``` + +### Compound Types + +```ebnf +CompoundType ::= AnnotType {‘with’ AnnotType} [Refinement] + | Refinement +Refinement ::= [nl] ‘{’ RefineStat {semi RefineStat} ‘}’ +RefineStat ::= Dcl + | ‘type’ TypeDef + | +``` + +A _compound type_ ´T_1´ `with` ... `with` ´T_n \\{ R \\}´ represents objects with members as given in the component types ´T_1, ..., T_n´ and the refinement ´\\{ R \\}´. +A refinement ´\\{ R \\}´ contains declarations and type definitions. +If a declaration or definition overrides a declaration or definition in one of the component types ´T_1, ..., T_n´, the usual rules for [overriding](05-classes-and-objects.html#overriding) apply; otherwise the declaration or definition is said to be “structural” [^2]. + +[^2]: A reference to a structurally defined member (method call or access to a value or variable) may generate binary code that is significantly slower than an equivalent code to a non-structural member. + +Within a method declaration in a structural refinement, the type of any value parameter may only refer to type parameters or abstract types that are contained inside the refinement. +That is, it must refer either to a type parameter of the method itself, or to a type definition within the refinement. +This restriction does not apply to the method's result type. + +If no refinement is given, the empty refinement is implicitly added, i.e. ´T_1´ `with` ... `with` ´T_n´ is a shorthand for ´T_1´ `with` ... `with` ´T_n \\{\\}´. + +A compound type may also consist of just a refinement ´\\{ R \\}´ with no preceding component types. +Such a type is equivalent to `AnyRef` ´\\{ R \\}´. + +###### Example + +The following example shows how to declare and use a method which has a parameter type that contains a refinement with structural declarations. 
+ +```scala +case class Bird (val name: String) extends Object { + def fly(height: Int) = ... +... +} +case class Plane (val callsign: String) extends Object { + def fly(height: Int) = ... +... +} +def takeoff( + runway: Int, + r: { val callsign: String; def fly(height: Int) }) = { + tower.print(r.callsign + " requests take-off on runway " + runway) + tower.read(r.callsign + " is clear for take-off") + r.fly(1000) +} +val bird = new Bird("Polly the parrot"){ val callsign = name } +val a380 = new Plane("TZ-987") +takeoff(42, bird) +takeoff(89, a380) +``` + +Although `Bird` and `Plane` do not share any parent class other than `Object`, the parameter _r_ of method `takeoff` is defined using a refinement with structural declarations to accept any object that declares a value `callsign` and a `fly` method. + +### Infix Types + +```ebnf +InfixType ::= CompoundType {id [nl] CompoundType} +``` + +An _infix type_ ´T_1´ `op` ´T_2´ consists of an infix operator `op` which gets applied to two type operands ´T_1´ and ´T_2´. +The type is equivalent to the type application `op`´[T_1, T_2]´. +The infix operator `op` may be an arbitrary identifier. + +Type operators follow the same [precedence and associativity as term operators](06-expressions.html#prefix-infix-and-postfix-operations). +For example, `A + B * C` parses as `A + (B * C)` and `A | B & C` parses as `A | (B & C)`. +Type operators ending in a colon ‘:’ are right-associative; all other operators are left-associative. + +In a sequence of consecutive type infix operations ´t_0 \, \mathit{op_1} \, t_1 \, \mathit{op_2} \, ... \, \mathit{op_n} \, t_n´, all operators ´\mathit{op}\_1, ..., \mathit{op}\_n´ must have the same associativity. +If they are all left-associative, the sequence is interpreted as ´(... (t_0 \mathit{op_1} t_1) \mathit{op_2} ...) \mathit{op_n} t_n´, otherwise it is interpreted as ´t_0 \mathit{op_1} (t_1 \mathit{op_2} ( ... \mathit{op_n} t_n) ...)´. + +The type operators `|` and `&` are not really special. 
+Nevertheless, unless shadowed, they resolve to `scala.|` and `scala.&`, which represent [union and intersection types](#union-and-intersection-types), respectively. + +### Union and Intersection Types + +Syntactically, the types `S | T` and `S & T` are infix types, where the infix operators are `|` and `&`, respectively (see above). + +However, in this specification, ´S | T´ and ´S & T´ refer to the underlying core concepts of *union and intersection types*, respectively. + +- The type ´S | T´ represents the set of values that are represented by *either* ´S´ or ´T´. +- The type ´S & T´ represents the set of values that are represented by *both* ´S´ and ´T´. + +From the [conformance rules](#conformance) rules on union and intersection types, we can show that ´&´ and ´|´ are *commutative* and *associative*. +Moreover, `&` is distributive over `|`. +For any type ´A´, ´B´ and ´C´, all of the following relationships hold: + +- ´A & B \equiv B & A´, +- ´A | B \equiv B | A´, +- ´(A & B) & C \equiv A & (B & C)´, +- ´(A | B) | C \equiv A | (B | C)´, and +- ´A & (B | C) \equiv (A & B) | (A & C)´. + +If ´C´ is a type constructor, then ´C[A] & C[B]´ can be simplified using the following three rules: + +- If ´C´ is covariant, ´C[A] & C[B] \equiv C[A & B]´ +- If ´C´ is contravariant, ´C[A] & C[B] \equiv C[A | B]´ +- If ´C´ is invariant, emit a compile error + +From the above rules, we can derive the following conformance relationships: + +- When ´C´ is covariant, ´C[A & B] <: C[A] & C[B]´. +- When ´C´ is contravariant, ´C[A | B] <: C[A] & C[B]´. + +#### Join of a union type + +In some situations, a union type might need to be widened to a non-union type. +For this purpose, we define the _join_ of a union type ´T_1 | ... | T_n´ as the smallest intersection type of base class instances of ´T_1, ..., T_n´. +Note that union types might still appear as type arguments in the resulting type, this guarantees that the join is always finite. 
+ +For example, given + +```scala +trait C[+T] +trait D +trait E +class A extends C[A] with D +class B extends C[B] with D with E +``` + +The join of ´A | B´ is ´C[A | B] & D´ + +### Function Types + +```ebnf +Type ::= FunctionArgs ‘=>’ Type +FunctionArgs ::= InfixType + | ‘(’ [ ParamType {‘,’ ParamType } ] ‘)’ +``` + +The type ´(T_1, ..., T_n) \Rightarrow R´ represents the set of function values that take arguments of types ´T_1, ..., Tn´ and yield results of type ´R´. +The case of exactly one argument type ´T \Rightarrow R´ is a shorthand for ´(T) \Rightarrow R´. +An argument type of the form ´\Rightarrow T´ represents a [call-by-name parameter](04-basic-declarations-and-definitions.md#by-name-parameters) of type ´T´. + +Function types associate to the right, e.g. ´S \Rightarrow T \Rightarrow R´ is the same as ´S \Rightarrow (T \Rightarrow R)´. + +Function types are [covariant](04-basic-declarations-and-definitions.md#variance-annotations) in their result type and [contravariant](04-basic-declarations-and-definitions.md#variance-annotations) in their argument types. + +Function types are shorthands for class types that define an `apply` method. +Specifically, the ´n´-ary function type ´(T_1, ..., T_n) \Rightarrow R´ is a shorthand for the class type `Function´_n´[´T_1´, ..., ´T_n´, ´R´]`. +In particular ´() \Rightarrow R´ is a shorthand for class type `Function´_0´[´R´]`. + +Such class types behave as if they were instances of the following trait: + +```scala +trait Function´_n´[-´T_1´, ..., -´T_n´, +´R´]: + def apply(´x_1´: ´T_1´, ..., ´x_n´: ´T_n´): ´R´ +``` + +Their exact supertype and implementation can be consulted in the [function classes section](./12-the-scala-standard-library.md#the-function-classes) of the standard library page in this document. + +## Non-Value Types + +The types explained in the following do not denote sets of values. 
+ +### Method Types + +A _method type_ is denoted internally as ´(\mathit{Ps})U´, where ´(\mathit{Ps})´ is a sequence of parameter names and types ´(p_1:T_1, ..., p_n:T_n)´ for some ´n \geq 0´ and ´U´ is a (value or method) type. +This type represents named methods that take arguments named ´p_1, ..., p_n´ of types ´T_1, ..., T_n´ and that return a result of type ´U´. + +Method types associate to the right: ´(\mathit{Ps}\_1)(\mathit{Ps}\_2)U´ is treated as ´(\mathit{Ps}\_1)((\mathit{Ps}\_2)U)´. + +A special case are types of methods without any parameters. +They are written here `=> T`. Parameterless methods name expressions that are re-evaluated each time the parameterless method name is referenced. + + +Method types do not exist as types of values. +If a method name is used as a value, its type is [implicitly converted](06-expressions.html#implicit-conversions) to a corresponding function type. + +###### Example + +The declarations + +```scala +def a: Int +def b (x: Int): Boolean +def c (x: Int) (y: String, z: String): String +``` + +produce the typings + +```scala +a: => Int +b: (Int) Boolean +c: (Int) (String, String) String +``` + +### Polymorphic Method Types + +A polymorphic method type is denoted internally as `[´\mathit{tps}\,´]´T´` where `[´\mathit{tps}\,´]` is a type parameter section `[´a_1´ >: ´L_1´ <: ´U_1, ..., a_n´ >: ´L_n´ <: ´U_n´]` for some ´n \geq 0´ and ´T´ is a (value or method) type. +This type represents named methods that take type arguments `´S_1, ..., S_n´` which [conform](#parameterized-types) to the lower bounds `´L_1, ..., L_n´` and the upper bounds `´U_1, ..., U_n´` and that yield results of type ´T´. + +###### Example + +The declarations + +```scala +def empty[A]: List[A] +def union[A <: Comparable[A]] (x: Set[A], xs: Set[A]): Set[A] +``` + +produce the typings + +```scala +empty : [A >: Nothing <: Any] List[A] +union : [A >: Nothing <: Comparable[A]] (x: Set[A], xs: Set[A]) Set[A] +``` + +### Type Constructors + +``` +Type ::= ... 
| TypeLambdaParams ‘=>>’ Type +TypeParamClause ::= ‘[’ TypeParam {‘,’ TypeParam} ‘]’ +TypeLambdaParams ::= ‘[’ TypeLambdaParam {‘,’ TypeLambdaParam} ‘]’ +TypeLambdaParam ::= {Annotation} (id | ‘_’) [TypeParamClause] [‘>:’ Type] [‘<:’ Type] +``` + + + +A _type constructor_ is either: +- a _type lambda_, of the form `[´\mathit{tps}\,´] =>> ´T´` where `[´\mathit{tps}\,´]` is a type parameter clause `[´a_1´ >: ´L_1´ <: ´U_1, ..., a_n´ >: ´L_n´ <: ´U_n´]` for some ´n \gt 0´ and ´T´ is either a value type +or another type lambda. +- a reference to a [desugared type declaration](04-basic-declarations-and-definitions.html#type-declarations-and-type-aliases) upper-bounded by a type lambda. +- a reference to a [polymorphic class](05-classes-and-objects.html##class-definitions). + +Each type parameter ´a_i´ of a type lambda has a variance ´v_i´ which cannot be written down by the user but is inferred from the body of the type lambda to maximize the number of types that conform to the type lambda. + + +#### Inferred type parameter clause + +To each type constructor corresponds an _inferred type parameter clause_ which is computed as follow: +- For a type lambda, its type parameter clause (including variance annotations). +- For a type declaration upper-bounded by a type lambda ´T´, the inferred clause of ´T´. +- For a polymorphic class, its type parameter clause. + + + +## Kind Polymorphism + +Type parameters are normally partitioned into _kinds_, indicated by the top type of which it is a subtype. +Proper types are the types of values and are subtypes of `Any`. +Higher-kinded types are type constructors such as `List` or `Map`. +Covariant single argument type constructors such as `List` are subtypes of `[+X] =>> Any`. +The `Map` type constructor is a subtype of `[X, +Y] =>> Any`. + +A type can be used only as prescribed by its kind. 
+Subtypes of `Any` cannot be applied to type arguments whereas subtypes of `[X] =>> Any` _must_ be applied to a type argument, unless they are passed to type parameters of the same kind. + +A type parameter whose upper bound is [`scala.AnyKind`](https://scala-lang.org/api/3.x/scala/AnyKind.html) can have any kind and is called an _any-kinded type_. + +```scala +def f[T <: AnyKind] = ... +``` + +The actual type arguments of `f` can then be types of arbitrary kinds. +So the following are all legal: + +```scala +f[Int] +f[List] +f[Map] +f[[X] =>> String] +``` + +Since the actual kind of an any-kinded type is unknown, its usage is heavily restricted. +An any-kinded type can neither be the type of a value, nor be instantiated with type parameters. +The only thing one can do with an any-kinded type is to pass it to another any-kinded type argument. + +`AnyKind` plays a special role in Scala's subtype system. +It is a supertype of all other types, no matter what their kind is. +It is also assumed to be kind-compatible with all other types. +Furthermore, `AnyKind` is itself an any-kinded type, so it cannot be the type of values and it cannot be instantiated. + +## Base Types and Member Definitions + +Types of class members depend on the way the members are referenced. +Central here are three notions, namely: +1. the notion of the set of base types of a type ´T´, +1. the notion of a type ´T´ in some class ´C´ seen from some + prefix type ´S´, +1. the notion of the set of member bindings of some type ´T´. + +These notions are defined mutually recursively as follows. + +1. The set of _base types_ of a type is a set of class types, + given as follows. + - The base types of a class type ´C´ with parents ´T_1, ..., T_n´ are ´C´ itself, as well as the base types of the compound type `´T_1´ with ... with ´T_n´ { ´R´ }`. + - The base types of an aliased type are the base types of its alias. + - The base types of an abstract type are the base types of its upper bound. 
+ - The base types of a parameterized type `´C´[´T_1, ..., T_n´]` are the base types of type ´C´, where every occurrence of a type parameter ´a_i´ of ´C´ has been replaced by the corresponding parameter type ´T_i´. + - The base types of a singleton type `´p´.type` are the base types of the type of ´p´. + - The base types of a compound type `´T_1´ with ... with ´T_n´ { ´R´ }` are the _reduced union_ of the base classes of all ´T_i´'s. + This means: Let the multi-set ´\mathscr{S}´ be the multi-set-union of the base types of all ´T_i´'s. + If ´\mathscr{S}´ contains several type instances of the same class, say `´S^i´#´C´[´T^i_1, ..., T^i_n´]` ´(i \in I)´, then all those instances are replaced by one of them which conforms to all others. + It is an error if no such instance exists. + It follows that the reduced union, if it exists, produces a set of class types, where different types are instances of different classes. + - The base types of a type selection `´S´#´T´` are determined as follows. + If ´T´ is an alias or abstract type, the previous clauses apply. + Otherwise, ´T´ must be a (possibly parameterized) class type, which is defined in some class ´B´. + Then the base types of `´S´#´T´` are the base types of ´T´ in ´B´ seen from the prefix type ´S´. + +1. The notion of a type ´T´ _in class ´C´ seen from some prefix type ´S´_ makes sense only if the prefix type ´S´ has a type instance of class ´C´ as a base type, say `´S'´#´C´[´T_1, ..., T_n´]`. +Then we define as follows. + - If `´S´ = ´\epsilon´.type`, then ´T´ in ´C´ seen from ´S´ is ´T´ itself. + - Otherwise, if ´T´ is the ´i´'th type parameter of some class ´D´, then + - If ´S´ has a base type `´D´[´U_1, ..., U_n´]`, for some type parameters `[´U_1, ..., U_n´]`, then ´T´ in ´C´ seen from ´S´ is ´U_i´. + - Otherwise, if ´C´ is defined in a class ´C'´, then ´T´ in ´C´ seen from ´S´ is the same as ´T´ in ´C'´ seen from ´S'´. 
+    - Otherwise, if ´C´ is not defined in another class, then ´T´ in ´C´ seen from ´S´ is ´T´ itself.
+  - Otherwise, if ´T´ is the singleton type `´D´.this.type` for some class ´D´ then
+    - If ´D´ is a subclass of ´C´ and ´S´ has a type instance of class ´D´ among its base types, then ´T´ in ´C´ seen from ´S´ is ´S´.
+    - Otherwise, if ´C´ is defined in a class ´C'´, then ´T´ in ´C´ seen from ´S´ is the same as ´T´ in ´C'´ seen from ´S'´.
+    - Otherwise, if ´C´ is not defined in another class, then ´T´ in ´C´ seen from ´S´ is ´T´ itself.
+  - If ´T´ is some other type, then the described mapping is performed to all its type components.
+
+If ´T´ is a possibly parameterized class type, where ´T´'s class is defined in some other class ´D´, and ´S´ is some prefix type, then we use "´T´ seen from ´S´" as a shorthand for "´T´ in ´D´ seen from ´S´".
+
+1. The _member bindings_ of a type ´T´ are
+   1. all bindings ´d´ such that there exists a type instance of some class ´C´ among the base types of ´T´ and there exists a definition or declaration ´d'´ in ´C´ such that ´d´ results from ´d'´ by replacing every type ´T'´ in ´d'´ by ´T'´ in ´C´ seen from ´T´, and
+   2. all bindings of the type's [refinement](#compound-types), if it has one.
+2. The member bindings of ´S & T´ are all the bindings of ´S´ *and* all the bindings of ´T´.
+3. The member bindings of ´S | T´ are the member bindings of its [join](#join-of-a-union-type).
+
+The _definition_ of a type projection `S#T` is the member binding ´d_T´ of the type `T` in `S`.
+In that case, we also say that `S#T` _is defined by_ ´d_T´.
+
+## Relations between types
+
+We define the following relations between types.
+
+| Name | Symbolically | Interpretation |
+|------------------|----------------|----------------------------------------------------|
+| Equivalence | ´T \equiv U´ | ´T´ and ´U´ are interchangeable in all contexts. |
+| Conformance | ´T <: U´ | Type ´T´ conforms to ("is a subtype of") type ´U´. 
| +| Weak Conformance | ´T <:_w U´ | Augments conformance for primitive numeric types. | +| Compatibility | | Type ´T´ conforms to type ´U´ after conversions. | + +### Equivalence + +´\color{red}{\text{TODO SCALA3: Redefine equivalence as mutual conformance?}}´ + +Equivalence ´(\equiv)´ between types is the smallest congruence [^congruence] such that the following holds: + +- If ´t´ is defined by a type alias `type ´t´ = ´T´`, then ´t´ is equivalent to ´T´. +- If a path ´p´ has a singleton type `´q´.type`, then `´p´.type ´\equiv q´.type`. +- If ´O´ is defined by an object definition, and ´p´ is a path consisting only of package or object selectors and ending in ´O´, then `´O´.this.type ´\equiv p´.type`. +- Two [compound types](#compound-types) are equivalent if the sequences of their component are pairwise equivalent, and occur in the same order, and their refinements are equivalent. Two refinements are equivalent if they bind the same names and the modifiers, types and bounds of every declared entity are equivalent in both refinements. +- Two [method types](#method-types) are equivalent if: + - neither are implicit, or they both are [^implicit]; + - they have equivalent result types; + - they have the same number of parameters; and + - corresponding parameters have equivalent types. + Note that the names of parameters do not matter for method type equivalence. +- Two [polymorphic method types](#polymorphic-method-types) are equivalent if they have the same number of type parameters, and, after renaming one set of type parameters by another, the result types as well as lower and upper bounds of corresponding type parameters are equivalent. +- Two [type constructors](#type-constructors) are equivalent if they have the same number of type parameters, and, after renaming one list of type parameters by another, the result types as well as variances, lower and upper bounds of corresponding type parameters are equivalent. 
+
+[^congruence]: A congruence is an equivalence relation which is closed under formation of contexts.
+[^implicit]: A method type is implicit if the parameter section that defines it starts with the `implicit` keyword.
+
+### Conformance
+
+The conformance relation ´(<:)´ is the smallest transitive relation that satisfies the following conditions.
+
+- Conformance includes equivalence. If ´T \equiv U´ then ´T <: U´.
+- For every type `´T´` (of any kind), `scala.Nothing <: ´T´ <: scala.AnyKind`.
+- For every value type `´T´`, `´T´ <: scala.Any`.
+- For every type constructor `´T´` with type parameters `[´U_1´, ..., ´U_n´]`, `[´U_1´, ..., ´U_n´] =>> scala.Nothing <: ´T´ <: [´U_1´, ..., ´U_n´] =>> scala.Any`.
+- For every value type ´T´, `scala.Null <: ´T´` unless `´T´ <: scala.AnyVal`.
+- A type variable or abstract type ´t´ conforms to its upper bound and its lower bound conforms to ´t´.
+- A class type or parameterized type conforms to any of its base-types.
+- A singleton type `´p´.type` conforms to the type of the path ´p´.
+- A singleton type `´p´.type` conforms to the type `scala.Singleton`.
+- A type projection `´T´#´t´` conforms to `´U´#´t´` if ´T´ conforms to ´U´.
+- A parameterized type `´T´[´T_1´, ..., ´T_n´]` conforms to `´T´[´U_1´, ..., ´U_n´]` if the following conditions hold for ´i \in \{ 1, ..., n \}´:
+  1. If the ´i´'th type parameter of ´T´ is declared covariant, then ´T_i <: U_i´. [^argisnotwildcard]
+  1. If the ´i´'th type parameter of ´T´ is declared contravariant, then ´U_i <: T_i´. [^argisnotwildcard]
+  1. If the ´i´'th type parameter of ´T´ is declared neither covariant nor contravariant:
+     1. If neither ´T_i´ nor ´U_i´ are wildcard type arguments, then ´U_i \equiv T_i´.
+     1. If ´T_i´ is a wildcard type argument of the form ´\\_ >: L_1 <: H_1´ and ´U_i´ is a wildcard argument of the form ´\\_ >: L_2 <: H_2´, then ´L_2 <: L_1´ and ´H_1 <: H_2´ (i.e., the ´T_i´ "interval" is contained in the ´U_i´ "interval").
+     1. 
If ´U_i´ is a wildcard type argument of the form ´\\_ >: L_2 <: U_2´, then ´L_2 <: T_i´ and ´T_i <: U_2´. +- A compound type `´T_1´ with ... with ´T_n´ {´R\,´}` conforms to each of its component types ´T_i´. +- If ´T <: U_i´ for ´i \in \{ 1, ..., n \}´ and for every binding ´d´ of a type or value ´x´ in ´R´ there exists a member binding of ´x´ in ´T´ which subsumes ´d´, then ´T´ conforms to the compound type `´U_1´ with ... with ´U_n´ {´R\,´}`. +- If ´T <: U´, then ´T <: U | W´ and ´T <: W | U´. +- If ´T <: W´ and ´U <: W´, then ´T | U <: W´. +- If ´T <: U´ and ´T <: W´, then ´T <: U & W´. +- If ´T <: W´, then ´T & U <: W´ and ´U & T <: W´. +- If ´T_i \equiv T_i'´ for ´i \in \{ 1, ..., n\}´ and ´U´ conforms to ´U'´ then the method type ´(p_1:T_1, ..., p_n:T_n) U´ conforms to ´(p_1':T_1', ..., p_n':T_n') U'´. +- The polymorphic type ´[a_1 >: L_1 <: U_1, ..., a_n >: L_n <: U_n] T´ conforms to the polymorphic type ´[a_1 >: L_1' <: U_1', ..., a_n >: L_n' <: U_n'] T'´ if, assuming ´L_1' <: a_1 <: U_1', ..., L_n' <: a_n <: U_n'´ one has ´T <: T'´ and ´L_i <: L_i'´ and ´U_i' <: U_i´ for ´i \in \{ 1, ..., n \}´. +- Type constructors ´T´ and ´T'´ follow a similar discipline. +We characterize ´T´ and ´T'´ by their [inferred type parameter clauses](#inferred-type-parameter-clause) ´[a_1, ..., a_n]´ and ´[a_1', ..., a_n']´. +Then, ´T´ conforms to ´T'´ if any list ´[t_1, ..., t_n]´ -- with declared variances, bounds and higher-order type parameter clauses -- of valid type arguments for ´T'´ is also a valid list of type arguments for ´T´ and ´T[t_1, ..., t_n] <: T'[t_1, ..., t_n]´. +Note that this entails that: + - The bounds on ´a_i´ must be weaker than the corresponding bounds declared for ´a'_i´. + - The variance of ´a_i´ must match the variance of ´a'_i´, where covariance matches covariance, contravariance matches contravariance and any variance matches invariance. 
+  - Recursively, these restrictions apply to the corresponding higher-order type parameter clauses of ´a_i´ and ´a'_i´.
+
+ [^argisnotwildcard]: In these cases, if `T_i` and/or `U_i` are wildcard type arguments, the [simplification rules](#simplification-rules) for parameterized types allow them to be reduced to real types.
+
+A declaration or definition in some compound type of class type ´C´ _subsumes_ another declaration of the same name in some compound type or class type ´C'´, if one of the following holds.
+
+- A value declaration or definition that defines a name ´x´ with type ´T´ subsumes a value or method declaration that defines ´x´ with type ´T'´, provided ´T <: T'´.
+- A method declaration or definition that defines a name ´x´ with type ´T´ subsumes a method declaration that defines ´x´ with type ´T'´, provided ´T <: T'´.
+- A type alias `type ´t´[´T_1´, ..., ´T_n´] = ´T´` subsumes a type alias `type ´t´[´T_1´, ..., ´T_n´] = ´T'´` if ´T \equiv T'´.
+- A type declaration `type ´t´[´T_1´, ..., ´T_n´] >: ´L´ <: ´U´` subsumes a type declaration `type ´t´[´T_1´, ..., ´T_n´] >: ´L'´ <: ´U'´` if ´L' <: L´ and ´U <: U'´.
+- A type or class definition that binds a type name ´t´ subsumes an abstract type declaration `type t[´T_1´, ..., ´T_n´] >: L <: U` if ´L <: t <: U´.
+
+#### Least upper bounds and greatest lower bounds
+
+The ´(<:)´ relation forms a pre-order between types, i.e. it is transitive and reflexive.
+This allows us to define _least upper bounds_ and _greatest lower bounds_ of a set of types in terms of that order.
+
+- the _least upper bound_ of `A` and `B` is the smallest type `L` such that `A` <: `L` and `B` <: `L`.
+- the _greatest lower bound_ of `A` and `B` is the largest type `G` such that `G` <: `A` and `G` <: `B`.
+
+By construction, for all types `A` and `B`, the least upper bound of `A` and `B` is `A | B`, and their greatest lower bound is `A & B`.
+
+### Weak Conformance
+
+In some situations Scala uses a more general conformance relation. 
+A type ´S´ _weakly conforms_ to a type ´T´, written ´S <:_w T´, if ´S <: T´ or both ´S´ and ´T´ are primitive number types and ´S´ precedes ´T´ in the following ordering. + +```scala +Byte ´<:_w´ Short +Short ´<:_w´ Int +Char ´<:_w´ Int +Int ´<:_w´ Long +Long ´<:_w´ Float +Float ´<:_w´ Double +``` + +A _weak least upper bound_ is a least upper bound with respect to weak conformance. + +### Compatibility +A type ´T´ is _compatible_ to a type ´U´ if ´T´ (or its corresponding function type) [weakly conforms](#weak-conformance) to ´U´ after applying [eta-expansion](06-expressions.html#eta-expansion). +If ´T´ is a method type, it's converted to the corresponding function type. +If the types do not weakly conform, the following alternatives are checked in order: +- dropping by-name modifiers: if ´U´ is of the shape `´=> U'´` (and ´T´ is not), `´T <:_w U'´`; +- SAM conversion: if ´T´ corresponds to a function type, and ´U´ declares a single abstract method whose type [corresponds](06-expressions.html#sam-conversion) to the function type ´U'´, `´T <:_w U'´`. +- [implicit conversion](07-implicits.html#views): there's an implicit conversion from ´T´ to ´U´ in scope; + +#### Examples + +##### Function compatibility via SAM conversion + +Given the definitions + +```scala +def foo(x: Int => String): Unit +def foo(x: ToString): Unit + +trait ToString { def convert(x: Int): String } +``` + +The application `foo((x: Int) => x.toString)` [resolves](06-expressions.html#overloading-resolution) to the first overload, as it's more specific: +- `Int => String` is compatible to `ToString` -- when expecting a value of type `ToString`, you may pass a function literal from `Int` to `String`, as it will be SAM-converted to said function; +- `ToString` is not compatible to `Int => String` -- when expecting a function from `Int` to `String`, you may not pass a `ToString`. 
+ +## Volatile Types + +Type volatility approximates the possibility that a type parameter or abstract type instance of a type does not have any non-null values. +A value member of a volatile type cannot appear in a [path](#paths). + +A type is _volatile_ if it falls into one of four categories: + +A compound type `´T_1´ with ... with ´T_n´ {´R\,´}` is volatile if one of the following three conditions hold. + +1. One of ´T_2, ..., T_n´ is a type parameter or abstract type, or +1. ´T_1´ is an abstract type and either the refinement ´R´ or a type ´T_j´ for ´j > 1´ contributes an abstract member to the compound type, or +1. one of ´T_1, ..., T_n´ is a singleton type. + +Here, a type ´S´ _contributes an abstract member_ to a type ´T´ if ´S´ contains an abstract member that is also a member of ´T´. +A refinement ´R´ contributes an abstract member to a type ´T´ if ´R´ contains an abstract declaration which is also a member of ´T´. + +A type designator is volatile if it is an alias of a volatile type, or if it designates a type parameter or abstract type that has a volatile type as its upper bound. + +A singleton type `´p´.type` is volatile, if the underlying type of path ´p´ is volatile. + +## Type Erasure + +A type is called _generic_ if it contains type arguments or type variables. +_Type erasure_ is a mapping from (possibly generic) types to non-generic types. +We write ´|T|´ for the erasure of type ´T´. +The erasure mapping is defined as follows. + +- The erasure of `scala.AnyKind` is `Object`. +- The erasure of an alias type is the erasure of its right-hand side. +- The erasure of an abstract type is the erasure of its upper bound. +- The erasure of the parameterized type `scala.Array´[T_1]´` is `scala.Array´[|T_1|]´`. +- The erasure of every other parameterized type ´T[T_1, ..., T_n]´ is ´|T|´. +- The erasure of a singleton type `´p´.type` is the erasure of the type of ´p´. +- The erasure of a type projection `´T´#´x´` is `|´T´|#´x´`. 
+- The erasure of a compound type `´T_1´ with ... with ´T_n´ {´R\,´}` is the erasure of the intersection dominator of ´T_1, ..., T_n´.
+- The erasure of a union type ´S | T´ is the _erased least upper bound_ (_elub_) of the erasures of ´S´ and ´T´.
+- The erasure of an intersection type ´S & T´ is the _eglb_ (erased greatest lower bound) of the erasures of ´S´ and ´T´.
+
+The erased LUB is computed as follows:
+
+- if both arguments are arrays of objects, an array of the erased LUB of the element types
+- if both arguments are arrays of same primitives, an array of this primitive
+- if one argument is an array of primitives and the other is an array of objects, [`Object`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/lang/Object.html)
+- if one argument is an array, [`Object`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/lang/Object.html)
+- otherwise a common superclass or trait S of the argument classes, with the following two properties:
+  - S is minimal: no other common superclass or trait derives from S, and
+  - S is last: in the linearization of the first argument type ´|A|´ there are no minimal common superclasses or traits that come after S.
+  The reason to pick last is that we prefer classes over traits that way, which leads to more predictable bytecode and (?) faster dynamic dispatch. 
+ +The rules for ´eglb(A, B)´ are given below in pseudocode: + +``` +eglb(scala.Array[A], JArray[B]) = scala.Array[eglb(A, B)] +eglb(scala.Array[T], _) = scala.Array[T] +eglb(_, scala.Array[T]) = scala.Array[T] +eglb(A, B) = A if A extends B +eglb(A, B) = B if B extends A +eglb(A, _) = A if A is not a trait +eglb(_, B) = B if B is not a trait +eglb(A, _) = A // use first +``` diff --git a/docs/_spec/04-basic-declarations-and-definitions.md b/docs/_spec/04-basic-declarations-and-definitions.md new file mode 100644 index 000000000000..5c45cc5c7819 --- /dev/null +++ b/docs/_spec/04-basic-declarations-and-definitions.md @@ -0,0 +1,758 @@ +--- +title: Basic Declarations & Definitions +layout: default +chapter: 4 +--- + +# Basic Declarations and Definitions + +```ebnf +Dcl ::= ‘val’ ValDcl + | ‘var’ VarDcl + | ‘def’ FunDcl + | ‘type’ {nl} TypeDcl +PatVarDef ::= ‘val’ PatDef + | ‘var’ VarDef +Def ::= PatVarDef + | ‘def’ FunDef + | ‘type’ {nl} TypeDef + | TmplDef +``` + +A _declaration_ introduces names and assigns them types. +It can form part of a [class definition](05-classes-and-objects.html#templates) or of a refinement in a [compound type](03-types.html#compound-types). + +A _definition_ introduces names that denote terms or types. +It can form part of an object or class definition or it can be local to a block. +Both declarations and definitions produce _bindings_ that associate type names with type definitions or bounds, and that associate term names with types. + +The scope of a name introduced by a declaration or definition is the whole statement sequence containing the binding. +However, there is a restriction on forward references in blocks: +In a statement sequence ´s_1 ... s_n´ making up a block, if a simple name in ´s_i´ refers to an entity defined by ´s_j´ where ´j \geq i´, then for all ´s_k´ between and including ´s_i´ and ´s_j´, + +- ´s_k´ cannot be a variable definition. +- If ´s_k´ is a value definition, it must be lazy. 
+ + + +## Value Declarations and Definitions + +```ebnf +Dcl ::= ‘val’ ValDcl +ValDcl ::= ids ‘:’ Type +PatVarDef ::= ‘val’ PatDef +PatDef ::= Pattern2 {‘,’ Pattern2} [‘:’ Type] ‘=’ Expr +ids ::= id {‘,’ id} +``` + +A value declaration `val ´x´: ´T´` introduces ´x´ as a name of a value of type ´T´. + +A value definition `val ´x´: ´T´ = ´e´` defines ´x´ as a name of the value that results from the evaluation of ´e´. +If the value definition is not recursive, the type ´T´ may be omitted, in which case the [packed type](06-expressions.html#expression-typing) of expression ´e´ is assumed. +If a type ´T´ is given, then ´e´ is expected to conform to it. + +Evaluation of the value definition implies evaluation of its right-hand side ´e´, unless it has the modifier `lazy`. +The effect of the value definition is to bind ´x´ to the value of ´e´ +converted to type ´T´. +A `lazy` value definition evaluates its right hand side ´e´ the first time the value is accessed. + +A _constant value definition_ is of the form + +```scala +final val x = e +``` + +where `e` is a [constant expression](06-expressions.html#constant-expressions). +The `final` modifier must be present and no type annotation may be given. +References to the constant value `x` are themselves treated as constant expressions; in the generated code they are replaced by the definition's right-hand side `e`. + +Value definitions can alternatively have a [pattern](08-pattern-matching.html#patterns) as left-hand side. +If ´p´ is some pattern other than a simple name or a name followed by a colon and a type, then the value definition `val ´p´ = ´e´` is expanded as follows: + +1. If the pattern ´p´ has bound variables ´x_1, ..., x_n´, where ´n > 1´: + +```scala +val ´\$x´ = ´e´ match {case ´p´ => (´x_1, ..., x_n´)} +val ´x_1´ = ´\$x´._1 +... +val ´x_n´ = ´\$x´._n +``` + +Here, ´\$x´ is a fresh name. + +2. If ´p´ has a unique bound variable ´x´: + +```scala +val ´x´ = ´e´ match { case ´p´ => ´x´ } +``` + +3. 
If ´p´ has no bound variables: + +```scala +´e´ match { case ´p´ => ()} +``` + +###### Example + +The following are examples of value definitions + +```scala +val pi = 3.1415 +val pi: Double = 3.1415 // equivalent to first definition +val Some(x) = f() // a pattern definition +val x :: xs = mylist // an infix pattern definition +``` + +The last two definitions have the following expansions. + +```scala +val x = f() match { case Some(x) => x } + +val x´\$´ = mylist match { case x :: xs => (x, xs) } +val x = x´\$´._1 +val xs = x´\$´._2 +``` + +The name of any declared or defined value may not end in `_=`. + +A value declaration `val ´x_1, ..., x_n´: ´T´` is a shorthand for the sequence of value declarations `val ´x_1´: ´T´; ...; val ´x_n´: ´T´`. +A value definition `val ´p_1, ..., p_n´ = ´e´` is a shorthand for the sequence of value definitions `val ´p_1´ = ´e´; ...; val ´p_n´ = ´e´`. +A value definition `val ´p_1, ..., p_n: T´ = ´e´` is a shorthand for the sequence of value definitions `val ´p_1: T´ = ´e´; ...; val ´p_n: T´ = ´e´`. + +## Variable Declarations and Definitions + +```ebnf +Dcl ::= ‘var’ VarDcl +PatVarDef ::= ‘var’ VarDef +VarDcl ::= ids ‘:’ Type +VarDef ::= PatDef + | ids ‘:’ Type ‘=’ ‘_’ +``` + +A variable declaration `var ´x´: ´T´` is equivalent to the declarations of both a _getter method_ ´x´ *and* a _setter method_ `´x´_=`: + +```scala +def ´x´: ´T´ +def ´x´_= (´y´: ´T´): Unit +``` + +An implementation of a class may _define_ a declared variable using a variable definition, or by defining the corresponding setter and getter methods. + +A variable definition `var ´x´: ´T´ = ´e´` introduces a mutable variable with type ´T´ and initial value as given by the expression ´e´. +The type ´T´ can be omitted, in which case the type of ´e´ is assumed. +If ´T´ is given, then ´e´ is expected to [conform to it](06-expressions.html#expression-typing). + +Variable definitions can alternatively have a [pattern](08-pattern-matching.html#patterns) as left-hand side. 
+A variable definition `var ´p´ = ´e´` where ´p´ is a pattern other than a simple name or a name followed by a colon and a type is expanded in the same way as a [value definition](#value-declarations-and-definitions) `val ´p´ = ´e´`, except that the free names in ´p´ are introduced as mutable variables, not values. + +The name of any declared or defined variable may not end in `_=`. + +A variable definition `var ´x´: ´T´ = _` can appear only as a member of a template. +It introduces a mutable field with type ´T´ and a default initial value. +The default value depends on the type ´T´ as follows: + +| default | type ´T´ | +|----------|------------------------------------| +|`0` | `Int` or one of its subrange types | +|`0L` | `Long` | +|`0.0f` | `Float` | +|`0.0d` | `Double` | +|`false` | `Boolean` | +|`()` | `Unit` | +|`null` | all other types | + +When they occur as members of a template, both forms of variable definition also introduce a getter method ´x´ which returns the value currently assigned to the variable, as well as a setter method `´x´_=` which changes the value currently assigned to the variable. +The methods have the same signatures as for a variable declaration. +The template then has these getter and setter methods as members, whereas the original variable cannot be accessed directly as a template member. + +###### Example + +The following example shows how _properties_ can be simulated in Scala. +It defines a class `TimeOfDayVar` of time values with updatable integer fields representing hours, minutes, and seconds. +Its implementation contains tests that allow only legal values to be assigned to these fields. +The user code, on the other hand, accesses these fields just like normal variables. 
+ +```scala +class TimeOfDayVar { + private var h: Int = 0 + private var m: Int = 0 + private var s: Int = 0 + + def hours = h + def hours_= (h: Int) = if (0 <= h && h < 24) this.h = h + else throw new DateError() + + def minutes = m + def minutes_= (m: Int) = if (0 <= m && m < 60) this.m = m + else throw new DateError() + + def seconds = s + def seconds_= (s: Int) = if (0 <= s && s < 60) this.s = s + else throw new DateError() +} +val d = new TimeOfDayVar +d.hours = 8; d.minutes = 30; d.seconds = 0 +d.hours = 25 // throws a DateError exception +``` + +A variable declaration `var ´x_1, ..., x_n´: ´T´` is a shorthand for the sequence of variable declarations `var ´x_1´: ´T´; ...; var ´x_n´: ´T´`. +A variable definition `var ´x_1, ..., x_n´ = ´e´` is a shorthand for the sequence of variable definitions `var ´x_1´ = ´e´; ...; var ´x_n´ = ´e´`. +A variable definition `var ´x_1, ..., x_n: T´ = ´e´` is a shorthand for the sequence of variable definitions `var ´x_1: T´ = ´e´; ...; var ´x_n: T´ = ´e´`. + +## Type Declarations and Type Aliases + + + +```ebnf +Dcl ::= ‘type’ {nl} TypeDcl +TypeDcl ::= id [TypeParamClause] [‘>:’ Type] [‘<:’ Type] +Def ::= ‘type’ {nl} TypeDef +TypeDef ::= id [TypeParamClause] ‘=’ Type +``` + +### Desugaring of parameterized type declarations +A parameterized type declaration is desugared into an unparameterized type declaration +whose bounds are type lambdas with explicit variance annotations. 
+ +#### Abstract Type +An abstract type +```scala +type ´t´[´\mathit{tps}\,´] >: ´L´ <: ´U´ +``` +is desugared into an unparameterized abstract type as follow: +- If `L` conforms to `Nothing`, then, + + ```scala +type ´t´ >: Nothing + <: [´\mathit{tps'}\,´] =>> ´U´ + ``` +- otherwise, + + ```scala +type ´t´ >: [´\mathit{tps'}\,´] =>> ´L´ + <: [´\mathit{tps'}\,´] =>> ´U´ + ``` + +If at least one of the ´\mathit{tps}´ contains an explicit variance annotation, then ´\mathit{tps'} = \mathit{tps}´, otherwise we infer the variance of each type parameter as with the user-written type lambda `[´\mathit{tps}\,´] =>> ´U´`. + +The same desugaring applies to type parameters. For instance, +```scala +[F[X] <: Coll[X]] +``` +is treated as a shorthand for +```scala +[F >: Nothing <: [X] =>> Coll[X]] +``` + +#### Type Alias +A parameterized type alias +```scala +type ´t´[´\mathit{tps}\,´] = ´T´ +``` +is desugared into an unparameterized type alias +```scala +type ´t´ = [´\mathit{tps'}\,´] =>> ´T´ +``` +where ´\mathit{tps'}´ is computed as in the previous case. + +´\color{red}{\text{TODO SCALA3: Everything else in this section (and the next one +on type parameters) needs to be rewritten to take into account the desugaring described above.}}´ + +A _type declaration_ `type ´t´[´\mathit{tps}\,´] >: ´L´ <: ´U´` declares ´t´ to be an abstract type with lower bound type ´L´ and upper bound type ´U´. +If the type parameter clause `[´\mathit{tps}\,´]` is omitted, ´t´ abstracts over a proper type, otherwise ´t´ stands for a type constructor that accepts type arguments as described by the type parameter clause. + +If a type declaration appears as a member declaration of a type, implementations of the type may implement ´t´ with any type ´T´ for which ´L <: T <: U´. +It is a compile-time error if ´L´ does not conform to ´U´. +Either or both bounds may be omitted. +If the lower bound ´L´ is absent, the bottom type `scala.Nothing` is assumed. 
+If the upper bound ´U´ is absent, the top type `scala.Any` is assumed. + +A type constructor declaration imposes additional restrictions on the concrete types for which ´t´ may stand. +Besides the bounds ´L´ and ´U´, the type parameter clause may impose higher-order bounds and variances, as governed by the [conformance of type constructors](03-types.html#conformance). + +The scope of a type parameter extends over the bounds `>: ´L´ <: ´U´` and the type parameter clause ´\mathit{tps}´ itself. +A higher-order type parameter clause (of an abstract type constructor ´tc´) has the same kind of scope, restricted to the declaration of the type parameter ´tc´. + +To illustrate nested scoping, these declarations are all equivalent: `type t[m[x] <: Bound[x], Bound[x]]`, `type t[m[x] <: Bound[x], Bound[y]]` and `type t[m[x] <: Bound[x], Bound[_]]`, as the scope of, e.g., the type parameter of ´m´ is limited to the declaration of ´m´. +In all of them, ´t´ is an abstract type member that abstracts over two type constructors: ´m´ stands for a type constructor that takes one type parameter and that must be a subtype of ´Bound´, ´t´'s second type constructor parameter. +`t[MutableList, Iterable]` is a valid use of ´t´. + +A _type alias_ `type ´t´ = ´T´` defines ´t´ to be an alias name for the type ´T´. +The left hand side of a type alias may have a type parameter clause, e.g. `type ´t´[´\mathit{tps}\,´] = ´T´`. +The scope of a type parameter extends over the right hand side ´T´ and the type parameter clause ´\mathit{tps}´ itself. + +The scope rules for [definitions](#basic-declarations-and-definitions) and [type parameters](#method-declarations-and-definitions) make it possible that a type name appears in its own bound or in its right-hand side. +However, it is a static error if a type alias refers recursively to the defined type constructor itself. +That is, the type ´T´ in a type alias `type ´t´[´\mathit{tps}\,´] = ´T´` may not refer directly or indirectly to the name ´t´. 
+It is also an error if an abstract type is directly or indirectly its own upper or lower bound. + +###### Example + +The following are legal type declarations and definitions: + +```scala +type IntList = List[Integer] +type T <: Comparable[T] +type Two[A] = Tuple2[A, A] +type MyCollection[+X] <: Iterable[X] +``` + +The following are illegal: + +```scala +type Abs = Comparable[Abs] // recursive type alias + +type S <: T // S, T are bounded by themselves. +type T <: S + +type T >: Comparable[T.That] // Cannot select from T. + // T is a type, not a value +type MyCollection <: Iterable // Type constructor members must explicitly + // state their type parameters. +``` + +If a type alias `type ´t´[´\mathit{tps}\,´] = ´S´` refers to a class type ´S´, the name ´t´ can also be used as a constructor for objects of type ´S´. + +###### Example + +Suppose we make `Pair` an alias of the parameterized class `Tuple2`, as follows: + +```scala +type Pair[+A, +B] = Tuple2[A, B] +object Pair { + def apply[A, B](x: A, y: B) = Tuple2(x, y) + def unapply[A, B](x: Tuple2[A, B]): Option[Tuple2[A, B]] = Some(x) +} +``` + +As a consequence, for any two types ´S´ and ´T´, the type `Pair[´S´, ´T\,´]` is equivalent to the type `Tuple2[´S´, ´T\,´]`. +`Pair` can also be used as a constructor instead of `Tuple2`, as in: + +```scala +val x: Pair[Int, String] = new Pair(1, "abc") +``` + +## Type Parameters + +```ebnf +TypeParamClause ::= ‘[’ VariantTypeParam {‘,’ VariantTypeParam} ‘]’ +VariantTypeParam ::= {Annotation} [‘+’ | ‘-’] TypeParam +TypeParam ::= (id | ‘_’) [TypeParamClause] [‘>:’ Type] [‘<:’ Type] [‘:’ Type] +``` + +Type parameters appear in type definitions, class definitions, and method definitions. +In this section we consider only type parameter definitions with lower bounds `>: ´L´` and upper bounds `<: ´U´` whereas a discussion of context bounds `: ´U´` and view bounds `<% ´U´` is deferred to [here](07-implicits.html#context-bounds-and-view-bounds). 
+ +The most general form of a proper type parameter is +`´@a_1 ... @a_n´ ´\pm´ ´t´ >: ´L´ <: ´U´`. +Here, ´L´, and ´U´ are lower and upper bounds that constrain possible type arguments for the parameter. +It is a compile-time error if ´L´ does not conform to ´U´. +´\pm´ is a _variance_, i.e. an optional prefix of either `+`, or `-`. One or more annotations may precede the type parameter. + + + + + +The names of all type parameters must be pairwise different in their enclosing type parameter clause. +The scope of a type parameter includes in each case the whole type parameter clause. +Therefore it is possible that a type parameter appears as part of its own bounds or the bounds of other type parameters in the same clause. +However, a type parameter may not be bounded directly or indirectly by itself. + +A type constructor parameter adds a nested type parameter clause to the type parameter. +The most general form of a type constructor parameter is `´@a_1 ... @a_n \pm t[\mathit{tps}\,]´ >: ´L´ <: ´U´`. + +The above scoping restrictions are generalized to the case of nested type parameter clauses, which declare higher-order type parameters. +Higher-order type parameters (the type parameters of a type parameter ´t´) are only visible in their immediately surrounding parameter clause (possibly including clauses at a deeper nesting level) and in the bounds of ´t´. +Therefore, their names must only be pairwise different from the names of other visible parameters. +Since the names of higher-order type parameters are thus often irrelevant, they may be denoted with a `‘_’`, which is nowhere visible. 
+ +###### Example +Here are some well-formed type parameter clauses: + +```scala +[S, T] +[@specialized T, U] +[Ex <: Throwable] +[A <: Comparable[B], B <: A] +[A, B >: A, C >: A <: B] +[M[X], N[X]] +[M[_], N[_]] // equivalent to previous clause +[M[X <: Bound[X]], Bound[_]] +[M[+X] <: Iterable[X]] +``` + +The following type parameter clauses are illegal: + +```scala +[A >: A] // illegal, `A' has itself as bound +[A <: B, B <: C, C <: A] // illegal, `A' has itself as bound +[A, B, C >: A <: B] // illegal lower bound `A' of `C' does + // not conform to upper bound `B'. +``` + +## Variance Annotations + +Variance annotations indicate how instances of parameterized types vary with respect to [subtyping](03-types.html#conformance). +A ‘+’ variance indicates a covariant dependency, a ‘-’ variance indicates a contravariant dependency, and a missing variance indication indicates an invariant dependency. + +A variance annotation constrains the way the annotated type variable may appear in the type or class which binds the type parameter. +In a type definition `type ´T´[´\mathit{tps}\,´] = ´S´`, or a type declaration `type ´T´[´\mathit{tps}\,´] >: ´L´ <: ´U´` type parameters labeled ‘+’ must only appear in covariant position whereas type parameters labeled ‘-’ must only appear in contravariant position. +Analogously, for a class definition `class ´C´[´\mathit{tps}\,´](´\mathit{ps}\,´) extends ´T´ { ´x´: ´S´ => ...}`, type parameters labeled ‘+’ must only appear in covariant position in the self type ´S´ and the template ´T´, whereas type parameters labeled ‘-’ must only appear in contravariant position. + +The variance position of a type parameter in a type or template is defined as follows. +Let the opposite of covariance be contravariance, and the opposite of invariance be itself. +The top-level of the type or template is always in covariant position. +The variance position changes at the following constructs. 
+ +- The variance position of a method parameter is the opposite of the variance position of the enclosing parameter clause. +- The variance position of a type parameter is the opposite of the variance position of the enclosing type parameter clause. +- The variance position of the lower bound of a type declaration or type parameter is the opposite of the variance position of the type declaration or parameter. +- The type of a mutable variable is always in invariant position. +- The right-hand side of a type alias is always in invariant position. +- The prefix ´S´ of a type selection `´S´#´T´` is always in invariant position. +- For a type argument ´T´ of a type `´S´[´... T ...´ ]`: +If the corresponding type parameter is invariant, then ´T´ is in invariant position. +If the corresponding type parameter is contravariant, the variance position of ´T´ is the opposite of the variance position of the enclosing type `´S´[´... T ...´ ]`. + + + +References to the type parameters in [object-private or object-protected values, types, variables, or methods](05-classes-and-objects.html#modifiers) of the class are not checked for their variance position. +In these members the type parameter may appear anywhere without restricting its legal variance annotations. + +###### Example +The following variance annotation is legal. + +```scala +abstract class P[+A, +B] { + def fst: A; def snd: B +} +``` + +With this variance annotation, type instances of ´P´ subtype covariantly with respect to their arguments. +For instance, + +```scala +P[IOException, String] <: P[Throwable, AnyRef] +``` + +If the members of ´P´ are mutable variables, the same variance annotation becomes illegal. + +```scala +abstract class Q[+A, +B](x: A, y: B) { + var fst: A = x // **** error: illegal variance: + var snd: B = y // `A', `B' occur in invariant position. 
+} +``` + +If the mutable variables are object-private, the class definition becomes legal again: + +```scala +abstract class R[+A, +B](x: A, y: B) { + private[this] var fst: A = x // OK + private[this] var snd: B = y // OK +} +``` + +###### Example + +The following variance annotation is illegal, since ´a´ appears in contravariant position in the parameter of `append`: + +```scala +abstract class Sequence[+A] { + def append(x: Sequence[A]): Sequence[A] + // **** error: illegal variance: + // `A' occurs in contravariant position. +} +``` + +The problem can be avoided by generalizing the type of `append` by means of a lower bound: + +```scala +abstract class Sequence[+A] { + def append[B >: A](x: Sequence[B]): Sequence[B] +} +``` + +###### Example + +```scala +abstract class OutputChannel[-A] { + def write(x: A): Unit +} +``` + +With that annotation, we have that `OutputChannel[AnyRef]` conforms to `OutputChannel[String]`. +That is, a channel on which one can write any object can substitute for a channel on which one can write only strings. + +## Method Declarations and Definitions + +```ebnf +Dcl ::= ‘def’ FunDcl +FunDcl ::= FunSig ‘:’ Type +Def ::= ‘def’ FunDef +FunDef ::= FunSig [‘:’ Type] ‘=’ Expr +FunSig ::= id [FunTypeParamClause] ParamClauses +FunTypeParamClause ::= ‘[’ TypeParam {‘,’ TypeParam} ‘]’ +ParamClauses ::= {ParamClause} [[nl] ‘(’ ‘implicit’ Params ‘)’] +ParamClause ::= [nl] ‘(’ [Params] ‘)’ +Params ::= Param {‘,’ Param} +Param ::= {Annotation} id [‘:’ ParamType] [‘=’ Expr] +ParamType ::= Type + | ‘=>’ Type + | Type ‘*’ +``` + +A _method declaration_ has the form `def ´f\,\mathit{psig}´: ´T´`, where ´f´ is the method's name, ´\mathit{psig}´ is its parameter signature and ´T´ is its result type. +A _method definition_ `def ´f\,\mathit{psig}´: ´T´ = ´e´` also includes a _method body_ ´e´, i.e. an expression which defines the method's result. 
+A parameter signature consists of an optional type parameter clause `[´\mathit{tps}\,´]`, followed by zero or more value parameter clauses `(´\mathit{ps}_1´)...(´\mathit{ps}_n´)`. +Such a declaration or definition introduces a value with a (possibly polymorphic) method type whose parameter types and result type are as given. + +The type of the method body is expected to [conform](06-expressions.html#expression-typing) to the method's declared result type, if one is given. +If the method definition is not recursive, the result type may be omitted, in which case it is determined from the packed type of the method body. + +A _type parameter clause_ ´\mathit{tps}´ consists of one or more [type declarations](#type-declarations-and-type-aliases), which introduce type parameters, possibly with bounds. +The scope of a type parameter includes the whole signature, including any of the type parameter bounds as well as the method body, if it is present. + +A _value parameter clause_ ´\mathit{ps}´ consists of zero or more formal parameter bindings such as `´x´: ´T´` or `´x: T = e´`, which bind value parameters and associate them with their types. + +### Default Arguments + +Each value parameter declaration may optionally define a default argument. +The default argument expression ´e´ is type-checked with an expected type ´T'´ obtained by replacing all occurrences of the method's type parameters in ´T´ by the undefined type. + +For every parameter ´p_{i,j}´ with a default argument a method named `´f\$´default´\$´n` is generated which computes the default argument expression. +Here, ´n´ denotes the parameter's position in the method declaration. +These methods are parametrized by the type parameter clause `[´\mathit{tps}\,´]` and all value parameter clauses `(´\mathit{ps}_1´)...(´\mathit{ps}_{i-1}´)` preceding ´p_{i,j}´. +The `´f\$´default´\$´n` methods are inaccessible for user programs. 
+ +###### Example +In the method + +```scala +def compare[T](a: T = 0)(b: T = a) = (a == b) +``` + +the default expression `0` is type-checked with an undefined expected +type. +When applying `compare()`, the default value `0` is inserted and `T` is instantiated to `Int`. +The methods computing the default arguments have the form: + +```scala +def compare´\$´default´\$´1[T]: Int = 0 +def compare´\$´default´\$´2[T](a: T): T = a +``` + +The scope of a formal value parameter name ´x´ comprises all subsequent parameter clauses, as well as the method return type and the method body, if they are given. +Both type parameter names and value parameter names must be pairwise distinct. + +A default value which depends on earlier parameters uses the actual arguments if they are provided, not the default arguments. + +```scala +def f(a: Int = 0)(b: Int = a + 1) = b // OK +// def f(a: Int = 0, b: Int = a + 1) // "error: not found: value a" +f(10)() // returns 11 (not 1) +``` + +If an [implicit argument](07-implicits.html#implicit-parameters) is not found by implicit search, it may be supplied using a default argument. + +```scala +implicit val i: Int = 2 +def f(implicit x: Int, s: String = "hi") = s * x +f // "hihi" +``` + +### By-Name Parameters + +```ebnf +ParamType ::= ‘=>’ Type +``` + +The type of a value parameter may be prefixed by `=>`, e.g. `´x´: => ´T´`. +The type of such a parameter is then the parameterless method type `=> ´T´`. +This indicates that the corresponding argument is not evaluated at the point of method application, but instead is evaluated at each use within the method. +That is, the argument is evaluated using _call-by-name_. + +The by-name modifier is disallowed for parameters of classes that carry a `val` or `var` prefix, including parameters of case classes for which a `val` prefix is implicitly generated. 
+ +###### Example +The declaration + +```scala +def whileLoop (cond: => Boolean) (stat: => Unit): Unit +``` + +indicates that both parameters of `whileLoop` are evaluated using call-by-name. + +### Repeated Parameters + +```ebnf +ParamType ::= Type ‘*’ +``` + +The last value parameter of a parameter section may be suffixed by `'*'`, e.g. `(..., ´x´:´T´*)`. +The type of such a _repeated_ parameter inside the method is then the sequence type `scala.Seq[´T´]`. +Methods with repeated parameters `´T´*` take a variable number of arguments of type ´T´. +That is, if a method ´m´ with type `(´p_1:T_1, ..., p_n:T_n, p_s:S´*)´U´` is applied to arguments ´(e_1, ..., e_k)´ where ´k \geq n´, then ´m´ is taken in that application to have type ´(p_1:T_1, ..., p_n:T_n, p_s:S, ..., p_{s'}:S)U´, with ´k - n´ occurrences of type ´S´ where any parameter names beyond ´p_s´ are fresh. The only exception to this rule is if the last argument is marked to be a _sequence argument_ via a `_*` type annotation. +If ´m´ above is applied to arguments `(´e_1, ..., e_n, e'´: _*)`, then the type of ´m´ in that application is taken to be `(´p_1:T_1, ... , p_n:T_n,p_{s}:´scala.Seq[´S´])`. + +It is not allowed to define any default arguments in a parameter section with a repeated parameter. + +###### Example +The following method definition computes the sum of the squares of a variable number of integer arguments. + +```scala +def sum(args: Int*) = { + var result = 0 + for (arg <- args) result += arg + result +} +``` + +The following applications of this method yield `0`, `1`, `6`, in that order. 
+ +```scala +sum() +sum(1) +sum(1, 2, 3) +``` + +Furthermore, assume the definition: + +```scala +val xs = List(1, 2, 3) +``` + +The following application of method `sum` is ill-formed: + +```scala +sum(xs) // ***** error: expected: Int, found: List[Int] +``` + +By contrast, the following application is well formed and yields again the result `6`: + +```scala +sum(xs: _*) +``` + +### Method Return Type Inference + +A class member definition ´m´ that overrides some other method ´m'´ in a base class of ´C´ may leave out the return type, even if it is recursive. +In this case, whether or not `m` is recursive, its return type will be the return type of ´m'´. + +###### Example +Assume the following definitions: + +```scala +trait I { + def factorial(x: Int): Int +} +class C extends I { + def factorial(x: Int) = if (x == 0) 1 else x * factorial(x - 1) +} +``` + +Here, it is OK to leave out the result type of `factorial` in `C`, even though the method is recursive. + + + +## Import Clauses + +```ebnf +Import ::= ‘import’ ImportExpr {‘,’ ImportExpr} +ImportExpr ::= StableId ‘.’ (id | ‘_’ | ImportSelectors) +ImportSelectors ::= ‘{’ {ImportSelector ‘,’} + (ImportSelector | ‘_’) ‘}’ +ImportSelector ::= id [‘=>’ id | ‘=>’ ‘_’] +``` + +An import clause has the form `import ´p´.´I´` where ´p´ is a [stable identifier](03-types.html#paths) and ´I´ is an import expression. +The import expression determines a set of names of importable members of ´p´ which are made available without qualification. +A member ´m´ of ´p´ is _importable_ if it is [accessible](05-classes-and-objects.html#modifiers). +The most general form of an import expression is a list of _import selectors_ + +```scala +{ ´x_1´ => ´y_1, ..., x_n´ => ´y_n´, _ } +``` + +for ´n \geq 0´, where the final wildcard `‘_’` may be absent. +It makes available each importable member `´p´.´x_i´` under the unqualified name ´y_i´. I.e. every import selector `´x_i´ => ´y_i´` renames `´p´.´x_i´` to +´y_i´. 
+If a final wildcard is present, all importable members ´z´ of ´p´ other than `´x_1, ..., x_n,y_1, ..., y_n´` are also made available under their own unqualified names. + +Import selectors work in the same way for type and term members. +For instance, an import clause `import ´p´.{´x´ => ´y\,´}` renames the term +name `´p´.´x´` to the term name ´y´ and the type name `´p´.´x´` to the type name ´y´. +At least one of these two names must reference an importable member of ´p´. + +If the target in an import selector is a wildcard, the import selector hides access to the source member. +For instance, the import selector `´x´ => _` “renames” ´x´ to the wildcard symbol (which is unaccessible as a name in user programs), and thereby effectively prevents unqualified access to ´x´. +This is useful if there is a final wildcard in the same import selector list, which imports all members not mentioned in previous import selectors. + +The scope of a binding introduced by an import-clause starts immediately after the import clause and extends to the end of the enclosing block, template, package clause, or compilation unit, whichever comes first. + +Several shorthands exist. An import selector may be just a simple name ´x´. +In this case, ´x´ is imported without renaming, so the import selector is equivalent to `´x´ => ´x´`. +Furthermore, it is possible to replace the whole import selector list by a single identifier or wildcard. +The import clause `import ´p´.´x´` is equivalent to `import ´p´.{´x\,´}`, i.e. it makes available without qualification the member ´x´ of ´p´. The import clause `import ´p´._` is equivalent to `import ´p´.{_}`, i.e. it makes available without qualification all members of ´p´ (this is analogous to `import ´p´.*` in Java). + +An import clause with multiple import expressions `import ´p_1´.´I_1, ..., p_n´.´I_n´` is interpreted as a sequence of import clauses `import ´p_1´.´I_1´; ...; import ´p_n´.´I_n´`. 
+ +###### Example +Consider the object definition: + +```scala +object M { + def z = 0, one = 1 + def add(x: Int, y: Int): Int = x + y +} +``` + +Then the block + +```scala +{ import M.{one, z => zero, _}; add(zero, one) } +``` + +is equivalent to the block + +```scala +{ M.add(M.z, M.one) } +``` diff --git a/docs/_spec/05-classes-and-objects.md b/docs/_spec/05-classes-and-objects.md new file mode 100644 index 000000000000..6feda780417a --- /dev/null +++ b/docs/_spec/05-classes-and-objects.md @@ -0,0 +1,1214 @@ +--- +title: Classes & Objects +layout: default +chapter: 5 +--- + +# Classes and Objects + +```ebnf +TmplDef ::= [‘case’] ‘class’ ClassDef + | [‘case’] ‘object’ ObjectDef + | ‘trait’ TraitDef +``` + +[Classes](#class-definitions) and [objects](#object-definitions) are both defined in terms of _templates_. + +## Templates + +```ebnf +ClassTemplate ::= [EarlyDefs] ClassParents [TemplateBody] +TraitTemplate ::= [EarlyDefs] TraitParents [TemplateBody] +ClassParents ::= Constr {‘with’ AnnotType} +TraitParents ::= AnnotType {‘with’ AnnotType} +TemplateBody ::= [nl] ‘{’ [SelfType] TemplateStat {semi TemplateStat} ‘}’ +SelfType ::= id [‘:’ Type] ‘=>’ + | this ‘:’ Type ‘=>’ +``` + +A _template_ defines the type signature, behavior and initial state of a trait or class of objects or of a single object. +Templates form part of instance creation expressions, class definitions, and object definitions. +A template `´sc´ with ´mt_1´ with ... with ´mt_n´ { ´\mathit{stats}´ }` consists of a constructor invocation ´sc´ which defines the template's _superclass_, trait references `´mt_1, ..., mt_n´` ´(n \geq 0)´, which define the template's _traits_, and a statement sequence ´\mathit{stats}´ which contains initialization code and additional member definitions for the template. + +Each trait reference ´mt_i´ must denote a [trait](#traits). +By contrast, the superclass constructor ´sc´ normally refers to a class which is not a trait. 
+It is possible to write a list of parents that starts with a trait reference, e.g. `´mt_1´ with ... with ´mt_n´`. +In that case the list of parents is implicitly extended to include the supertype of ´mt_1´ as the first parent type. +The new supertype must have at least one constructor that does not take parameters. +In the following, we will always assume that this implicit extension has been performed, so that the first parent class of a template is a regular superclass constructor, not a trait reference. + +The list of parents of a template must be well-formed. +This means that the class denoted by the superclass constructor ´sc´ must be a subclass of the superclasses of all the traits ´mt_1, ..., mt_n´. +In other words, the non-trait classes inherited by a template form a chain in the inheritance hierarchy which starts with the template's superclass. + +It is forbidden for a template's superclass constructor ´sc´ to be an [enum class](#enum-definitions), unless the template is the implementation of an [enum case](#enum-definitions) of ´sc´. + +The _least proper supertype_ of a template is the class type or [compound type](03-types.html#compound-types) consisting of all its parent class types. + +The statement sequence ´\mathit{stats}´ contains member definitions that define new members or overwrite members in the parent classes. +If the template forms part of an abstract class or trait definition, the statement part ´\mathit{stats}´ may also contain declarations of abstract members. +If the template forms part of a concrete class definition, ´\mathit{stats}´ may still contain declarations of abstract type members, but not of abstract term members. +Furthermore, ´\mathit{stats}´ may in any case also contain expressions; these are executed in the order they are given as part of the initialization of a template. + +The sequence of template statements may be prefixed with a formal parameter definition and an arrow, e.g. `´x´ =>`, or `´x´:´T´ =>`. 
+If a formal parameter is given, it can be used as an alias for the reference `this` throughout the body of the template. +If the formal parameter comes with a type ´T´, this definition affects the _self type_ ´S´ of the underlying class or object as follows: +Let ´C´ be the type of the class or trait or object defining the template. +If a type ´T´ is given for the formal self parameter, ´S´ is the greatest lower bound of ´T´ and ´C´. +If no type ´T´ is given, ´S´ is just ´C´. +Inside the template, the type of `this` is assumed to be ´S´. + +The self type of a class or object must conform to the self types of all classes which are inherited by the template ´t´. + +A second form of self type annotation reads just `this: ´S´ =>`. +It prescribes the type ´S´ for `this` without introducing an alias name for it. + +###### Example +Consider the following class definitions: + +```scala +class Base extends Object {} +trait Mixin extends Base {} +object O extends Mixin {} +``` + +In this case, the definition of `O` is expanded to: + +```scala +object O extends Base with Mixin {} +``` + + + +**Inheriting from Java Types** + +A template may have a Java class as its superclass and Java interfaces as its mixins. + +**Template Evaluation** + +Consider a template `´sc´ with ´mt_1´ with ´mt_n´ { ´\mathit{stats}´ }`. + +If this is the template of a [trait](#traits) then its _mixin-evaluation_ consists of an evaluation of the statement sequence ´\mathit{stats}´. + +If this is not a template of a trait, then its _evaluation_ consists of the following steps. + +- First, the superclass constructor ´sc´ is + [evaluated](#constructor-invocations). +- Then, all base classes in the template's [linearization](#class-linearization) up to the template's superclass denoted by ´sc´ are evaluated. +evaluation happens in reverse order of occurrence in the linearization. 
Each evaluation occurs as follows: + - First, arguments to ´mt_i´ are evaluated from left to right, and set as parameters of ´mt_i´. + - ´mt_i´ is then mixin-evaluated. +- Finally, the statement sequence ´\mathit{stats}\,´ is evaluated. + +### Constructor Invocations + +```ebnf +Constr ::= AnnotType {‘(’ [Exprs] ‘)’} +``` + +Constructor invocations define the type, members, and initial state of objects created by an instance creation expression, or of parts of an object's definition which are inherited by a class or object definition. +A constructor invocation is a method application `´x´.´c´[´\mathit{targs}´](´\mathit{args}_1´)...(´\mathit{args}_n´)`, where ´x´ is a [stable identifier](03-types.html#paths), ´c´ is a type name which either designates a class or defines an alias type for one, ´\mathit{targs}´ is a type argument list, ´\mathit{args}_1, ..., \mathit{args}_n´ are argument lists, and there is a constructor of that class which is [applicable](06-expressions.html#method-applications) to the given arguments. +If the constructor invocation uses named or default arguments, it is transformed into a block expression using the same transformation as described [here](sec:named-default). + +The prefix `´x´.` can be omitted. +A type argument list can be given only if the class ´c´ takes type parameters. +Even then it can be omitted, in which case a type argument list is synthesized using [local type inference](06-expressions.html#local-type-inference). +If no explicit arguments are given, an empty list `()` is implicitly supplied. + +An evaluation of a constructor invocation `´x´.´c´[´\mathit{targs}´](´\mathit{args}_1´)...(´\mathit{args}_n´)` consists of the following steps: + +- First, the prefix ´x´ is evaluated. +- Then, the arguments ´\mathit{args}_1, ..., \mathit{args}_n´ are evaluated from left to right. +- Finally, the class being constructed is initialized by evaluating the template of the class referred to by ´c´. 
+
+### Class Linearization
+
+The classes reachable through transitive closure of the direct inheritance relation from a class ´C´ are called the _base classes_ of ´C´.
+Because of mixins, the inheritance relationship on base classes forms in general a directed acyclic graph.
+A linearization of this graph is defined as follows.
+
+###### Definition: linearization
+Let ´C´ be a class with template `´C_1´ with ... with ´C_n´ { ´\mathit{stats}´ }`.
+The _linearization_ of ´C´, ´\mathcal{L}(C)´ is defined as follows:
+$$
+\mathcal{L}(C) = C, \mathcal{L}(C_n) \; \vec{+} \; ... \; \vec{+} \; \mathcal{L}(C_1)
+$$
+
+Here ´\vec{+}´ denotes concatenation where elements of the right operand replace identical elements of the left operand:
+
+$$
+\begin{array}{lcll}
+\{a, A\} \;\vec{+}\; B &=& a, (A \;\vec{+}\; B) &{\bf if} \; a \not\in B \\\\
+ &=& A \;\vec{+}\; B &{\bf if} \; a \in B
+\end{array}
+$$
+
+###### Example
+Consider the following class definitions.
+
+```scala
+abstract class AbsIterator extends AnyRef { ... }
+trait RichIterator extends AbsIterator { ... }
+class StringIterator extends AbsIterator { ... }
+class Iter extends StringIterator with RichIterator { ... }
+```
+
+Then the linearization of class `Iter` is
+
+```scala
+{ Iter, RichIterator, StringIterator, AbsIterator, AnyRef, Any }
+```
+
+Note that the linearization of a class refines the inheritance relation: if ´C´ is a subclass of ´D´, then ´C´ precedes ´D´ in any linearization where both ´C´ and ´D´ occur.
+[Linearization](#definition:-linearization) also satisfies the property that a linearization of a class always contains the linearization of its direct superclass as a suffix.
+
+For instance, the linearization of `StringIterator` is
+
+```scala
+{ StringIterator, AbsIterator, AnyRef, Any }
+```
+
+which is a suffix of the linearization of its subclass `Iter`.
+The same is not true for the linearization of mixins.
+For instance, the linearization of `RichIterator` is + +```scala +{ RichIterator, AbsIterator, AnyRef, Any } +``` + +which is not a suffix of the linearization of `Iter`. + +### Class Members + +A class ´C´ defined by a template `´C_1´ with ... with ´C_n´ { ´\mathit{stats}´ }` can define members in its statement sequence ´\mathit{stats}´ and can inherit members from all parent classes. +Scala adopts Java and C\#'s conventions for static overloading of methods. +It is thus possible that a class defines and/or inherits several methods with the same name. +To decide whether a defined member of a class ´C´ overrides a member of a parent class, or whether the two co-exist as overloaded variants in ´C´, Scala uses the following definition of _matching_ on members: + +###### Definition: matching +A member definition ´M´ _matches_ a member definition ´M'´, if ´M´ and ´M'´ bind the same name, and one of the following holds. + +1. Neither ´M´ nor ´M'´ is a method definition. +2. ´M´ and ´M'´ define both monomorphic methods with equivalent argument types. +3. ´M´ is defined in Java and defines a method with an empty parameter list `()` and ´M'´ defines a parameterless method. +4. ´M´ and ´M'´ define both polymorphic methods with equal number of argument types ´\overline T´, ´\overline T'´ and equal numbers of type parameters ´\overline t´, ´\overline t'´, say, and ´\overline T' = [\overline t'/\overline t]\overline T´. + + + +Member definitions fall into two categories: concrete and abstract. +Members of class ´C´ are either _directly defined_ (i.e. they appear in ´C´'s statement sequence ´\mathit{stats}´) or they are _inherited_. +There are two rules that determine the set of members of a class, one for each category: + +A _concrete member_ of a class ´C´ is any concrete definition ´M´ in some class ´C_i \in \mathcal{L}(C)´, except if there is a preceding class ´C_j \in \mathcal{L}(C)´ where ´j < i´ which directly defines a concrete member ´M'´ matching ´M´. 
+ +An _abstract member_ of a class ´C´ is any abstract definition ´M´ in some class ´C_i \in \mathcal{L}(C)´, except if ´C´ contains already a concrete member ´M'´ matching ´M´, or if there is a preceding class ´C_j \in \mathcal{L}(C)´ where ´j < i´ which directly defines an abstract member ´M'´ matching ´M´. + +This definition also determines the [overriding](#overriding) relationships between matching members of a class ´C´ and its parents. +First, a concrete definition always overrides an abstract definition. +Second, for definitions ´M´ and ´M'´ which are both concrete or both abstract, ´M´ overrides ´M'´ if ´M´ appears in a class that precedes (in the linearization of ´C´) the class in which ´M'´ is defined. + +It is an error if a template directly defines two matching members. +It is also an error if a template contains two members (directly defined or inherited) with the same name and the same [erased type](03-types.html#type-erasure). +Finally, a template is not allowed to contain two methods (directly defined or inherited) with the same name which both define default arguments. + +###### Example +Consider the trait definitions: + +```scala +trait A { def f: Int } +trait B extends A { def f: Int = 1 ; def g: Int = 2 ; def h: Int = 3 } +trait C extends A { override def f: Int = 4 ; def g: Int } +trait D extends B with C { def h: Int } +``` + +Then trait `D` has a directly defined abstract member `h`. +It inherits member `f` from trait `C` and member `g` from trait `B`. + +### Overriding + + + +A member ´M´ of class ´C´ that [matches](#class-members) a non-private member ´M'´ of a base class of ´C´ is said to _override_ that member. +In this case the binding of the overriding member ´M´ must [subsume](03-types.html#conformance) the binding of the overridden member ´M'´. +Furthermore, the following restrictions on modifiers apply to ´M´ and ´M'´: +- ´M'´ must not be a class. +- ´M'´ must not be labeled `final`. +- ´M´ must not be [`private`](#modifiers). 
+- If ´M´ is labeled `private[´C´]` for some enclosing class or package ´C´, then ´M'´ must be labeled `private[´C'´]` for some class or package ´C'´ where ´C'´ equals ´C´ or ´C'´ is contained in ´C´. + + +- If ´M´ is labeled `protected`, then ´M'´ must also be labeled `protected`. +- If ´M'´ is not an abstract member, then ´M´ must be labeled `override`. +Furthermore, one of two possibilities must hold: + - either ´M´ is defined in a subclass of the class where ´M'´ is defined, + - or both ´M´ and ´M'´ override a third member ´M''´ which is defined in a base class of both the classes containing ´M´ and ´M'´. +- If ´M'´ is [incomplete](#modifiers) in ´C´ then ´M´ must be labeled `abstract override`. +- If ´M´ and ´M'´ are both concrete value definitions, then either none of them is marked `lazy` or both must be marked `lazy`. + +- A stable member can only be overridden by a stable member. +For example, this is not allowed: + +```scala +class X { val stable = 1} +class Y extends X { override var stable = 1 } // error +``` + +Another restriction applies to abstract type members: +An abstract type member with a [volatile type](03-types.html#volatile-types) as its upper bound may not override an abstract type member which does not have a volatile upper bound. + +A special rule concerns parameterless methods. +If a parameterless method defined as `def ´f´: ´T´ = ...` or `def ´f´ = ...` overrides a method defined in Java of type ´()T'´ which has an empty parameter list, then ´f´ is also assumed to have an empty parameter list. + +An overriding method inherits all default arguments from the definition in the superclass. +By specifying default arguments in the overriding method it is possible to add new defaults (if the corresponding parameter in the superclass does not have a default) or to override the defaults of the superclass (otherwise). 
+ +###### Example + +Consider the definitions: + +```scala +trait Root { type T <: Root } +trait A extends Root { type T <: A } +trait B extends Root { type T <: B } +trait C extends A with B +``` + +Then the class definition `C` is not well-formed because the binding of `T` in `C` is `type T <: B`, which fails to subsume the binding `type T <: A` of `T` +in type `A`. +The problem can be solved by adding an overriding definition of type `T` in class `C`: + +```scala +class C extends A with B { type T <: C } +``` + +### Inheritance Closure + +Let ´C´ be a class type. +The _inheritance closure_ of ´C´ is the smallest set ´\mathscr{S}´ of types such that + +- ´C´ is in ´\mathscr{S}´. +- If ´T´ is in ´\mathscr{S}´, then every type ´T'´ which forms syntactically a part of ´T´ is also in ´\mathscr{S}´. +- If ´T´ is a class type in ´\mathscr{S}´, then all [parents](#templates) of ´T´ are also in ´\mathscr{S}´. + +It is a static error if the inheritance closure of a class type consists of an infinite number of types. +(This restriction is necessary to make subtyping decidable[^kennedy]). + +[^kennedy]: Kennedy, Pierce. [On Decidability of Nominal Subtyping with Variance.]( https://research.microsoft.com/pubs/64041/fool2007.pdf) in FOOL 2007 + +## Modifiers + +```ebnf +Modifier ::= LocalModifier + | AccessModifier + | ‘override’ +LocalModifier ::= ‘abstract’ + | ‘final’ + | ‘sealed’ + | ‘implicit’ + | ‘lazy’ +AccessModifier ::= (‘private’ | ‘protected’) [AccessQualifier] +AccessQualifier ::= ‘[’ (id | ‘this’) ‘]’ +``` + +Member definitions may be preceded by modifiers which affect the accessibility and usage of the identifiers bound by them. +If several modifiers are given, their order does not matter, but the same modifier may not occur more than once. +Modifiers preceding a repeated definition apply to all constituent definitions. +The rules governing the validity and meaning of a modifier are as follows. 
+ +### `private` +The `private` modifier can be used with any definition or declaration in a template. +Private members of a template can be accessed only from within the directly enclosing template and its companion module or [companion class](#object-definitions). + +The `private` modifier is also valid for [top-level](09-top-level-definitions.html#packagings) templates. + +A `private` modifier can be _qualified_ with an identifier ´C´ (e.g. `private[´C´]`) that must denote a class or package enclosing the definition. +Members labeled with such a modifier are accessible respectively only from code inside the package ´C´ or only from code inside the class ´C´ and its [companion module](#object-definitions). + +A different form of qualification is `private[this]`. +A member ´M´ marked with this modifier is called _object-private_; it can be accessed only from within the object in which it is defined. +That is, a selection ´p.M´ is only legal if the prefix is `this` or `´O´.this`, for some class ´O´ enclosing the reference. +In addition, the restrictions for unqualified `private` apply. + +Members marked private without a qualifier are called _class-private_, whereas members labeled with `private[this]` are called _object-private_. +A member _is private_ if it is either class-private or object-private, but not if it is marked `private[´C´]` where ´C´ is an identifier; in the latter case the member is called _qualified private_. + +Class-private or object-private members may not be abstract, and may not have `protected` or `override` modifiers. +They are not inherited by subclasses and they may not override definitions in parent classes. + +### `protected` +The `protected` modifier applies to class member definitions. +Protected members of a class can be accessed from within + - the template of the defining class, + - all templates that have the defining class as a base class, + - the companion module of any of those classes. 
+ +A `protected` modifier can be qualified with an identifier ´C´ (e.g. `protected[´C´]`) that must denote a class or package enclosing the definition. +Members labeled with such a modifier are also accessible respectively from all code inside the package ´C´ or from all code inside the class ´C´ and its [companion module](#object-definitions). + +A protected identifier ´x´ may be used as a member name in a selection `´r´.´x´` only if one of the following applies: + - The access is within the template defining the member, or, if a qualification ´C´ is given, inside the package ´C´, or the class ´C´, or its companion module, or + - ´r´ is one of the reserved words `this` and `super`, or + - ´r´'s type conforms to a type-instance of the class which contains the access. + +A different form of qualification is `protected[this]`. +A member ´M´ marked with this modifier is called _object-protected_; it can be accessed only from within the object in which it is defined. That is, a selection ´p.M´ is only legal if the prefix is `this` or `´O´.this`, for some class ´O´ enclosing the reference. In addition, the restrictions for unqualified `protected` apply. + +### `override` +The `override` modifier applies to class member definitions or declarations. +It is mandatory for member definitions or declarations that override some other concrete member definition in a parent class. +If an `override` modifier is given, there must be at least one overridden member definition or declaration (either concrete or abstract). + +### `abstract override` +The `override` modifier has an additional significance when combined with the `abstract` modifier. +That modifier combination is only allowed for value members of traits. + +We call a member ´M´ of a template _incomplete_ if it is either abstract (i.e. defined by a declaration), or it is labeled `abstract` and `override` and every member overridden by ´M´ is again incomplete. 
+ +Note that the `abstract override` modifier combination does not influence the concept whether a member is concrete or abstract. +A member is _abstract_ if only a declaration is given for it; it is _concrete_ if a full definition is given. + +### `abstract` +The `abstract` modifier is used in class definitions. +It is redundant for traits, and mandatory for all other classes which have incomplete members. +Abstract classes cannot be [instantiated](06-expressions.html#instance-creation-expressions) with a constructor invocation unless followed by mixins and/or a refinement which override all incomplete members of the class. +Only abstract classes and traits can have abstract term members. + +The `abstract` modifier can also be used in conjunction with `override` for class member definitions. +In that case the previous discussion applies. + +### `final` +The `final` modifier applies to class member definitions and to class definitions. +A `final` class member definition may not be overridden in subclasses. +A `final` class may not be inherited by a template. +`final` is redundant for object definitions. +Members of final classes or objects are implicitly also final, so the `final` modifier is generally redundant for them, too. +Note, however, that [constant value definitions](04-basic-declarations-and-definitions.html#value-declarations-and-definitions) do require an explicit `final` modifier, even if they are defined in a final class or object. +`final` is permitted for abstract classes but it may not be applied to traits or incomplete members, and it may not be combined in one modifier list with `sealed`. + +### `sealed` +The `sealed` modifier applies to class definitions. +A `sealed` class may not be directly inherited, except if the inheriting template is defined in the same source file as the inherited class. +However, subclasses of a sealed class can be inherited anywhere. + +### `lazy` +The `lazy` modifier applies to value definitions. 
+A `lazy` value is initialized the first time it is accessed (which might never +happen at all). +Attempting to access a lazy value during its initialization might lead to looping behavior. +If an exception is thrown during initialization, the value is considered uninitialized, and a later access will retry to evaluate its right hand side. + +###### Example +The following code illustrates the use of qualified private: + +```scala +package outerpkg.innerpkg +class Outer { + class Inner { + private[Outer] def f() + private[innerpkg] def g() + private[outerpkg] def h() + } +} +``` + +Here, accesses to the method `f` can appear anywhere within `Outer`, but not outside it. +Accesses to method `g` can appear anywhere within the package `outerpkg.innerpkg`, as would be the case for package-private methods in Java. +Finally, accesses to method `h` can appear anywhere within package `outerpkg`, including packages contained in it. + +###### Example +A useful idiom to prevent clients of a class from constructing new instances of that class is to declare the class `abstract` and `sealed`: + +```scala +object m { + abstract sealed class C (x: Int) { + def nextC = new C(x + 1) {} + } + val empty = new C(0) {} +} +``` + +For instance, in the code above clients can create instances of class `m.C` only by calling the `nextC` method of an existing `m.C` object; it is not possible for clients to create objects of class `m.C` directly. +Indeed the following two lines are both in error: + +```scala +new m.C(0) // **** error: C is abstract, so it cannot be instantiated. +new m.C(0) {} // **** error: illegal inheritance from sealed class. +``` + +A similar access restriction can be achieved by marking the primary constructor `private` ([example](#example-private-constructor)). 
+ +## Class Definitions + +```ebnf +TmplDef ::= ‘class’ ClassDef +ClassDef ::= id [TypeParamClause] {Annotation} + [AccessModifier] ClassParamClauses ClassTemplateOpt +ClassParamClauses ::= {ClassParamClause} + [[nl] ‘(’ implicit ClassParams ‘)’] +ClassParamClause ::= [nl] ‘(’ [ClassParams] ‘)’ +ClassParams ::= ClassParam {‘,’ ClassParam} +ClassParam ::= {Annotation} {Modifier} [(‘val’ | ‘var’)] + id [‘:’ ParamType] [‘=’ Expr] +ClassTemplateOpt ::= ‘extends’ ClassTemplate | [[‘extends’] TemplateBody] +``` + +The most general form of class definition is + +```scala +class ´c´[´\mathit{tps}\,´] ´as´ ´m´(´\mathit{ps}_1´)...(´\mathit{ps}_n´) extends ´t´ ´\quad(n \geq 0)´. +``` + +Here, + + - ´c´ is the name of the class to be defined. + - ´\mathit{tps}´ is a non-empty list of type parameters of the class being defined. + The scope of a type parameter is the whole class definition including the type parameter section itself. + It is illegal to define two type parameters with the same name. + The type parameter section `[´\mathit{tps}\,´]` may be omitted. + A class with a type parameter section is called _polymorphic_, otherwise it is called _monomorphic_. + - ´as´ is a possibly empty sequence of [annotations](11-annotations.html#user-defined-annotations). + If any annotations are given, they apply to the primary constructor of the class. + - ´m´ is an [access modifier](#modifiers) such as `private` or `protected`, possibly with a qualification. + If such an access modifier is given it applies to the primary constructor of the class. + - ´(\mathit{ps}\_1)...(\mathit{ps}\_n)´ are formal value parameter clauses for the _primary constructor_ of the class. + The scope of a formal value parameter includes all subsequent parameter sections and the template ´t´. + However, a formal value parameter may not form part of the types of any of the parent classes or members of the class template ´t´. + It is illegal to define two formal value parameters with the same name. 
+ + If a class has no formal parameter section that is not implicit, an empty parameter section `()` is assumed. + + If a formal parameter declaration ´x: T´ is preceded by a `val` or `var` keyword, an accessor (getter) [definition](04-basic-declarations-and-definitions.html#variable-declarations-and-definitions) for this parameter is implicitly added to the class. + + The getter introduces a value member ´x´ of class ´c´ that is defined as an alias of the parameter. + If the introducing keyword is `var`, a setter accessor [`´x´_=`](04-basic-declarations-and-definitions.html#variable-declarations-and-definitions) is also implicitly added to the class. + An invocation of that setter `´x´_=(´e´)` changes the value of the parameter to the result of evaluating ´e´. + + The formal parameter declaration may contain modifiers, which then carry over to the accessor definition(s). + When access modifiers are given for a parameter, but no `val` or `var` keyword, `val` is assumed. + A formal parameter prefixed by `val` or `var` may not at the same time be a [call-by-name parameter](04-basic-declarations-and-definitions.html#by-name-parameters). + + - ´t´ is a [template](#templates) of the form + + ```scala + ´sc´ with ´mt_1´ with ... with ´mt_m´ { ´\mathit{stats}´ } // ´m \geq 0´ + ``` + + which defines the base classes, behavior and initial state of objects of the class. + The extends clause `extends ´sc´ with ´mt_1´ with ... with ´mt_m´` can be omitted, in which case `extends scala.AnyRef` is assumed. + The class body `{ ´\mathit{stats}´ }` may also be omitted, in which case the empty body `{}` is assumed. + +This class definition defines a type `´c´[´\mathit{tps}\,´]` and a constructor which when applied to parameters conforming to types ´\mathit{ps}´ initializes instances of type `´c´[´\mathit{tps}\,´]` by evaluating the template ´t´. 
+ +###### Example – `val` and `var` parameters +The following example illustrates `val` and `var` parameters of a class `C`: + +```scala +class C(x: Int, val y: String, var z: List[String]) +val c = new C(1, "abc", List()) +c.z = c.y :: c.z +``` + +###### Example – Private Constructor +The following class can be created only from its companion module. + +```scala +object Sensitive { + def makeSensitive(credentials: Certificate): Sensitive = + if (credentials == Admin) new Sensitive() + else throw new SecurityViolationException +} +class Sensitive private () { + ... +} +``` + +### Constructor Definitions + +```ebnf +FunDef ::= ‘this’ ParamClause ParamClauses + (‘=’ ConstrExpr | [nl] ConstrBlock) +ConstrExpr ::= SelfInvocation + | ConstrBlock +ConstrBlock ::= ‘{’ SelfInvocation {semi BlockStat} ‘}’ +SelfInvocation ::= ‘this’ ArgumentExprs {ArgumentExprs} +``` + +A class may have additional constructors besides the primary constructor. +These are defined by constructor definitions of the form `def this(´\mathit{ps}_1´)...(´\mathit{ps}_n´) = ´e´`. +Such a definition introduces an additional constructor for the enclosing class, with parameters as given in the formal parameter lists ´\mathit{ps}_1 , ..., \mathit{ps}_n´, and whose evaluation is defined by the constructor expression ´e´. +The scope of each formal parameter is the subsequent parameter sections and the constructor expression ´e´. +A constructor expression is either a self constructor invocation `this(´\mathit{args}_1´)...(´\mathit{args}_n´)` or a block which begins with a self constructor invocation. +The self constructor invocation must construct a generic instance of the class. +I.e. if the class in question has name ´C´ and type parameters `[´\mathit{tps}\,´]`, then a self constructor invocation must generate an instance of `´C´[´\mathit{tps}\,´]`; it is not permitted to instantiate formal type parameters. 
+ +The signature and the self constructor invocation of a constructor definition are type-checked and evaluated in the scope which is in effect at the point of the enclosing class definition, augmented by any type parameters of the enclosing class. +The rest of the constructor expression is type-checked and evaluated as a method body in the current class. + +If there are auxiliary constructors of a class ´C´, they form together with ´C´'s primary [constructor](#class-definitions) an overloaded constructor definition. +The usual rules for [overloading resolution](06-expressions.html#overloading-resolution) apply for constructor invocations of ´C´, including for the self constructor invocations in the constructor expressions themselves. +However, unlike other methods, constructors are never inherited. +To prevent infinite cycles of constructor invocations, there is the restriction that every self constructor invocation must refer to a constructor definition which precedes it (i.e. it must refer to either a preceding auxiliary constructor or the primary constructor of the class). + +###### Example +Consider the class definition + +```scala +class LinkedList[A]() { + var head: A = _ + var tail: LinkedList[A] = null + def this(head: A) = { this(); this.head = head } + def this(head: A, tail: LinkedList[A]) = { this(head); this.tail = tail } +} +``` + +This defines a class `LinkedList` with three constructors. +The second constructor constructs a singleton list, while the third one constructs a list with a given head and tail. + +### Case Classes + +```ebnf +TmplDef ::= ‘case’ ‘class’ ClassDef +``` + +If a class definition is prefixed with `case`, the class is said to be a _case class_. + +A case class is required to have a parameter section that is not implicit. +The formal parameters in the first parameter section are called _elements_ and are treated specially. +First, the value of such a parameter can be extracted as a field of a constructor pattern. 
+Second, a `val` prefix is implicitly added to such a parameter, unless the parameter already carries a `val` or `var` modifier. +Hence, an accessor definition for the parameter is [generated](#class-definitions). + +A case class definition of `´c´[´\mathit{tps}\,´](´\mathit{ps}_1\,´)...(´\mathit{ps}_n´)` with type parameters ´\mathit{tps}´ and value parameters ´\mathit{ps}´ implies the definition of a companion object, which serves as an [extractor object](08-pattern-matching.html#extractor-patterns). +It has the following shape: + +```scala +object ´c´ { + def apply[´\mathit{tps}\,´](´\mathit{ps}_1\,´)...(´\mathit{ps}_n´): ´c´[´\mathit{tps}\,´] = new ´c´[´\mathit{Ts}\,´](´\mathit{xs}_1\,´)...(´\mathit{xs}_n´) + def unapply[´\mathit{tps}\,´](´x´: ´c´[´\mathit{tps}\,´]) = + if (x eq null) scala.None + else scala.Some(´x.\mathit{xs}_{11}, ... , x.\mathit{xs}_{1k}´) +} +``` + +Here, ´\mathit{Ts}´ stands for the vector of types defined in the type parameter section ´\mathit{tps}´, each ´\mathit{xs}\_i´ denotes the parameter names of the parameter section ´\mathit{ps}\_i´, and ´\mathit{xs}\_{11}, ... , \mathit{xs}\_{1k}´ denote the names of all parameters in the first parameter section ´\mathit{xs}\_1´. +If a type parameter section is missing in the class, it is also missing in the `apply` and `unapply` methods. + +If the companion object ´c´ is already defined, the `apply` and `unapply` methods are added to the existing object. +If the object ´c´ already has a [matching](#definition-matching) `apply` (or `unapply`) member, no new definition is added. +The definition of `apply` is omitted if class ´c´ is `abstract`. 
+ +If the case class definition contains an empty value parameter list, the `unapply` method returns a `Boolean` instead of an `Option` type and is defined as follows: + +```scala +def unapply[´\mathit{tps}\,´](´x´: ´c´[´\mathit{tps}\,´]) = x ne null +``` + +The name of the `unapply` method is changed to `unapplySeq` if the first parameter section ´\mathit{ps}_1´ of ´c´ ends in a [repeated parameter](04-basic-declarations-and-definitions.html#repeated-parameters). + +A method named `copy` is implicitly added to every case class unless the class already has a member (directly defined or inherited) with that name, or the class has a repeated parameter. +The method is defined as follows: + +```scala +def copy[´\mathit{tps}\,´](´\mathit{ps}'_1\,´)...(´\mathit{ps}'_n´): ´c´[´\mathit{tps}\,´] = new ´c´[´\mathit{Ts}\,´](´\mathit{xs}_1\,´)...(´\mathit{xs}_n´) +``` + +Again, `´\mathit{Ts}´` stands for the vector of types defined in the type parameter section `´\mathit{tps}´` and each `´xs_i´` denotes the parameter names of the parameter section `´ps'_i´`. +The value parameters `´ps'_{1,j}´` of first parameter list have the form `´x_{1,j}´:´T_{1,j}´=this.´x_{1,j}´`, the other parameters `´ps'_{i,j}´` of the `copy` method are defined as `´x_{i,j}´:´T_{i,j}´`. +In all cases `´x_{i,j}´` and `´T_{i,j}´` refer to the name and type of the corresponding class parameter `´\mathit{ps}_{i,j}´`. + +Every case class implicitly overrides some method definitions of class [`scala.AnyRef`](12-the-scala-standard-library.html#root-classes) unless a definition of the same method is already given in the case class itself or a concrete definition of the same method is given in some base class of the case class different from `AnyRef`. 
+In particular: + +- Method `equals: (Any)Boolean` is structural equality, where two instances are equal if they both belong to the case class in question and they have equal (with respect to `equals`) constructor arguments (restricted to the class's _elements_, i.e., the first parameter section). +- Method `hashCode: Int` computes a hash-code. If the hashCode methods of the data structure members map equal (with respect to equals) values to equal hash-codes, then the case class hashCode method does too. +- Method `toString: String` returns a string representation which contains the name of the class and its elements. + +###### Example +Here is the definition of abstract syntax for lambda calculus: + +```scala +class Expr +case class Var (x: String) extends Expr +case class Apply (f: Expr, e: Expr) extends Expr +case class Lambda(x: String, e: Expr) extends Expr +``` + +This defines a class `Expr` with case classes `Var`, `Apply` and `Lambda`. A call-by-value evaluator for lambda expressions could then be written as follows. + +```scala +type Env = String => Value +case class Value(e: Expr, env: Env) + +def eval(e: Expr, env: Env): Value = e match { + case Var (x) => + env(x) + case Apply(f, g) => + val Value(Lambda (x, e1), env1) = eval(f, env) + val v = eval(g, env) + eval (e1, (y => if (y == x) v else env1(y))) + case Lambda(_, _) => + Value(e, env) +} +``` + +It is possible to define further case classes that extend type `Expr` in other parts of the program, for instance + +```scala +case class Number(x: Int) extends Expr +``` + +This form of extensibility can be excluded by declaring the base class `Expr` `sealed`; in this case, all classes that directly extend `Expr` must be in the same source file as `Expr`. + +## Traits + +```ebnf +TmplDef ::= ‘trait’ ClassDef +``` + +A _trait_ is a class that is meant to be added to some other class as a mixin. +Furthermore, no constructor arguments are passed to the superclass of the trait. 
+This is not necessary as traits are initialized after the superclass is initialized. + +Assume a trait ´D´ defines some aspect of an instance ´x´ of type ´C´ (i.e. ´D´ is a base class of ´C´). +Then the _actual supertype_ of ´D´ in ´x´ is the compound type consisting of all the base classes in ´\mathcal{L}(C)´ that succeed ´D´. +The actual supertype gives the context for resolving a [`super` reference](06-expressions.html#this-and-super) in a trait. +Note that the actual supertype depends on the type to which the trait is added in a mixin composition; it is not statically known at the time the trait is defined. + +If ´D´ is not a trait, then its actual supertype is simply its least proper supertype (which is statically known). + +###### Example +The following trait defines the property of being comparable to objects of some type. +It contains an abstract method `<` and default implementations of the other comparison operators `<=`, `>`, and `>=`. + +```scala +trait Comparable[T <: Comparable[T]] { self: T => + def < (that: T): Boolean + def <=(that: T): Boolean = this < that || this == that + def > (that: T): Boolean = that < this + def >=(that: T): Boolean = that <= this +} +``` + +###### Example +Consider an abstract class `Table` that implements maps from a type of keys `A` to a type of values `B`. +The class has a method `set` to enter a new key / value pair into the table, and a method `get` that returns an optional value matching a given key. +Finally, there is a method `apply` which is like `get`, except that it returns a given default value if the table is undefined for the given key. +This class is implemented as follows. + +```scala +abstract class Table[A, B](defaultValue: B) { + def get(key: A): Option[B] + def set(key: A, value: B): Unit + def apply(key: A) = get(key) match { + case Some(value) => value + case None => defaultValue + } +} +``` + +Here is a concrete implementation of the `Table` class. 
+ +```scala +class ListTable[A, B](defaultValue: B) extends Table[A, B](defaultValue) { + private var elems: List[(A, B)] = Nil + def get(key: A) = elems.find(_._1 == key).map(_._2) + def set(key: A, value: B) = { elems = (key, value) :: elems } +} +``` + +Here is a trait that prevents concurrent access to the `get` and `set` operations of its parent class: + +```scala +trait SynchronizedTable[A, B] extends Table[A, B] { + abstract override def get(key: A): B = + synchronized { super.get(key) } + abstract override def set(key: A, value: B) = + synchronized { super.set(key, value) } +} +``` + +Note that `SynchronizedTable` does not pass an argument to its superclass, `Table`, even though `Table` is defined with a formal parameter. +Note also that the `super` calls in `SynchronizedTable`'s `get` and `set` methods statically refer to abstract methods in class `Table`. +This is legal, as long as the calling method is labeled [`abstract override`](#modifiers). + +Finally, the following mixin composition creates a synchronized list table with strings as keys and integers as values and with a default value `0`: + +```scala +object MyTable extends ListTable[String, Int](0) with SynchronizedTable[String, Int] +``` + +The object `MyTable` inherits its `get` and `set` method from `SynchronizedTable`. +The `super` calls in these methods are re-bound to refer to the corresponding implementations in `ListTable`, which is the actual supertype of `SynchronizedTable` in `MyTable`. + +### Extending parameterized traits + +Extra rules apply for extending a trait with parameters: + +1. If a class `´C´` extends a parameterized trait `´T´`, and its superclass does not, `´C´` _must_ pass arguments to `´T´`. + +2. If a class `´C´` extends a parameterized trait `´T´`, and its superclass does as well, `´C´` _must not_ pass arguments to `´T´`. + +3. Traits must never pass arguments to parent traits. + +4. 
If a class `´C´` extends an unparameterized trait `´T_i´` and the base types of `´T_i´` include parameterized trait `´T_j´`, and the superclass of `´C´` does not extend `´T_j´`, then `´C´` _must_ also explicitly extend `´T_j´` and pass arguments. +This rule is relaxed if the missing trait contains only context parameters. In that case the trait reference is implicitly inserted as an additional parent with inferred arguments. + +###### Example - Preventing ambiguities + +The following listing tries to extend `Greeting` twice, with different parameters. + +```scala +trait Greeting(val name: String): + def msg = s"How are you, $name" + +class C extends Greeting("Bob") + +class D extends C, Greeting("Bill") // error + +@main def greet = println(D().msg) +``` + +Should this program print "Bob" or "Bill"? In fact this program is illegal, because it violates rule 2 above. +Instead, `D` can extend `Greeting` without passing arguments. + +###### Example - Overriding + +Here's a variant of `Greeting` that overrides `msg`: +```scala +trait FormalGreeting extends Greeting: + override def msg = s"How do you do, $name" +``` + +Due to rule 4, the following class extending `FormalGreeting` is required to also extend `Greeting` with arguments: +```scala +class GreetBobFormally extends FormalGreeting, Greeting("Bob") +``` + +###### Example - Inferred context parameters + +Here's a variant of `Greeting` where the addressee is a context parameter of type `ImpliedName`: + +```scala +trait ImpliedGreeting(using val iname: ImpliedName): + def msg = s"How are you, $iname" + +case class ImpliedName(name: String): + override def toString = name + +trait ImpliedFormalGreeting extends ImpliedGreeting: + override def msg = s"How do you do, $iname" + +class F(using iname: ImpliedName) extends ImpliedFormalGreeting +``` + +The definition of `F` in the last line is implicitly expanded to +```scala +class F(using iname: ImpliedName) extends + Object, // implicitly inserted + ImpliedGreeting(using 
iname), // implicitly inserted + ImpliedFormalGreeting +``` +Due to rule 4, `F` is required to also extend `ImpliedGreeting` and pass arguments to it, however note that because `ImpliedGreeting` has only context parameters the extension was added implicitly. + +## Object Definitions + +```ebnf +TmplDef ::= ‘object’ ObjectDef +ObjectDef ::= id ClassTemplate +``` + +An _object definition_ defines a single object of a new class. +Its most general form is `object ´m´ extends ´t´`. +Here, ´m´ is the name of the object to be defined, and ´t´ is a [template](#templates) of the form + +```scala +´sc´ with ´mt_1´ with ... with ´mt_n´ { ´\mathit{stats}´ } +``` + +which defines the base classes, behavior and initial state of ´m´. +The extends clause `extends ´sc´ with ´mt_1´ with ... with ´mt_n´` can be omitted, in which case `extends scala.AnyRef` is assumed. +The class body `{ ´\mathit{stats}´ }` may also be omitted, in which case the empty body `{}` is assumed. + +The object definition defines a single object (or: _module_) conforming to the template ´t´. +It is roughly equivalent to the following definition of a lazy value: + +```scala +lazy val ´m´ = new ´sc´ with ´mt_1´ with ... with ´mt_n´ { this: ´m.type´ => ´\mathit{stats}´ } +``` + +Note that the value defined by an object definition is instantiated lazily. +The `new ´m´$cls` constructor is evaluated not at the point of the object definition, but is instead evaluated the first time ´m´ is dereferenced during execution of the program (which might be never at all). +An attempt to dereference ´m´ again during evaluation of the constructor will lead to an infinite loop or run-time error. +Other threads trying to dereference ´m´ while the constructor is being evaluated block until evaluation is complete. + +The expansion given above is not accurate for top-level objects. 
+It cannot be because variable and method definition cannot appear on the top-level outside of a [package object](09-top-level-definitions.html#package-objects). +Instead, top-level objects are translated to static fields. + +###### Example +Classes in Scala do not have static members; however, an equivalent effect can be achieved by an accompanying object definition E.g. + +```scala +abstract class Point { + val x: Double + val y: Double + def isOrigin = (x == 0.0 && y == 0.0) +} +object Point { + val origin = new Point() { val x = 0.0; val y = 0.0 } +} +``` + +This defines a class `Point` and an object `Point` which contains `origin` as a member. +Note that the double use of the name `Point` is legal, since the class definition defines the name `Point` in the type name space, whereas the object definition defines a name in the term namespace. + +This technique is applied by the Scala compiler when interpreting a Java class with static members. +Such a class ´C´ is conceptually seen as a pair of a Scala class that contains all instance members of ´C´ and a Scala object that contains all static members of ´C´. + +Generally, a _companion module_ of a class is an object which has the same name as the class and is defined in the same scope and compilation unit. +Conversely, the class is called the _companion class_ of the module. + +Very much like a concrete class definition, an object definition may still contain declarations of abstract type members, but not of abstract term members. + +## Enum Definitions + + +```ebnf +TmplDef ::= ‘enum’ EnumDef +EnumDef ::= id ClassConstr [‘extends’ [ConstrApps]] EnumBody +EnumBody ::= [nl] ‘{’ [SelfType] EnumStat {semi EnumStat} ‘}’ +EnumStat ::= TemplateStat + | {Annotation [nl]} {Modifier} EnumCase +EnumCase ::= ‘case’ (id ClassConstr [‘extends’ ConstrApps] | ids) +``` + +An _enum definition_ implies the definition of an _enum class_, a companion object, and one or more _enum cases_. 
+ +Enum definitions are useful to encode both Generalised Algebraic Data Types and Enumerated Types. + +The compiler expands enum definitions to code that only uses Scala's other language features. +As such, enum definitions in Scala are convenient _syntactic sugar_, but they are not essential to understand Scala's core. + +We now explain the expansion of enum definitions in detail. +First, some terminology and notational conventions: + +- We use ´E´ as a name of an enum definition, and ´C´ as a name of an enum case that appears in ´E´. +- We use `<...>` for syntactic constructs that in some circumstances might be empty. +For instance, `` represents one or more parameter lists `(´\mathit{ps}_1\,´)...(´\mathit{ps}_n´)` or nothing at all. +- Enum classes fall into two categories: + - _parameterized_ enum classes have at least one of the following: + - a type parameter section, denoted as `[´\mathit{tps}\,´]`; + - one or more (possibly empty) parameter sections, denoted as `(´\mathit{ps}_1\,´)...(´\mathit{ps}_n´)`. + - _unparameterized_ enum classes have no type parameter sections and no parameter sections. +- Enum cases fall into three categories: + + - _Class cases_ are those cases that are parameterized, either with a type parameter section `[´\mathit{tps}\,´]` or with one or more (possibly empty) parameter sections `(´\mathit{ps}_1\,´)...(´\mathit{ps}_n´)`. + - _Simple cases_ are cases of an unparameterized enum that have neither parameters nor an extends clause or body. + That is, they consist of a name only. + - _Value cases_ are all cases that do not have a parameter section but that do have a (possibly generated) `extends` clause and/or a body. + +- Simple cases and value cases are collectively called _singleton cases_. 
+ +###### Example + +An example enum for a `Planet` enumeration can be given as +```scala +enum Planet(mass: Double, radius: Double): + case Mercury extends Planet(3.303e+23, 2.4397e6) + case Venus extends Planet(4.869e+24, 6.0518e6) + case Earth extends Planet(5.976e+24, 6.37814e6) + case Mars extends Planet(6.421e+23, 3.3972e6) + case Jupiter extends Planet(1.9e+27, 7.1492e7) + case Saturn extends Planet(5.688e+26, 6.0268e7) + case Uranus extends Planet(8.686e+25, 2.5559e7) + case Neptune extends Planet(1.024e+26, 2.4746e7) + + private inline val G = 6.67300E-11 + def surfaceGravity = G * mass / (radius * radius) + def surfaceWeight(otherMass: Double) = otherMass * surfaceGravity +end Planet +``` + +###### Example + +An example enum for the Option ADT can be given as +```scala +enum Option[+T]: + case Some(x: T) + case None +``` + +### Lowering of Enum Definitions + +###### Summary +An enum class is represented as a `sealed` class that extends the `scala.reflect.Enum` trait. + +Enum cases are represented as follows: +- a class case is mapped to a `case class`, +- a singleton case is mapped to a `val` definition, where + - Simple cases all share a single implementation class. + - Value cases will each be implemented by a unique class. + +###### Precise rules +The `scala.reflect.Enum` trait defines a single public method, `ordinal`: +```scala +package scala.reflect + +transparent trait Enum extends Any, Product, Serializable: + + def ordinal: Int +``` +There are nine desugaring rules. +Rule (1) desugars enum definitions. +Rules (2) and (3) desugar simple cases. +Rules (4) to (6) define `extends` clauses for cases that are missing them. +Rules (7) to (9) define how such cases with `extends` clauses map into `case class`es or `val`s. + +1. An `enum` definition + ```scala + enum ´E´ ... 
{ } + ``` + expands to a `sealed abstract` class that extends the `scala.reflect.Enum` trait and an associated companion object that contains the defined cases, expanded according to rules (2 - 8). + The enum class starts with a compiler-generated import that imports the names `` of all cases so that they can be used without prefix in the class. + ```scala + sealed abstract class ´E´ ... extends with scala.reflect.Enum { + import ´E´.{ } + + } + object ´E´ { } + ``` + +2. A singleton case consisting of a comma-separated list of enum names + ```scala + case ´C_1´, ..., ´C_n´ + ``` + expands to + ```scala + case ´C_1´; ...; case ´C_n´ + ``` + Any modifiers or annotations on the original case extend to all expanded cases. + This result is then further rewritten by either (3 or 4). + +3. A singleton case without an extends clause + ```scala + case ´C´ + ``` + of an unparameterized enum `´E´` expands to the following simple enum case in `´E´`'s companion object: + ```scala + val ´C´ = $new(n, "C") + ``` + Here, `$new` is a private method that creates an instance of ´E´ (see below). + +4. A singleton case without an extends clause + ```scala + case ´C´ + ``` + of an enum `´E´` with type parameters + ```scala + ´\mathit{v}_1´ ´T_1´ >: ´L_1´ <: ´U_1´ , ... , ´\mathit{v}_n´ ´T_n´ >: ´L_n´ <: ´U_n´ (n > 0) + ``` + where each of the variances `´\mathit{v}_i´` is either `'+'` or `'-'`, expands to the following value enum case: + ```scala + case ´C´ extends ´E´[´B_1´, ..., ´B_n´] + ``` + where `´B_i´` is `´L_i´` if `´\mathit{v}_i´ = '+'` and `´U_i´` if `´\mathit{v}_i´ = '-'`. + This result is then further rewritten with rule (8). + **NOTE:** It is not permitted for enums with non-variant type parameters to have singleton cases without an extends clause. + +5. 
A class case without an extends clause + ```scala + case ´C´ + ``` + of an enum `´E´` that does not take type parameters expands to + ```scala + case ´C´ extends ´E´ + ``` + This result is then further rewritten with rule (9). + +6. If `´E´` is an enum with type parameters `´\mathit{tps}´`, a class case with neither type parameters nor an extends clause + ```scala + case ´C´ + ``` + expands to + ```scala + case ´C´[´\mathit{tps}´] extends ´E´[´\mathit{tps}´] + ``` + This result is then further rewritten with rule (9). + For class cases that have type parameters themselves, an extends clause needs to be given explicitly. + + +7. If `´E´` is an enum with type parameters `´\mathit{tps}´`, a class case without type parameters but with an extends clause + ```scala + case ´C´ extends + ``` + expands to + ```scala + case ´C´[´\mathit{tps}´] extends + ``` + provided at least one of the parameters `´\mathit{tps}´` is mentioned in a parameter type in `` or in a type argument in ``. + +8. A value case + ```scala + case ´C´ extends + ``` + expands to the following `val` definition in `´E´`'s companion object: + ```scala + val ´C´ = new { ; def ordinal = ´\mathit{n}´ } + ``` + where `´\mathit{n}´` is the ordinal number of the case in the companion object, starting from 0. + The anonymous class also implements the abstract `Product` methods that it inherits from `Enum`. + **NOTE:** It is an error if a value case refers to a type parameter of `´E´` in a type argument within ``. + +9. A class case + ```scala + case ´C´ extends + ``` + expands analogous to a final case class in `´E´`'s companion object: + ```scala + final case class ´C´ extends { + def ordinal = ´\mathit{n}´ + } + ``` + where `´\mathit{n}´` is the ordinal number of the case in the companion object, starting from 0. 
+ **NOTE:** It is an error if a class case refers to a type parameter of `´E´` in a parameter type in `` or `` or in a type argument of ``, unless that parameter is already a type parameter of the case, i.e. the parameter name is defined in ``. + +###### Superclass of an enum case + +an enum case (singleton or class) with explicit extends clause +```scala +case ´C´ extends +``` + +must extend the parent enum `´E´` as the first parent of ``. + +###### Example +Consider the enumeration `RGB`, consisting of simple enum cases: +```scala +enum RGB: + case Red, Green, Blue +``` + +The three simple cases will expand as follows in the companion of `RGB`: + +```scala +val Red = $new(0, "Red") +val Green = $new(1, "Green") +val Blue = $new(2, "Blue") + +private def $new(_$ordinal: Int, $name: String) = + new RGB with scala.runtime.EnumValue: + def ordinal = _$ordinal + override def productPrefix = $name + override def toString = $name +``` + + +###### Example + +Consider the more complex enumeration `Color`, consisting of value enum cases: +```scala +enum Color(val rgb: Int): + case Red extends Color(0xFF0000) + case Green extends Color(0x00FF00) + case Blue extends Color(0x0000FF) +``` + +The three value cases will expand as follows in the companion of `Color`: + +```scala +val Red = new Color(0xFF0000): + def ordinal: Int = 0 + override def productPrefix: String = "Red" + override def toString: String = "Red" +val Green = new Color(0x00FF00): + def ordinal: Int = 1 + override def productPrefix: String = "Green" + override def toString: String = "Green" +val Blue = new Color(0x0000FF): + def ordinal: Int = 2 + override def productPrefix: String = "Blue" + override def toString: String = "Blue" +``` + +### Widening of enum cases post-construction +The compiler-generated `apply` and `copy` methods of an class enum case +```scala +case ´C´[´\mathit{tps}\,´](´\mathit{ps}_1\,´)...(´\mathit{ps}_n´) extends ´P_1´, ..., ´P_n´ +``` +are treated specially. 
+A call `´C´[´\mathit{tps}\,´](´\mathit{ps}_1\,´)...(´\mathit{ps}_n´)` of the `apply` method is ascribed the underlying type `´P_1´ & ... & ´P_n´` (dropping any [transparent traits](../other-new-features/transparent-traits.md)) as long as that type is still compatible with the expected type at the point of application. +A call `t.copy[´\mathit{tps}\,´](´\mathit{ps}_1\,´)...(´\mathit{ps}_n´)` of `´C´`'s `copy` method is treated in the same way. + +### Translation of enums with only singleton cases + +An enum `´E´` (possibly generic) that defines one or more singleton cases, and no class cases will define the following additional synthetic members in its companion object (where `´E'´` denotes `´E´` with any type parameters replaced by wildcards): + + - A method `valueOf(name: String): ´E'´`. + It returns the singleton case value whose identifier is `name`. + - A method `values` which returns an `Array[´E'´]` of all singleton case values defined by `E`, in the order of their definitions. + +### Factory method for simple enum cases + +If an enum `´E´` contains at least one simple case, its companion object will define in addition: + + - A private method `$new` which defines a new simple case value with given ordinal number and name. + This method can be thought as being defined as follows. + + ```scala + private def $new(_$ordinal: Int, $name: String): ´E´ with runtime.EnumValue + ``` + - `$new` returns a new instance of an anonymous class which implements the abstract `Product` methods that it inherits from `Enum`. + - if `´E´` inherits from `java.lang.Enum` the anonymous class does not override the `ordinal` or `toString` methods, as these are final in `java.lang.Enum`. + Additionally `productPrefix` will delegate to `this.name`. + +### Translation of Java-compatible enums + +A Java-compatible enum is an enum that extends `java.lang.Enum`. +The translation rules are the same as above, with the reservations defined in this section. 
+ +- It is a compile-time error for a Java-compatible enum to have class cases. + +- Cases such as `case C` expand to a `@static val` as opposed to a `val`. +This allows them to be generated as static fields of the enum type, thus ensuring they are represented the same way as Java enums. + +### Scopes for Enum Cases + +A case in an `enum` is treated similarly to a secondary constructor. +It can access neither the enclosing `enum` using `this`, nor its value parameters or instance members using simple identifiers. + +Even though translated enum cases are located in the enum's companion object, referencing this object or its members via `this` or a simple identifier is also illegal. +The compiler typechecks enum cases in the scope of the enclosing companion object but flags any such illegal accesses as errors. + +### Variance for Type Parameters + +A parameterized enum case ´C´ of enum ´E´ with _inferred_ type parameters will copy variance annotations. +e.g. type parameter ´T_{i}´ from ´E´ will have the same variance as type parameter `´T'_{i}´` in ´C´. + +###### Example + +The following enum `View` has a contravariant type parameter ´T´ and a single case `Refl`, representing a function mapping a type `T` to itself: + +```scala +enum View[-´T´]: + case Refl(f: ´T´ => ´T´) +``` + +`Refl` expands to the following enum: + +```scala +enum View[-´T´]: + case Refl[-´T'´](f: ´T'´ => ´T'´) extends View[´T'´] +``` + +The definition of `Refl` is incorrectly typed, as it uses contravariant type `´T'´` in the covariant result position of a function type. 
+ +A correctly typed version would use an _explicit_, _invariant_ type parameter `´R´` on case `Refl`: + +```scala +enum View[-´T´]: + case Refl[´R´](f: ´R´ => ´R´) extends View[´R´] +``` \ No newline at end of file diff --git a/docs/_spec/06-expressions.md b/docs/_spec/06-expressions.md new file mode 100644 index 000000000000..fa21b4330728 --- /dev/null +++ b/docs/_spec/06-expressions.md @@ -0,0 +1,1412 @@ +--- +title: Expressions +layout: default +chapter: 6 +--- + +# Expressions + +```ebnf +Expr ::= (Bindings | id | ‘_’) ‘=>’ Expr + | Expr1 +Expr1 ::= ‘if’ ‘(’ Expr ‘)’ {nl} Expr [[semi] ‘else’ Expr] + | ‘while’ ‘(’ Expr ‘)’ {nl} Expr + | ‘try’ Expr [‘catch’ Expr] [‘finally’ Expr] + | ‘for’ (‘(’ Enumerators ‘)’ | ‘{’ Enumerators ‘}’) {nl} [‘yield’] Expr + | ‘throw’ Expr + | ‘return’ [Expr] + | [SimpleExpr ‘.’] id ‘=’ Expr + | SimpleExpr1 ArgumentExprs ‘=’ Expr + | PostfixExpr + | PostfixExpr Ascription + | PostfixExpr ‘match’ ‘{’ CaseClauses ‘}’ +PostfixExpr ::= InfixExpr [id [nl]] +InfixExpr ::= PrefixExpr + | InfixExpr id [nl] InfixExpr +PrefixExpr ::= [‘-’ | ‘+’ | ‘~’ | ‘!’] SimpleExpr +SimpleExpr ::= ‘new’ (ClassTemplate | TemplateBody) + | BlockExpr + | SimpleExpr1 [‘_’] +SimpleExpr1 ::= Literal + | Path + | ‘_’ + | ‘(’ [Exprs] ‘)’ + | SimpleExpr ‘.’ id + | SimpleExpr TypeArgs + | SimpleExpr1 ArgumentExprs + | XmlExpr +Exprs ::= Expr {‘,’ Expr} +BlockExpr ::= ‘{’ CaseClauses ‘}’ + | ‘{’ Block ‘}’ +Block ::= BlockStat {semi BlockStat} [ResultExpr] +ResultExpr ::= Expr1 + | (Bindings | ([‘implicit’] id | ‘_’) ‘:’ CompoundType) ‘=>’ Block +Ascription ::= ‘:’ InfixType + | ‘:’ Annotation {Annotation} + | ‘:’ ‘_’ ‘*’ +``` + +Expressions are composed of operators and operands. +Expression forms are discussed subsequently in decreasing order of precedence. + +## Expression Typing + +The typing of expressions is often relative to some _expected type_ (which might be undefined). +When we write "expression ´e´ is expected to conform to type ´T´", we mean: + 1. 
the expected type of ´e´ is ´T´, and + 2. the type of expression ´e´ must conform to ´T´. + +The following skolemization rule is applied universally for every expression: +If the type of an expression would be an existential type ´T´, then the type of the expression is assumed instead to be a [skolemization](03-types.html#existential-types) of ´T´. + + +Skolemization is reversed by type packing. +Assume an expression ´e´ of type ´T´ and let ´t_1[\mathit{tps}\_1] >: L_1 <: U_1, ..., t_n[\mathit{tps}\_n] >: L_n <: U_n´ be all the type variables created by skolemization of some part of ´e´ which are free in ´T´. +Then the _packed type_ of ´e´ is + +```scala +´T´ forSome { type ´t_1[\mathit{tps}\_1] >: L_1 <: U_1´; ...; type ´t_n[\mathit{tps}\_n] >: L_n <: U_n´ }. +``` + +## Literals + +```ebnf +SimpleExpr ::= Literal +``` + +Typing of literals is described along with their [lexical syntax](01-lexical-syntax.html#literals); their evaluation is immediate. + +## The _Null_ Value + +The `null` value is of type `scala.Null`, and thus conforms to every reference type. +It denotes a reference value which refers to a special `null` object. +This object implements methods in class `scala.AnyRef` as follows: + +- `eq(´x\,´)` and `==(´x\,´)` return `true` iff the argument ´x´ is also the "null" object. +- `ne(´x\,´)` and `!=(´x\,´)` return true iff the argument x is not also the "null" object. +- `isInstanceOf[´T\,´]` always returns `false`. +- `asInstanceOf[´T\,´]` returns the [default value](04-basic-declarations-and-definitions.html#value-declarations-and-definitions) of type ´T´. +- `##` returns ``0``. + +A reference to any other member of the "null" object causes a `NullPointerException` to be thrown. + +## Designators + +```ebnf +SimpleExpr ::= Path + | SimpleExpr ‘.’ id +``` + +A designator refers to a named term. It can be a _simple name_ or a _selection_. 
+ +A simple name ´x´ refers to a value as specified [here](02-identifiers-names-and-scopes.html#identifiers,-names-and-scopes). +If ´x´ is bound by a definition or declaration in an enclosing class or object ´C´, it is taken to be equivalent to the selection `´C´.this.´x´` where ´C´ is taken to refer to the class containing ´x´ even if the type name ´C´ is [shadowed](02-identifiers-names-and-scopes.html#identifiers,-names-and-scopes) at the occurrence of ´x´. + +If ´r´ is a [stable identifier](03-types.html#paths) of type ´T´, the selection ´r.x´ refers statically to a term member ´m´ of ´r´ that is identified in ´T´ by the name ´x´. + + + +For other expressions ´e´, ´e.x´ is typed as if it was `{ val ´y´ = ´e´; ´y´.´x´ }`, for some fresh name ´y´. + +The expected type of a designator's prefix is always undefined. +The type of a designator is the type ´T´ of the entity it refers to, with the following exception: The type of a [path](03-types.html#paths) ´p´ which occurs in a context where a [stable type](03-types.html#singleton-types) is required is the singleton type `´p´.type`. + +The contexts where a stable type is required are those that satisfy one of the following conditions: + +1. The path ´p´ occurs as the prefix of a selection and it does not designate a constant, or +1. The expected type ´\mathit{pt}´ is a stable type, or +1. The expected type ´\mathit{pt}´ is an abstract type with a stable type as lower bound, and the type ´T´ of the entity referred to by ´p´ does not conform to ´\mathit{pt}´, or +1. The path ´p´ designates a module. + +The selection ´e.x´ is evaluated by first evaluating the qualifier expression ´e´, which yields an object ´r´, say. +The selection's result is then the member of ´r´ that is either defined by ´m´ or defined by a definition overriding ´m´. 
+ +## This and Super + +```ebnf +SimpleExpr ::= [id ‘.’] ‘this’ + | [id ‘.’] ‘super’ [ClassQualifier] ‘.’ id +``` + +The expression `this` can appear in the statement part of a template or compound type. +It stands for the object being defined by the innermost template or compound type enclosing the reference. +If this is a compound type, the type of `this` is that compound type. +If it is a template of a class or object definition with simple name ´C´, the type of this is the same as the type of `´C´.this`. + +The expression `´C´.this` is legal in the statement part of an enclosing class or object definition with simple name ´C´. +It stands for the object being defined by the innermost such definition. +If the expression's expected type is a stable type, or `´C´.this` occurs as the prefix of a selection, its type is `´C´.this.type`, otherwise it is the self type of class ´C´. + +A reference `super.´m´` refers statically to a method or type ´m´ in the least proper supertype of the innermost template containing the reference. +It evaluates to the member ´m'´ in the actual supertype of that template which is equal to ´m´ or which overrides ´m´. +The statically referenced member ´m´ must be a type or a method. + + + +If it is a method, it must be concrete, or the template containing the reference must have a member ´m'´ which overrides ´m´ and which is labeled `abstract override`. + +A reference `´C´.super.´m´` refers statically to a method or type ´m´ in the least proper supertype of the innermost enclosing class or object definition named ´C´ which encloses the reference. +It evaluates to the member ´m'´ in the actual supertype of that class or object +which is equal to ´m´ or which overrides ´m´. +The statically referenced member ´m´ must be a type or a method. 
+If the statically referenced member ´m´ is a method, it must be concrete, or the innermost enclosing class or object definition named ´C´ must have a member ´m'´ which overrides ´m´ and which is labeled `abstract override`. + +The `super` prefix may be followed by a trait qualifier `[´T\,´]`, as in `´C´.super[´T\,´].´x´`. +This is called a _static super reference_. +In this case, the reference is to the type or method of ´x´ in the parent trait of ´C´ whose simple name is ´T´. +That member must be uniquely defined. +If it is a method, it must be concrete. + +###### Example +Consider the following class definitions + +```scala +class Root { def x = "Root" } +class A extends Root { override def x = "A" ; def superA = super.x } +trait B extends Root { override def x = "B" ; def superB = super.x } +class C extends Root with B { + override def x = "C" ; def superC = super.x +} +class D extends A with B { + override def x = "D" ; def superD = super.x +} +``` + +The linearization of class `C` is `{C, B, Root}` and the linearization of class `D` is `{D, B, A, Root}`. +Then we have: + +```scala +(new A).superA == "Root" + +(new C).superB == "Root" +(new C).superC == "B" + +(new D).superA == "Root" +(new D).superB == "A" +(new D).superD == "B" +``` + +Note that the `superB` method returns different results depending on whether `B` is mixed in with class `Root` or `A`. + +## Method Applications + +```ebnf +SimpleExpr ::= SimpleExpr1 ArgumentExprs +ArgumentExprs ::= ‘(’ [Exprs] ‘)’ + | ‘(’ ‘using’ Exprs ‘)’ + | ‘(’ [Exprs ‘,’] PostfixExpr ‘:’ ‘_’ ‘*’ ‘)’ + | [nl] BlockExpr +Exprs ::= Expr {‘,’ Expr} +``` + +An application `´f(e_1, ..., e_m)´` applies the method `´f´` to the argument expressions `´e_1, ..., e_m´`. +For this expression to be well-typed, the method must be *applicable* to its arguments: + +If ´f´ has a method type `(´p_1´:´T_1, ..., p_n´:´T_n´)´U´`, each argument expression ´e_i´ is typed with the corresponding parameter type ´T_i´ as expected type. 
+Let ´S_i´ be the type of argument ´e_i´ ´(i = 1, ..., m)´.
+The method ´f´ must be _applicable_ to its arguments ´e_1, ..., e_n´ of types ´S_1, ..., S_n´.
+We say that an argument expression ´e_i´ is a _named_ argument if it has the form `´x_i=e'_i´` and `´x_i´` is one of the parameter names `´p_1, ..., p_n´`.
+
+Once the types ´S_i´ have been determined, the method ´f´ of the above method type is said to be applicable if all of the following conditions hold:
+ - for every named argument ´p_j=e'_i´ the type ´S_i´ is [compatible](03-types.html#compatibility) with the parameter type ´T_j´;
+ - for every positional argument ´e_i´ the type ´S_i´ is [compatible](03-types.html#compatibility) with ´T_i´;
+ - if the expected type is defined, the result type ´U´ is [compatible](03-types.html#compatibility) with it.
+
+If ´f´ is instead of some value type, the application is taken to be equivalent to `´f´.apply(´e_1, ..., e_m´)`, i.e. the application of an `apply` method defined by ´f´.
+Value `´f´` is applicable to the given arguments if `´f´.apply` is applicable.
+
+Notes:
+- In the case where ´f´ or `´f´.apply` is a polymorphic method, this is taken as an [omitted type application](#type-applications).
+- `´f´` is applicable to the given arguments if the result of this type application is applicable.
+
+The application `´f´(´e_1, ..., e_n´)` evaluates ´f´ and then each argument ´e_1, ..., e_n´ from left to right, except for arguments that correspond to a by-name parameter (see below).
+Each argument expression is converted to the type of its corresponding formal parameter.
+After that, the application is rewritten to the method's right-hand side, with actual arguments substituted for formal parameters.
+The result of evaluating the rewritten right-hand side is finally converted to the method's declared result type, if one is given.
+
+The case of a formal parameter with a parameterless method type `=> ´T´` is treated specially. 
+In this case, the corresponding actual argument expression ´e´ is not evaluated before the application. +Instead, every use of the formal parameter on the right-hand side of the rewrite rule entails a re-evaluation of ´e´. +In other words, the evaluation order for `=>`-parameters is _call-by-name_ whereas the evaluation order for normal parameters is _call-by-value_. +Furthermore, it is required that ´e´'s [packed type](#expression-typing) conforms to the parameter type ´T´. +The behavior of by-name parameters is preserved if the application is transformed into a block due to named or default arguments. +In this case, the local value for that parameter has the form `val ´y_i´ = () => ´e´` and the argument passed to the method is `´y_i´()`. + +The last argument in an application may be marked as a sequence argument, e.g. `´e´: _*`. +Such an argument must correspond to a [repeated parameter](04-basic-declarations-and-definitions.html#repeated-parameters) of type `´S´*` and it must be the only argument matching this parameter (i.e. the number of formal parameters and actual arguments must be the same). +Furthermore, the type of ´e´ must conform to `scala.Seq[´T´]`, for some type ´T´ which conforms to ´S´. +In this case, the argument list is transformed by replacing the sequence ´e´ with its elements. +When the application uses named arguments, the vararg parameter has to be specified exactly once. + +If only a single argument is supplied, it may be supplied as a block expression and parentheses can be omitted, in the form `´f´ { block }`. +This is valid when `f` has a single formal parameter or when all other formal parameters have default values. + +A method application usually allocates a new frame on the program's run-time stack. +However, if a local method or a final method calls itself as its last action, the call is executed using the stack-frame of the caller. 
+ +###### Example +Assume the following method which computes the sum of a variable number of arguments: + +```scala +def sum(xs: Int*) = xs.foldLeft(0)((x, y) => x + y) +``` + +Then + +```scala +sum(1, 2, 3, 4) +sum(List(1, 2, 3, 4): _*) +``` + +both yield `10` as result. +On the other hand, + +```scala +sum(List(1, 2, 3, 4)) +``` + +would not typecheck. + +An argument list may begin with the soft keyword `using` to facilitate cross-compilation with Scala 3. +The keyword is ignored. + +### Named and Default Arguments + +If an application is to use named arguments ´p = e´ or default arguments, the following conditions must hold. + +- For every named argument ´p_i = e_i´ which appears left of a positional argument in the argument list ´e_1 ... e_m´, the argument position ´i´ coincides with the position of parameter ´p_i´ in the parameter list of the applied method. +- The names ´x_i´ of all named arguments are pairwise distinct and no named argument defines a parameter which is already specified by a positional argument. +- Every formal parameter ´p_j:T_j´ which is not specified by either a positional or named argument has a default argument. + +If the application uses named or default arguments the following transformation is applied to convert it into an application without named or default arguments. + +If the method ´f´ has the form `´p.m´[´\mathit{targs}´]` it is transformed into the block + +```scala +{ val q = ´p´ + q.´m´[´\mathit{targs}´] +} +``` + +If the method ´f´ is itself an application expression the transformation is applied recursively on ´f´. +The result of transforming ´f´ is a block of the form + +```scala +{ val q = ´p´ + val ´x_1´ = expr´_1´ + ... + val ´x_k´ = expr´_k´ + q.´m´[´\mathit{targs}´](´\mathit{args}_1´), ...,(´\mathit{args}_l´) +} +``` + +where every argument in ´(\mathit{args}\_1), ..., (\mathit{args}\_l)´ is a reference to one of the values ´x_1, ..., x_k´. 
+To integrate the current application into the block, first a value definition using a fresh name ´y_i´ is created for every argument in ´e_1, ..., e_m´, which is initialised to ´e_i´ for positional arguments and to ´e'_i´ for named arguments of the form `´x_i=e'_i´`.
+Then, for every parameter which is not specified by the argument list, a value definition using a fresh name ´z_i´ is created, which is initialized using the method computing the [default argument](04-basic-declarations-and-definitions.html#method-declarations-and-definitions) of this parameter.
+
+Let ´\mathit{args}´ be a permutation of the generated names ´y_i´ and ´z_i´ such that the position of each name matches the position of its corresponding parameter in the method type `(´p_1:T_1, ..., p_n:T_n´)´U´`.
+The final result of the transformation is a block of the form
+
+```scala
+{ val q = ´p´
+  val ´x_1´ = expr´_1´
+  ...
+  val ´x_k´ = expr´_k´
+  val ´y_1´ = ´e_1´
+  ...
+  val ´y_m´ = ´e_m´
+  val ´z_1´ = ´q.m\$default\$i[\mathit{targs}](\mathit{args}_1), ..., (\mathit{args}_l)´
+  ...
+  val ´z_d´ = ´q.m\$default\$j[\mathit{targs}](\mathit{args}_1), ..., (\mathit{args}_l)´
+  q.´m´[´\mathit{targs}´](´\mathit{args}_1´), ..., (´\mathit{args}_l´)(´\mathit{args}´)
+}
+```
+
+### Signature Polymorphic Methods
+
+For invocations of signature polymorphic methods of the target platform `´f´(´e_1, ..., e_m´)`, the invoked method has a different method type `(´p_1´:´T_1, ..., p_n´:´T_n´)´U´` at each call site.
+The parameter types `´T_1, ..., T_n´` are the types of the argument expressions `´e_1, ..., e_m´`.
+If the declared return type `´R´` of the signature polymorphic method is any type other than `scala.AnyRef`, then the return type `´U´` is `´R´`.
+Otherwise, `´U´` is the expected type at the call site. If the expected type is undefined then `´U´` is `scala.AnyRef`.
+The parameter names `´p_1, ..., p_n´` are fresh.
+ +###### Note + +On the Java platform version 11 and later, signature polymorphic methods are native, members of `java.lang.invoke.MethodHandle` or `java.lang.invoke.VarHandle`, and have a single repeated parameter of type `java.lang.Object*`. + +## Method Values + +```ebnf +SimpleExpr ::= SimpleExpr1 ‘_’ +``` + +The expression `´e´ _` is well-formed if ´e´ is of method +type or if ´e´ is a call-by-name parameter. +If ´e´ is a method with parameters, `´e´ _` represents ´e´ converted to a function type by [eta expansion](#eta-expansion-section). +If ´e´ is a parameterless method or call-by-name parameter of type `=> ´T´`, `´e´ _` represents the function of type `() => ´T´`, which evaluates ´e´ when it is applied to the empty parameter list `()`. + +###### Example +The method values in the left column are each equivalent to the [eta-expanded expressions](#eta-expansion-section) on the right. + +| placeholder syntax | eta-expansion | +|------------------------------ | ----------------------------------------------------------------------------| +|`math.sin _` | `x => math.sin(x)` | +|`math.pow _` | `(x1, x2) => math.pow(x1, x2)` | +|`val vs = 1 to 9; vs.fold _` | `(z) => (op) => vs.fold(z)(op)` | +|`(1 to 9).fold(z)_` | `{ val eta1 = 1 to 9; val eta2 = z; op => eta1.fold(eta2)(op) }` | +|`Some(1).fold(??? : Int)_` | `{ val eta1 = Some(1); val eta2 = () => ???; op => eta1.fold(eta2())(op) }` | + +Note that a space is necessary between a method name and the trailing underscore because otherwise the underscore would be considered part of the name. + +## Type Applications + +```ebnf +SimpleExpr ::= SimpleExpr TypeArgs +``` + +A _type application_ `´e´[´T_1, ..., T_n´]` instantiates a polymorphic method ´e´ of type `[´a_1´ >: ´L_1´ <: ´U_1, ..., a_n´ >: ´L_n´ <: ´U_n´]´S´` with argument types `´T_1, ..., T_n´`. +Every argument type ´T_i´ must obey the corresponding bounds ´L_i´ and ´U_i´. 
+That is, for each ´i = 1, ..., n´, we must have ´\sigma L_i <: T_i <: \sigma U_i´, where ´\sigma´ is the substitution ´[a_1 := T_1, ..., a_n := T_n]´.
+The type of the application is ´\sigma S´.
+
+If ´e´ is not a method, and is instead of some value type, the type application is taken to be equivalent to `´e´.apply[´T_1, ..., T_n´]`, i.e. the application of an `apply` method defined by ´e´.
+
+Type applications can be omitted if [local type inference](#local-type-inference) can infer best type parameters for a polymorphic method from the types of the actual method arguments and the expected result type.
+
+## Tuples
+
+```ebnf
+SimpleExpr ::= ‘(’ [Exprs] ‘)’
+```
+A _tuple expression_ `(´e_1´, ..., ´e_n´)` where ´n \geq 2´ is equivalent to the expression `´e_1´ *: ... *: ´e_n´ *: scala.EmptyTuple`.
+
+Note: as calls to `*:` are slow, a more efficient translation is free to be implemented. For example, `(´e_1´, ´e_2´)` could be translated to `scala.Tuple2(´e_1´, ´e_2´)`, which is indeed equivalent to `´e_1´ *: ´e_2´ *: scala.EmptyTuple`.
+
+Notes:
+- The expression `(´e_1´)` is not equivalent to `´e_1´ *: scala.EmptyTuple`, but instead a regular parenthesized expression.
+- The expression `()` is not an alias for `scala.EmptyTuple`, but instead the unique value of type `scala.Unit`.
+
+## Instance Creation Expressions
+
+```ebnf
+SimpleExpr ::= ‘new’ (ClassTemplate | TemplateBody)
+```
+
+A _simple instance creation expression_ is of the form `new ´c´` where ´c´ is a [constructor invocation](05-classes-and-objects.html#constructor-invocations).
+Let ´T´ be the type of ´c´.
+Then ´T´ must denote (a type instance of) a non-abstract subclass of `scala.AnyRef`.
+Furthermore, the _concrete self type_ of the expression must conform to the [self type](05-classes-and-objects.html#templates) of the class denoted by ´T´.
+The concrete self type is normally ´T´, except if the expression `new ´c´` appears as the right hand side of a value definition + +```scala +val ´x´: ´S´ = new ´c´ +``` + +(where the type annotation `: ´S´` may be missing). +In the latter case, the concrete self type of the expression is the compound type `´T´ with ´x´.type`. + +The expression is evaluated by creating a fresh object of type ´T´ which is initialized by evaluating ´c´. +The type of the expression is ´T´. + +A _general instance creation expression_ is of the form `new ´t´` for some [class template](05-classes-and-objects.html#templates) ´t´. +Such an expression is equivalent to the block + +```scala +{ class ´a´ extends ´t´; new ´a´ } +``` + +where ´a´ is a fresh name of an _anonymous class_ which is inaccessible to user programs. + +There is also a shorthand form for creating values of structural types: +If `{´D´}` is a class body, then `new {´D´}` is equivalent to the general instance creation expression `new AnyRef{´D´}`. + +###### Example +Consider the following structural instance creation expression: + +```scala +new { def getName() = "aaron" } +``` + +This is a shorthand for the general instance creation expression + +```scala +new AnyRef{ def getName() = "aaron" } +``` + +The latter is in turn a shorthand for the block + +```scala +{ class anon$X extends AnyRef{ def getName() = "aaron" }; new anon$X } +``` + +where `anon$X` is some freshly created name. + +## Blocks + +```ebnf +BlockExpr ::= ‘{’ CaseClauses ‘}’ + | ‘{’ Block ‘}’ +Block ::= BlockStat {semi BlockStat} [ResultExpr] +``` + +A _block expression_ `{´s_1´; ...; ´s_n´; ´e\,´}` is constructed from a sequence of block statements ´s_1, ..., s_n´ and a final expression ´e´. +The statement sequence may not contain two definitions or declarations that bind the same name in the same namespace. +The final expression can be omitted, in which case the unit value `()` is assumed. 
+ +The expected type of the final expression ´e´ is the expected type of the block. +The expected type of all preceding statements is undefined. + + +The type of a block `´s_1´; ...; ´s_n´; ´e´` is some type ´T´ such that: + +- ´U <: T´ where ´U´ is the type of ´e´. +- No value or type name is free in ´T´, i.e., ´T´ does not refer to any value or type locally defined in one of the statements ´s_1, ..., s_n´. +- ´T´ is "as small as possible" (this is a soft requirement). + +The precise way in which we compute ´T´, called _type avoidance_, is currently not defined in this specification. + +Evaluation of the block entails evaluation of its statement sequence, followed by an evaluation of the final expression ´e´, which defines the result of the block. + +A block expression `{´c_1´; ...; ´c_n´}` where ´c_1, ..., c_n´ are case clauses forms a [pattern matching anonymous function](08-pattern-matching.html#pattern-matching-anonymous-functions). + +## Prefix, Infix, and Postfix Operations + +```ebnf +PostfixExpr ::= InfixExpr [id [nl]] +InfixExpr ::= PrefixExpr + | InfixExpr id [nl] InfixExpr +PrefixExpr ::= [‘-’ | ‘+’ | ‘!’ | ‘~’] SimpleExpr +``` + +Expressions can be constructed from operands and operators. + +### Prefix Operations + +A prefix operation ´\mathit{op};e´ consists of a prefix operator ´\mathit{op}´, which must be one of the identifiers ‘`+`’, ‘`-`’, ‘`!`’ or ‘`~`’, which must not be enclosed in backquotes. +The expression ´\mathit{op};e´ is equivalent to the postfix method application `e.unary_´\mathit{op}´`. + + + +Prefix operators are different from normal method applications in that their operand expression need not be atomic. +For instance, the input sequence `-sin(x)` is read as `-(sin(x))`, whereas the method application `negate sin(x)` would be parsed as the application of the infix operator `sin` to the operands `negate` and `(x)`. + +### Postfix Operations + +A postfix operator can be an arbitrary identifier. 
+The postfix operation ´e;\mathit{op}´ is interpreted as ´e.\mathit{op}´. + +### Infix Operations + +An infix operator can be an arbitrary identifier. +Infix operators have precedence and associativity defined as follows: + +The _precedence_ of an infix operator is determined by the operator's first character. +Characters are listed below in increasing order of precedence, with characters on the same line having the same precedence. + +```scala +(all letters, as defined in [chapter 1](01-lexical-syntax.html), including `_` and `$`) +| +^ +& += ! +< > +: ++ - +* / % +(other operator characters, as defined in [chapter 1](01-lexical-syntax.html), including Unicode categories `Sm` and `So`) +``` + +That is, operators starting with a letter have lowest precedence, followed by operators starting with ‘`|`’, etc. + +There's one exception to this rule, which concerns [_assignment operators_](#assignment-operators). +The precedence of an assignment operator is the same as the one of simple assignment `(=)`. +That is, it is lower than the precedence of any other operator. + +The _associativity_ of an operator is determined by the operator's +last character. +Operators ending in a colon ‘`:`’ are right-associative. +All other operators are left-associative. + +Precedence and associativity of operators determine the grouping of parts of an expression as follows. + +- If there are several infix operations in an expression, then operators with higher precedence bind more closely than operators with lower precedence. +- If there are consecutive infix operations ´e_0; \mathit{op}\_1; e_1; \mathit{op}\_2 ... \mathit{op}\_n; e_n´ with operators ´\mathit{op}\_1, ..., \mathit{op}\_n´ of the same precedence, then all these operators must have the same associativity. +If all operators are left-associative, the sequence is interpreted as ´(...(e_0;\mathit{op}\_1;e_1);\mathit{op}\_2...);\mathit{op}\_n;e_n´. 
+Otherwise, if all operators are right-associative, the sequence is interpreted as ´e_0;\mathit{op}\_1;(e_1;\mathit{op}\_2;(... \mathit{op}\_n;e_n)...)´. +- Postfix operators always have lower precedence than infix operators. E.g. ´e_1;\mathit{op}\_1;e_2;\mathit{op}\_2´ is always equivalent to ´(e_1;\mathit{op}\_1;e_2);\mathit{op}\_2´. + +The right-hand operand of a left-associative operator may consist of several arguments enclosed in parentheses, e.g. ´e;\mathit{op};(e_1,...,e_n)´. +This expression is then interpreted as ´e.\mathit{op}(e_1,...,e_n)´. + +A left-associative binary operation ´e_1;\mathit{op};e_2´ is interpreted as ´e_1.\mathit{op}(e_2)´. If ´\mathit{op}´ is right-associative and its parameter is passed by name, the same operation is interpreted as ´e_2.\mathit{op}(e_1)´. +If ´\mathit{op}´ is right-associative and its parameter is passed by value, it is interpreted as `{ val ´x´=´e_1´; ´e_2´.´\mathit{op}´(´x\,´) }`, where ´x´ is a fresh name. + +### Assignment Operators + +An _assignment operator_ is an operator symbol (syntax category `op` in [Identifiers](01-lexical-syntax.html#identifiers)) that ends in an equals character “`=`”, with the following exceptions: + +1. the operator also starts with an equals character, or +1. the operator is one of `(<=)`, `(>=)`, `(!=)`. + +Assignment operators are treated specially in that they can be expanded to assignments if no other interpretation is valid. + +Let's consider an assignment operator such as `+=` in an infix operation `´l´ += ´r´`, where ´l´, ´r´ are expressions. +This operation can be re-interpreted as an operation which corresponds to the assignment + +```scala +´l´ = ´l´ + ´r´ +``` + +except that the operation's left-hand-side ´l´ is evaluated only once. + +The re-interpretation occurs if the following two conditions are fulfilled. + +1. 
The left-hand-side ´l´ does not have a member named `+=`, and also cannot be converted by an [implicit conversion](#implicit-conversions) to a value with a member named `+=`. +1. The assignment `´l´ = ´l´ + ´r´` is type-correct. +In particular this implies that ´l´ refers to a variable or object that can be assigned to, and that is convertible to a value with a member named `+`. + +## Typed Expressions + +```ebnf +Expr1 ::= PostfixExpr ‘:’ CompoundType +``` + +The _typed expression_ ´e: T´ has type ´T´. +The type of expression ´e´ is expected to conform to ´T´. +The result of the expression is the value of ´e´ converted to type ´T´. + +###### Example +Here are examples of well-typed and ill-typed expressions. + +```scala +1: Int // legal, of type Int +1: Long // legal, of type Long +// 1: string // ***** illegal +``` + +## Annotated Expressions + +```ebnf +Expr1 ::= PostfixExpr ‘:’ Annotation {Annotation} +``` + +An _annotated expression_ `´e´: @´a_1´ ... @´a_n´` attaches [annotations](11-annotations.html#user-defined-annotations) ´a_1, ..., a_n´ to the expression ´e´. + +## Assignments + +```ebnf +Expr1 ::= [SimpleExpr ‘.’] id ‘=’ Expr + | PrefixOperator SimpleExpr ‘=’ Expr + | SimpleExpr1 ArgumentExprs ‘=’ Expr +``` + +The interpretation of an assignment to a simple variable `´x´ = ´e´` depends on the definition of ´x´. +If ´x´ denotes a mutable variable, then the assignment changes the current value of ´x´ to be the result of evaluating the expression ´e´. +The type of ´e´ is expected to conform to the type of ´x´. +If ´x´ is a parameterless method defined in some template, and the same template contains a setter method `´x´_=` as member, then the assignment `´x´ = ´e´` is interpreted as the invocation `´x´_=(´e\,´)` of that setter method. +Analogously, an assignment `´f.x´ = ´e´` to a parameterless method ´x´ is interpreted as the invocation `´f.x´_=(´e\,´)`. 
+If ´x´ is an application of a unary operator, then the expression is interpreted as though it were written as the explicit application `´x´.unary_´\mathit{op}´`, namely, as `´x´.unary_´\mathit{op}´_=(´e\,´)`. + +An assignment `´f´(´\mathit{args}\,´) = ´e´` with a method application to the left of the ‘`=`’ operator is interpreted as `´f.´update(´\mathit{args}´, ´e\,´)`, i.e. the invocation of an `update` method defined by ´f´. + +###### Example +Here are some assignment expressions and their equivalent expansions. + +| assignment | expansion | +|--------------------------|----------------------| +|`x.f = e` | `x.f_=(e)` | +|`x.f() = e` | `x.f.update(e)` | +|`x.f(i) = e` | `x.f.update(i, e)` | +|`x.f(i, j) = e` | `x.f.update(i, j, e)`| + +###### Example Imperative Matrix Multiplication + +Here is the usual imperative code for matrix multiplication. + +```scala +def matmul(xss: Array[Array[Double]], yss: Array[Array[Double]]) = { + val zss: Array[Array[Double]] = new Array(xss.length, yss(0).length) + var i = 0 + while (i < xss.length) { + var j = 0 + while (j < yss(0).length) { + var acc = 0.0 + var k = 0 + while (k < yss.length) { + acc = acc + xss(i)(k) * yss(k)(j) + k += 1 + } + zss(i)(j) = acc + j += 1 + } + i += 1 + } + zss +} +``` + +Desugaring the array accesses and assignments yields the following expanded version: + +```scala +def matmul(xss: Array[Array[Double]], yss: Array[Array[Double]]) = { + val zss: Array[Array[Double]] = new Array(xss.length, yss.apply(0).length) + var i = 0 + while (i < xss.length) { + var j = 0 + while (j < yss.apply(0).length) { + var acc = 0.0 + var k = 0 + while (k < yss.length) { + acc = acc + xss.apply(i).apply(k) * yss.apply(k).apply(j) + k += 1 + } + zss.apply(i).update(j, acc) + j += 1 + } + i += 1 + } + zss +} +``` + +## Conditional Expressions + +```ebnf +Expr1 ::= ‘if’ ‘(’ Expr ‘)’ {nl} Expr [[semi] ‘else’ Expr] +``` + +The _conditional expression_ `if (´e_1´) ´e_2´ else ´e_3´` chooses one of the values of ´e_2´ and 
´e_3´, depending on the value of ´e_1´. +The condition ´e_1´ is expected to conform to type `Boolean`. +The then-part ´e_2´ and the else-part ´e_3´ are both expected to conform to the expected type of the conditional expression. +The type of the conditional expression is the [weak least upper bound](03-types.html#weak-conformance) of the types of ´e_2´ and ´e_3´. +A semicolon preceding the `else` symbol of a conditional expression is ignored. + +The conditional expression is evaluated by evaluating first ´e_1´. +If this evaluates to `true`, the result of evaluating ´e_2´ is returned, otherwise the result of evaluating ´e_3´ is returned. + +A short form of the conditional expression eliminates the else-part. +The conditional expression `if (´e_1´) ´e_2´` is evaluated as if it was `if (´e_1´) ´e_2´ else ()`. + +## While Loop Expressions + +```ebnf +Expr1 ::= ‘while’ ‘(’ Expr ‘)’ {nl} Expr +``` + +The _while loop expression_ `while (´e_1´) ´e_2´` is typed and evaluated as if it was an application of `whileLoop (´e_1´) (´e_2´)` where the hypothetical method `whileLoop` is defined as follows. + +```scala +def whileLoop(cond: => Boolean)(body: => Unit): Unit = + if (cond) { body ; whileLoop(cond)(body) } else {} +``` + +## For Comprehensions and For Loops + +```ebnf +Expr1 ::= ‘for’ (‘(’ Enumerators ‘)’ | ‘{’ Enumerators ‘}’) + {nl} [‘yield’] Expr +Enumerators ::= Generator {semi Generator} +Generator ::= [‘case’] Pattern1 ‘<-’ Expr {[semi] Guard | semi Pattern1 ‘=’ Expr} +Guard ::= ‘if’ PostfixExpr +``` + +A _for loop_ `for (´\mathit{enums}\,´) ´e´` executes expression ´e´ for each binding generated by the enumerators ´\mathit{enums}´. +A _for comprehension_ `for (´\mathit{enums}\,´) yield ´e´` evaluates expression ´e´ for each binding generated by the enumerators ´\mathit{enums}´ and collects the results. +An enumerator sequence always starts with a generator; this can be followed by further generators, value definitions, or guards. 
+ +A _generator_ `´p´ <- ´e´` produces bindings from an expression ´e´ which is matched in some way against pattern ´p´. +Optionally, `case` can appear in front of a generator pattern, this has no meaning in Scala 2 but will be [required in Scala 3 if `p` is not irrefutable](https://docs.scala-lang.org/scala3/reference/changed-features/pattern-bindings.html). + +A _value definition_ `´p´ = ´e´` binds the value name ´p´ (or several names in a pattern ´p´) to the result of evaluating the expression ´e´. +A _guard_ `if ´e´` contains a boolean expression which restricts enumerated bindings. +The precise meaning of generators and guards is defined by translation to invocations of four methods: `map`, `withFilter`, `flatMap`, and `foreach`. +These methods can be implemented in different ways for different carrier types. + +The translation scheme is as follows. +In a first step, every generator `´p´ <- ´e´`, where ´p´ is not [irrefutable](08-pattern-matching.html#patterns) for the type of ´e´ is replaced by + +```scala +´p´ <- ´e´.withFilter { case ´p´ => true; case _ => false } +``` + +Then, the following rules are applied repeatedly until all comprehensions have been eliminated. + + - A for comprehension `for (´p´ <- ´e\,´) yield ´e'´` is translated to `´e´.map { case ´p´ => ´e'´ }`. + - A for loop `for (´p´ <- ´e\,´) ´e'´` is translated to `´e´.foreach { case ´p´ => ´e'´ }`. 
+ - A for comprehension + + ```scala + for (´p´ <- ´e´; ´p'´ <- ´e'; ...´) yield ´e''´ + ``` + + where `...` is a (possibly empty) sequence of generators, definitions, or guards, is translated to + + ```scala + ´e´.flatMap { case ´p´ => for (´p'´ <- ´e'; ...´) yield ´e''´ } + ``` + + - A for loop + + ```scala + for (´p´ <- ´e´; ´p'´ <- ´e'; ...´) ´e''´ + ``` + + where `...` is a (possibly empty) sequence of generators, definitions, or guards, is translated to + + ```scala + ´e´.foreach { case ´p´ => for (´p'´ <- ´e'; ...´) ´e''´ } + ``` + + - A generator `´p´ <- ´e´` followed by a guard `if ´g´` is translated to a single generator `´p´ <- ´e´.withFilter((´x_1, ..., x_n´) => ´g\,´)` where ´x_1, ..., x_n´ are the free variables of ´p´. + + - A generator `´p´ <- ´e´` followed by a value definition `´p'´ = ´e'´` is translated to the following generator of pairs of values, where ´x´ and ´x'´ are fresh names: + + ```scala + (´p´, ´p'´) <- for (´x @ p´ <- ´e´) yield { val ´x' @ p'´ = ´e'´; (´x´, ´x'´) } + ``` + +###### Example +The following code produces all pairs of numbers between ´1´ and ´n-1´ whose sums are prime. + +```scala +for { i <- 1 until n + j <- 1 until i + if isPrime(i+j) +} yield (i, j) +``` + +The for comprehension is translated to: + +```scala +(1 until n) + .flatMap { + case i => (1 until i) + .withFilter { j => isPrime(i+j) } + .map { case j => (i, j) } } +``` + +###### Example +For comprehensions can be used to express vector and matrix algorithms concisely. +For instance, here is a method to compute the transpose of a given matrix: + + + +```scala +def transpose[A](xss: Array[Array[A]]) = { + for (i <- Array.range(0, xss(0).length)) yield + for (xs <- xss) yield xs(i) +} +``` + +Here is a method to compute the scalar product of two vectors: + +```scala +def scalprod(xs: Array[Double], ys: Array[Double]) = { + var acc = 0.0 + for ((x, y) <- xs zip ys) acc = acc + x * y + acc +} +``` + +Finally, here is a method to compute the product of two matrices. 
+Compare with the [imperative version](#example-imperative-matrix-multiplication). + +```scala +def matmul(xss: Array[Array[Double]], yss: Array[Array[Double]]) = { + val ysst = transpose(yss) + for (xs <- xss) yield + for (yst <- ysst) yield + scalprod(xs, yst) +} +``` + +The code above makes use of the fact that `map`, `flatMap`, `withFilter`, and `foreach` are defined for instances of class `scala.Array`. + +## Return Expressions + +```ebnf +Expr1 ::= ‘return’ [Expr] +``` + +A _return expression_ `return ´e´` must occur inside the body of some enclosing user defined method. +The innermost enclosing method in a source program, ´m´, must have an explicitly declared result type, and the type of ´e´ must conform to it. + +The return expression evaluates the expression ´e´ and returns its value as the result of ´m´. +The evaluation of any statements or expressions following the return expression is omitted. +The type of a return expression is `scala.Nothing`. + +The expression ´e´ may be omitted. +The return expression `return` is type-checked and evaluated as if it were `return ()`. + +Returning from the method from within a nested function may be implemented by throwing and catching a `scala.runtime.NonLocalReturnControl`. +Any exception catches between the point of return and the enclosing methods might see and catch that exception. +A key comparison makes sure that this exception is only caught by the method instance which is terminated by the return. + +If the return expression is itself part of an anonymous function, it is possible that the enclosing method ´m´ has already returned before the return expression is executed. +In that case, the thrown `scala.runtime.NonLocalReturnControl` will not be caught, and will propagate up the call stack. + +## Throw Expressions + +```ebnf +Expr1 ::= ‘throw’ Expr +``` + +A _throw expression_ `throw ´e´` evaluates the expression ´e´. +The type of this expression must conform to `Throwable`. 
+If ´e´ evaluates to an exception reference, evaluation is aborted with the thrown exception.
+If ´e´ evaluates to `null`, evaluation is instead aborted with a `NullPointerException`.
+If there is an active [`try` expression](#try-expressions) which handles the thrown exception, evaluation resumes with the handler; otherwise the thread executing the `throw` is aborted.
+The type of a throw expression is `scala.Nothing`.
+
+## Try Expressions
+
+```ebnf
+Expr1 ::= ‘try’ Expr [‘catch’ Expr] [‘finally’ Expr]
+```
+
+A _try expression_ is of the form `try { ´b´ } catch ´h´` where the handler ´h´ is usually a [pattern matching anonymous function](08-pattern-matching.html#pattern-matching-anonymous-functions)
+
+```scala
+{ case ´p_1´ => ´b_1´ ... case ´p_n´ => ´b_n´ }
+```
+
+This expression is evaluated by evaluating the block ´b´.
+If evaluation of ´b´ does not cause an exception to be thrown, the result of ´b´ is returned.
+Otherwise the handler ´h´ is applied to the thrown exception.
+If the handler contains a case matching the thrown exception, the first such case is invoked.
+If the handler contains no case matching the thrown exception, the exception is re-thrown.
+More generally, if the handler is a `PartialFunction`, it is applied only if it is defined at the given exception.
+
+Let ´\mathit{pt}´ be the expected type of the try expression.
+The block ´b´ is expected to conform to ´\mathit{pt}´.
+The handler ´h´ is expected to conform to type `scala.Function[scala.Throwable, ´\mathit{pt}\,´]`.
+The type of the try expression is the [weak least upper bound](03-types.html#weak-conformance) of the type of ´b´ and the result type of ´h´.
+
+A try expression `try { ´b´ } finally ´e´` evaluates the block ´b´.
+If evaluation of ´b´ does not cause an exception to be thrown, the expression ´e´ is evaluated.
+If an exception is thrown during evaluation of ´e´, the evaluation of the try expression is aborted with the thrown exception.
+If no exception is thrown during evaluation of ´e´, the result of ´b´ is returned as the result of the try expression.
+
+If an exception is thrown during evaluation of ´b´, the finally block ´e´ is also evaluated.
+If another exception is thrown during evaluation of ´e´, evaluation of the try expression is aborted with that new exception.
+If no exception is thrown during evaluation of ´e´, the original exception thrown in ´b´ is re-thrown once evaluation of ´e´ has completed.
+The block ´b´ is expected to conform to the expected type of the try expression.
+The finally expression ´e´ is expected to conform to type `Unit`.
+
+A try expression `try { ´b´ } catch ´e_1´ finally ´e_2´` is a shorthand for `try { try { ´b´ } catch ´e_1´ } finally ´e_2´`.
+
+## Anonymous Functions
+
+```ebnf
+Expr ::= (Bindings | [‘implicit’] id | ‘_’) ‘=>’ Expr
+ResultExpr ::= (Bindings | ([‘implicit’] id | ‘_’) ‘:’ CompoundType) ‘=>’ Block
+Bindings ::= ‘(’ Binding {‘,’ Binding} ‘)’
+Binding ::= (id | ‘_’) [‘:’ Type]
+```
+
+The anonymous function of arity ´n´, `(´x_1´: ´T_1, ..., x_n´: ´T_n´) => e` maps parameters ´x_i´ of types ´T_i´ to a result given by expression ´e´.
+The scope of each formal parameter ´x_i´ is ´e´.
+Formal parameters must have pairwise distinct names.
+Type bindings can be omitted, in which case the compiler will attempt to infer valid bindings.
+
+Note: `() => ´e´` defines a nullary function (´n´ = 0), and not for example `(_: Unit) => ´e´`.
+
+In the case of a single untyped formal parameter, `(´x\,´) => ´e´` can be abbreviated to `´x´ => ´e´`.
+If an anonymous function `(´x´: ´T\,´) => ´e´` with a single typed parameter appears as the result expression of a block, it can be abbreviated to `´x´: ´T´ => e`.
+
+A formal parameter may also be a wildcard represented by an underscore `_`.
+In that case, a fresh name for the parameter is chosen arbitrarily.
+
+A named parameter of an anonymous function may be optionally preceded by an `implicit` modifier.
+In that case the parameter is labeled [`implicit`](07-implicits.html#implicit-parameters-and-views); however the parameter section itself does not count as an [implicit parameter section](07-implicits.html#implicit-parameters). +Hence, arguments to anonymous functions always have to be given explicitly. + +### Translation +If the expected type of the anonymous function is of the shape `scala.Function´n´[´S_1´, ..., ´S_n´, ´R\,´]`, or can be [SAM-converted](#sam-conversion) to such a function type, the type `´T_i´` of a parameter `´x_i´` can be omitted, as far as `´S_i´` is defined in the expected type, and `´T_i´ = ´S_i´` is assumed. +Furthermore, the expected type when type checking ´e´ is ´R´. + +If there is no expected type for the function literal, all formal parameter types `´T_i´` must be specified explicitly, and the expected type of ´e´ is undefined. +The type of the anonymous function is `scala.Function´n´[´T_1´, ..., ´T_n´, ´R\,´]`, where ´R´ is the [packed type](#expression-typing) of ´e´. +´R´ must be equivalent to a type which does not refer to any of the formal parameters ´x_i´. + +The eventual run-time value of an anonymous function is determined by the expected type: + - a subclass of one of the builtin function types, `scala.Function´n´[´S_1, ..., S_n´, ´R\,´]` (with ´S_i´ and ´R´ fully defined), + - a [single-abstract-method (SAM) type](#sam-conversion); + - `PartialFunction[´T´, ´U´]` + - some other type. + +The standard anonymous function evaluates in the same way as the following instance creation expression: + +```scala +new scala.Function´n´[´T_1, ..., T_n´, ´T´] { + def apply(´x_1´: ´T_1, ..., x_n´: ´T_n´): ´T´ = ´e´ +} +``` + +The same evaluation holds for a SAM type, except that the instantiated type is given by the SAM type, and the implemented method is the single abstract method member of this type. 
+ +The underlying platform may provide more efficient ways of constructing these instances, such as Java 8's `invokedynamic` bytecode and `LambdaMetaFactory` class. + +When a `PartialFunction` is required, an additional member `isDefinedAt` is synthesized, which simply returns `true`. +However, if the function literal has the shape `x => x match { $...$ }`, then `isDefinedAt` is derived from the pattern match in the following way: each case from the match expression evaluates to `true`, and if there is no default case, a default case is added that evaluates to `false`. +For more details on how that is implemented see ["Pattern Matching Anonymous Functions"](08-pattern-matching.html#pattern-matching-anonymous-functions). + +###### Example +Examples of anonymous functions: + +```scala +x => x // The identity function + +f => g => x => f(g(x)) // Curried function composition + +(x: Int, y: Int) => x + y // A summation function + +() => { count += 1; count } // The function which takes an + // empty parameter list ´()´, + // increments a non-local variable + // `count' and returns the new value. + +_ => 5 // The function that ignores its argument + // and always returns 5. +``` + +### Placeholder Syntax for Anonymous Functions + +```ebnf +SimpleExpr1 ::= ‘_’ +``` + +An expression (of syntactic category `Expr`) may contain embedded underscore symbols `_` at places where identifiers are legal. +Such an expression represents an anonymous function where subsequent occurrences of underscores denote successive parameters. + +Define an _underscore section_ to be an expression of the form `_:´T´` where ´T´ is a type, or else of the form `_`, provided the underscore does not appear as the expression part of a type ascription `_:´T´`. 
+ +An expression ´e´ of syntactic category `Expr` _binds_ an underscore section ´u´, if the following two conditions hold: (1) ´e´ properly contains ´u´, and (2) there is no other expression of syntactic category `Expr` which is properly contained in ´e´ and which itself properly contains ´u´. + +If an expression ´e´ binds underscore sections ´u_1, ..., u_n´, in this order, it is equivalent to the anonymous function `(´u'_1´, ... ´u'_n´) => ´e'´` where each ´u_i'´ results from ´u_i´ by replacing the underscore with a fresh identifier and ´e'´ results from ´e´ by replacing each underscore section ´u_i´ by ´u_i'´. + +###### Example +The anonymous functions in the left column use placeholder syntax. +Each of these is equivalent to the anonymous function on its right. + +| | | +|---------------------------|----------------------------| +|`_ + 1` | `x => x + 1` | +|`_ * _` | `(x1, x2) => x1 * x2` | +|`(_: Int) * 2` | `(x: Int) => (x: Int) * 2` | +|`if (_) x else y` | `z => if (z) x else y` | +|`_.map(f)` | `x => x.map(f)` | +|`_.map(_ + 1)` | `x => x.map(y => y + 1)` | + +## Constant Expressions + +Constant expressions are expressions that the Scala compiler can evaluate to a constant. +The definition of "constant expression" depends on the platform, but they include at least the expressions of the following forms: + +- A literal of a value class, such as an integer +- A string literal +- A class constructed with [`Predef.classOf`](12-the-scala-standard-library.html#the-predef-object) +- An element of an enumeration from the underlying platform +- A literal array, of the form `Array´(c_1, ..., c_n)´`, where all of the ´c_i´'s are themselves constant expressions +- An identifier defined by a [constant value definition](04-basic-declarations-and-definitions.html#value-declarations-and-definitions). 
+ +## Statements + +```ebnf +BlockStat ::= Import + | {Annotation} [‘implicit’] [‘lazy’] Def + | {Annotation} {LocalModifier} TmplDef + | Expr1 + | +TemplateStat ::= Import + | {Annotation} {Modifier} Def + | {Annotation} {Modifier} Dcl + | Expr + | +``` + +Statements occur as parts of blocks and templates. +A _statement_ can be an import, a definition or an expression, or it can be empty. +Statements used in the template of a class definition can also be declarations. +An expression that is used as a statement can have an arbitrary value type. +An expression statement ´e´ is evaluated by evaluating ´e´ and discarding the result of the evaluation. + + + +Block statements may be definitions which bind local names in the block. +The only modifier allowed in all block-local definitions is `implicit`. +When prefixing a class or object definition, modifiers `abstract`, `final`, and `sealed` are also permitted. + +Evaluation of a statement sequence entails evaluation of the statements in the order they are written. + +## Implicit Conversions + +Implicit conversions can be applied to expressions whose type does not match their expected type, to qualifiers in selections, and to unapplied methods. +The available implicit conversions are given in the next two sub-sections. + +### Value Conversions + +The following seven implicit conversions can be applied to an expression ´e´ which has some value type ´T´ and which is type-checked with some expected type ´\mathit{pt}´. + +###### Static Overloading Resolution +If an expression denotes several possible members of a class, [overloading resolution](#overloading-resolution) is applied to pick a unique member. 
+ +###### Type Instantiation +An expression ´e´ of polymorphic type + +```scala +[´a_1´ >: ´L_1´ <: ´U_1, ..., a_n´ >: ´L_n´ <: ´U_n´]´T´ +``` + +which does not appear as the function part of a type application is converted to a type instance of ´T´ by determining with [local type inference](#local-type-inference) instance types `´T_1, ..., T_n´` for the type variables `´a_1, ..., a_n´` and implicitly embedding ´e´ in the [type application](#type-applications) `´e´[´T_1, ..., T_n´]`. + +###### Numeric Widening +If ´e´ has a primitive number type which [weakly conforms](03-types.html#weak-conformance) to the expected type, it is widened to the expected type using one of the numeric conversion methods `toShort`, `toChar`, `toInt`, `toLong`, `toFloat`, `toDouble` defined [in the standard library](12-the-scala-standard-library.html#numeric-value-types). + +Since conversions from `Int` to `Float` and from `Long` to `Float` or `Double` may incur a loss of precision, those implicit conversions are deprecated. +The conversion is permitted for literals if the original value can be recovered, that is, if conversion back to the original type produces the original value. + +###### Numeric Literal Narrowing +If the expected type is `Byte`, `Short` or `Char`, and the expression ´e´ is an integer literal fitting in the range of that type, it is converted to the same literal in that type. + +###### Value Discarding +If ´e´ has some value type and the expected type is `Unit`, ´e´ is converted to the expected type by embedding it in the term `{ ´e´; () }`. 
+ +###### SAM conversion +An expression `(p1, ..., pN) => body` of function type `(T1, ..., TN) => T` is sam-convertible to the expected type `S` if the following holds: + - the class `C` of `S` declares an abstract method `m` with signature `(p1: A1, ..., pN: AN): R`; + - besides `m`, `C` must not declare or inherit any other deferred value members; + - the method `m` must have a single argument list; + - there must be a type `U` that is a subtype of `S`, so that the expression `new U { final def m(p1: A1, ..., pN: AN): R = body }` is well-typed (conforming to the expected type `S`); + - for the purpose of scoping, `m` should be considered a static member (`U`'s members are not in scope in `body`); + - `(A1, ..., AN) => R` is a subtype of `(T1, ..., TN) => T` (satisfying this condition drives type inference of unknown type parameters in `S`); + +Note that a function literal that targets a SAM is not necessarily compiled to the above instance creation expression. +This is platform-dependent. + +It follows that: + - if class `C` defines a constructor, it must be accessible and must define exactly one, empty, argument list; + - class `C` cannot be `final` or `sealed` (for simplicity we ignore the possibility of SAM conversion in the same compilation unit as the sealed class); + - `m` cannot be polymorphic; + - it must be possible to derive a fully-defined type `U` from `S` by inferring any unknown type parameters of `C`. + +Finally, we impose some implementation restrictions (these may be lifted in future releases): + - `C` must not be nested or local (it must not capture its environment, as that results in a nonzero-argument constructor) + - `C`'s constructor must not have an implicit argument list (this simplifies type inference); + - `C` must not declare a self type (this simplifies type inference); + - `C` must not be `@specialized`. 
+ +###### View Application +If none of the previous conversions applies, and ´e´'s type does not conform to the expected type ´\mathit{pt}´, it is attempted to convert ´e´ to the expected type with a [view](07-implicits.html#views). + +###### Selection on `Dynamic` +If none of the previous conversions applies, and ´e´ is a prefix of a selection ´e.x´, and ´e´'s type conforms to class `scala.Dynamic`, then the selection is rewritten according to the rules for [dynamic member selection](#dynamic-member-selection). + +### Method Conversions + +The following four implicit conversions can be applied to methods which are not applied to some argument list. + +###### Evaluation +A parameterless method ´m´ of type `=> ´T´` is always converted to type ´T´ by evaluating the expression to which ´m´ is bound. + +###### Implicit Application +If the method takes only implicit parameters, implicit arguments are passed following the rules [here](07-implicits.html#implicit-parameters). + +###### Eta Expansion +Otherwise, if the method is not a constructor, and the expected type ´\mathit{pt}´ is a function type, or, for methods of non-zero arity, a type [sam-convertible](#sam-conversion) to a function type, ´(\mathit{Ts}') \Rightarrow T'´, [eta-expansion](#eta-expansion-section) is performed on the expression ´e´. + +(The exception for zero-arity methods is to avoid surprises due to unexpected sam conversion.) + +###### Empty Application +Otherwise, if ´e´ has method type ´()T´, it is implicitly applied to the empty argument list, yielding ´e()´. + +### Overloading Resolution + +If an identifier or selection ´e´ references several members of a class, the context of the reference is used to identify a unique member. +The way this is done depends on whether or not ´e´ is used as a method. +Let ´\mathscr{A}´ be the set of members referenced by ´e´. + +Assume first that ´e´ appears as a function in an application, as in `´e´(´e_1´, ..., ´e_m´)`. 
+ +One first determines the set of methods that are potentially [applicable](#method-applications) based on the _shape_ of the arguments. + +The *shape* of an argument expression ´e´, written ´\mathit{shape}(e)´, is a type that is defined as follows: + - For a function expression `(´p_1´: ´T_1, ..., p_n´: ´T_n´) => ´b´: (Any, ..., Any) => ´\mathit{shape}(b)´`, where `Any` occurs ´n´ times in the argument type. + - For a pattern-matching anonymous function definition `{ case ... }`: `PartialFunction[Any, Nothing]`. + - For a named argument `´n´ = ´e´`: ´\mathit{shape}(e)´. + - For all other expressions: `Nothing`. + +Let ´\mathscr{B}´ be the set of alternatives in ´\mathscr{A}´ that are [_applicable_](#method-applications) to expressions ´(e_1, ..., e_n)´ of types ´(\mathit{shape}(e_1), ..., \mathit{shape}(e_n))´. +If there is precisely one alternative in ´\mathscr{B}´, that alternative is chosen. + +Otherwise, let ´S_1, ..., S_m´ be the list of types obtained by typing each argument as follows. + +Normally, an argument is typed without an expected type, except when all alternatives explicitly specify the same parameter type for this argument (a missing parameter type, due to e.g. arity differences, is taken as `NoType`, thus resorting to no expected type), or when trying to propagate more type information to aid inference of higher-order function parameter types, as explained next. + +The intuition for higher-order function parameter type inference is that all arguments must be of a function-like type (`PartialFunction`, `FunctionN` or some equivalent [SAM type](#sam-conversion)), which in turn must define the same set of higher-order argument types, so that they can safely be used as the expected type of a given argument of the overloaded method, without unduly ruling out any alternatives. 
+The intent is not to steer overloading resolution, but to preserve enough type information to steer type inference of the arguments (a function literal or eta-expanded method) to this overloaded method. + +Note that the expected type drives eta-expansion (not performed unless a function-like type is expected), as well as inference of omitted parameter types of function literals. + +More precisely, an argument `´e_i´` is typed with an expected type that is derived from the `´i´`th argument type found in each alternative (call these `´T_{ij}´` for alternative `´j´` and argument position `´i´`) when all `´T_{ij}´` are function types `´(A_{1j},..., A_{nj}) => ?´` (or the equivalent `PartialFunction`, or SAM) of some arity `´n´`, and their argument types `´A_{kj}´` are identical across all overloads `´j´` for a given `´k´`. +Then, the expected type for `´e_i´` is derived as follows: + - we use `´PartialFunction[A_{1j},..., A_{nj}, ?]´` if for some overload `´j´`, `´T_{ij}´`'s type symbol is `PartialFunction`; + - else, if for some `´j´`, `´T_{ij}´` is `FunctionN`, the expected type is `´FunctionN[A_{1j},..., A_{nj}, ?]´`; + - else, if for all `´j´`, `´T_{ij}´` is a SAM type of the same class, defining argument types `´A_{1j},..., A_{nj}´` (and a potentially varying result type), the expected type encodes these argument types and the SAM class. + +For every member ´m´ in ´\mathscr{B}´ one determines whether it is applicable to expressions (´e_1, ..., e_m´) of types ´S_1, ..., S_m´. + +It is an error if none of the members in ´\mathscr{B}´ is applicable. +If there is one single applicable alternative, that alternative is chosen. +Otherwise, let ´\mathscr{CC}´ be the set of applicable alternatives which don't employ any default argument in the application to ´e_1, ..., e_m´. + +It is again an error if ´\mathscr{CC}´ is empty. 
+Otherwise, one chooses the _most specific_ alternative among the alternatives in ´\mathscr{CC}´, according to the following definition of being "as specific as", and "more specific than": + + + +- A parameterized method ´m´ of type `(´p_1:T_1, ..., p_n:T_n´)´U´` is _as specific as_ some other member ´m'´ of type ´S´ if ´m'´ is [applicable](#method-applications) to arguments `(´p_1, ..., p_n´)` of types ´T_1, ..., T_n´. + If the last parameter `´p_n´` has a vararg type `´T*´`, then `m` must be applicable to arbitrary numbers of `´T´` parameters (which implies that it must be a varargs method as well). +- A polymorphic method of type `[´a_1´ >: ´L_1´ <: ´U_1, ..., a_n´ >: ´L_n´ <: ´U_n´]´T´` is as specific as some other member ´m'´ of type ´S´ if ´T´ is as specific as ´S´ under the assumption that for ´i = 1, ..., n´ each ´a_i´ is an abstract type name bounded from below by ´L_i´ and from above by ´U_i´. +- A member of any other type ´T´ is: + - always as specific as a parameterized method or a polymorphic method. + - as specific as a member ´m'´ of any other type ´S´ if ´T´ is [compatible](03-types.html#compatibility) with ´S´. + +The _relative weight_ of an alternative ´A´ over an alternative ´B´ is a +number from 0 to 2, defined as the sum of + +- 1 if ´A´ is as specific as ´B´, 0 otherwise, and +- 1 if ´A´ is defined in a class or object which is derived from the class or object defining ´B´, 0 otherwise. + +A class or object ´C´ is _derived_ from a class or object ´D´ if one of the following holds: + +- ´C´ is a subclass of ´D´, or +- ´C´ is a companion object of a class derived from ´D´, or +- ´D´ is a companion object of a class from which ´C´ is derived. + +An alternative ´A´ is _more specific_ than an alternative ´B´ if the relative weight of ´A´ over ´B´ is greater than the relative weight of ´B´ over ´A´. + +It is an error if there is no alternative in ´\mathscr{CC}´ which is more specific than all other alternatives in ´\mathscr{CC}´. 
+ +Assume next that ´e´ appears as a method in a type application, as in `´e´[´\mathit{targs}\,´]`. +Then all alternatives in ´\mathscr{A}´ which take the same number of type parameters as there are type arguments in ´\mathit{targs}´ are chosen. +It is an error if no such alternative exists. +If there are several such alternatives, overloading resolution is applied again to the whole expression `´e´[´\mathit{targs}\,´]`. + +Assume finally that ´e´ does not appear as a method in either an application or a type application. +If an expected type is given, let ´\mathscr{B}´ be the set of those alternatives in ´\mathscr{A}´ which are [compatible](03-types.html#compatibility) to it. +Otherwise, let ´\mathscr{B}´ be the same as ´\mathscr{A}´. +In this last case we choose the most specific alternative among all alternatives in ´\mathscr{B}´. +It is an error if there is no alternative in ´\mathscr{B}´ which is more specific than all other alternatives in ´\mathscr{B}´. + +###### Example +Consider the following definitions: + +```scala +class A extends B {} +def f(x: B, y: B) = ... +def f(x: A, y: B) = ... +val a: A +val b: B +``` + +Then the application `f(b, b)` refers to the first definition of ´f´ whereas the application `f(a, a)` refers to the second. +Assume now we add a third overloaded definition + +```scala +def f(x: B, y: A) = ... +``` + +Then the application `f(a, a)` is rejected for being ambiguous, since no most specific applicable signature exists. + +### Local Type Inference + +Local type inference infers type arguments to be passed to expressions of polymorphic type. +Say ´e´ is of type [´a_1´ >: ´L_1´ <: ´U_1, ..., a_n´ >: ´L_n´ <: ´U_n´]´T´ and no explicit type parameters are given. + +Local type inference converts this expression to a type application `´e´[´T_1, ..., T_n´]`. +The choice of the type arguments ´T_1, ..., T_n´ depends on the context in which the expression appears and on the expected type ´\mathit{pt}´. +There are three cases. 
+ +###### Case 1: Selections +If the expression appears as the prefix of a selection with a name ´x´, then type inference is _deferred_ to the whole expression ´e.x´. +That is, if ´e.x´ has type ´S´, it is now treated as having type [´a_1´ >: ´L_1´ <: ´U_1, ..., a_n´ >: ´L_n´ <: ´U_n´]´S´, and local type inference is applied in turn to infer type arguments for ´a_1, ..., a_n´, using the context in which ´e.x´ appears. + +###### Case 2: Values +If the expression ´e´ appears as a value without being applied to value arguments, the type arguments are inferred by solving a constraint system which relates the expression's type ´T´ with the expected type ´\mathit{pt}´. +Without loss of generality we can assume that ´T´ is a value type; if it is a method type we apply [eta-expansion](#eta-expansion-section) to convert it to a function type. +Solving means finding a substitution ´\sigma´ of types ´T_i´ for the type parameters ´a_i´ such that + +- None of the inferred types ´T_i´ is a [singleton type](03-types.html#singleton-types) unless it is a singleton type corresponding to an object or a constant value definition or the corresponding bound ´U_i´ is a subtype of `scala.Singleton`. +- All type parameter bounds are respected, i.e. ´\sigma L_i <: \sigma a_i´ and ´\sigma a_i <: \sigma U_i´ for ´i = 1, ..., n´. +- The expression's type conforms to the expected type, i.e. ´\sigma T <: \sigma \mathit{pt}´. + +It is a compile time error if no such substitution exists. +If several substitutions exist, local-type inference will choose for each type variable ´a_i´ a minimal or maximal type ´T_i´ of the solution space. +A _maximal_ type ´T_i´ will be chosen if the type parameter ´a_i´ appears [contravariantly](04-basic-declarations-and-definitions.html#variance-annotations) in the type ´T´ of the expression. +A _minimal_ type ´T_i´ will be chosen in all other situations, i.e. if the variable appears covariantly, non-variantly or not at all in the type ´T´. 
+We call such a substitution an _optimal solution_ of the given constraint system for the type ´T´. + +###### Case 3: Methods +The last case applies if the expression ´e´ appears in an application ´e(d_1, ..., d_m)´. +In that case ´T´ is a method type ´(p_1:R_1, ..., p_m:R_m)T'´. +Without loss of generality we can assume that the result type ´T'´ is a value type; if it is a method type we apply [eta-expansion](#eta-expansion-section) to +convert it to a function type. +One computes first the types ´S_j´ of the argument expressions ´d_j´, using two alternative schemes. +Each argument expression ´d_j´ is typed first with the expected type ´R_j´, in which the type parameters ´a_1, ..., a_n´ are taken as type constants. +If this fails, the argument ´d_j´ is typed instead with an expected type ´R_j'´ which results from ´R_j´ by replacing every type parameter in ´a_1, ..., a_n´ with _undefined_. + +In a second step, type arguments are inferred by solving a constraint system which relates the method's type with the expected type ´\mathit{pt}´ and the argument types ´S_1, ..., S_m´. +Solving the constraint system means finding a substitution ´\sigma´ of types ´T_i´ for the type parameters ´a_i´ such that + +- None of the inferred types ´T_i´ is a [singleton type](03-types.html#singleton-types) unless it is a singleton type corresponding to an object or a constant value definition or the corresponding bound ´U_i´ is a subtype of `scala.Singleton`. +- All type parameter bounds are respected, i.e. ´\sigma L_i <: \sigma a_i´ and ´\sigma a_i <: \sigma U_i´ for ´i = 1, ..., n´. +- The method's result type ´T'´ conforms to the expected type, i.e. ´\sigma T' <: \sigma \mathit{pt}´. +- Each argument type [weakly conforms](03-types.html#weak-conformance) to the corresponding formal parameter type, i.e. ´\sigma S_j <:_w \sigma R_j´ for ´j = 1, ..., m´. + +It is a compile time error if no such substitution exists. 
+If several solutions exist, an optimal one for the type ´T'´ is chosen. + +All or parts of an expected type ´\mathit{pt}´ may be undefined. +The rules for [conformance](03-types.html#conformance) are extended to this case by adding the rule that for any type ´T´ the following two statements are always true: ´\mathit{undefined} <: T´ and ´T <: \mathit{undefined}´ + +It is possible that no minimal or maximal solution for a type variable exists, in which case a compile-time error results. +Because ´<:´ is a pre-order, it is also possible that a solution set has several optimal solutions for a type. +In that case, a Scala compiler is free to pick any one of them. + +###### Example +Consider the two methods: + +```scala +def cons[A](x: A, xs: List[A]): List[A] = x :: xs +def nil[B]: List[B] = Nil +``` + +and the definition + +```scala +val xs = cons(1, nil) +``` + +The application of `cons` is typed with an undefined expected type. +This application is completed by local type inference to `cons[Int](1, nil)`. +Here, one uses the following reasoning to infer the type argument `Int` for the type parameter `a`: + +First, the argument expressions are typed. The first argument `1` has type `Int` whereas the second argument `nil` is itself polymorphic. +One tries to type-check `nil` with an expected type `List[a]`. +This leads to the constraint system + +```scala +List[b?] <: List[a] +``` + +where we have labeled `b?` with a question mark to indicate that it is a variable in the constraint system. +Because class `List` is covariant, the optimal solution of this constraint is + +```scala +b = scala.Nothing +``` + +In a second step, one solves the following constraint system for the type parameter `a` of `cons`: + +```scala +Int <: a? +List[scala.Nothing] <: List[a?] +List[a?] <: ´\mathit{undefined}´ +``` + +The optimal solution of this constraint system is + +```scala +a = Int +``` + +so `Int` is the type inferred for `a`. 
+ +###### Example + +Consider now the definition + +```scala +val ys = cons("abc", xs) +``` + +where `xs` is defined of type `List[Int]` as before. +In this case local type inference proceeds as follows. + +First, the argument expressions are typed. +The first argument `"abc"` has type `String`. +The second argument `xs` is first tried to be typed with expected type `List[a]`. +This fails,as `List[Int]` is not a subtype of `List[a]`. +Therefore, the second strategy is tried; `xs` is now typed with expected type `List[´\mathit{undefined}´]`. +This succeeds and yields the argument type `List[Int]`. + +In a second step, one solves the following constraint system for the type parameter `a` of `cons`: + +```scala +String <: a? +List[Int] <: List[a?] +List[a?] <: ´\mathit{undefined}´ +``` + +The optimal solution of this constraint system is + +```scala +a = scala.Any +``` + +so `scala.Any` is the type inferred for `a`. + +### Eta Expansion + +_Eta-expansion_ converts an expression of method type to an equivalent expression of function type. +It proceeds in two steps. + +First, one identifies the maximal sub-expressions of ´e´; let's say these are ´e_1, ..., e_m´. +For each of these, one creates a fresh name ´x_i´. +Let ´e'´ be the expression resulting from replacing every maximal subexpression ´e_i´ in ´e´ by the corresponding fresh name ´x_i´. +Second, one creates a fresh name ´y_i´ for every argument type ´T_i´ of the method (´i = 1 , ..., n´). +The result of eta-conversion is then: + +```scala +{ val ´x_1´ = ´e_1´; + ... + val ´x_m´ = ´e_m´; + (´y_1: T_1, ..., y_n: T_n´) => ´e'´(´y_1, ..., y_n´) +} +``` + +The behavior of [call-by-name parameters](#function-applications) is preserved under eta-expansion: the corresponding actual argument expression, a sub-expression of parameterless method type, is not evaluated in the expanded block. + +### Dynamic Member Selection + +The standard Scala library defines a marker trait `scala.Dynamic`. 
+Subclasses of this trait are able to intercept selections and applications on their instances by defining methods of the names `applyDynamic`, `applyDynamicNamed`, `selectDynamic`, and `updateDynamic`. + +The following rewrites are performed, assuming ´e´'s type conforms to `scala.Dynamic`, and the original expression does not type check under the normal rules, as specified fully in the relevant subsection of [implicit conversion](#dynamic-member-selection): + + * `e.m[Ti](xi)` becomes `e.applyDynamic[Ti]("m")(xi)` + * `e.m[Ti]` becomes `e.selectDynamic[Ti]("m")` + * `e.m = x` becomes `e.updateDynamic("m")(x)` + +If any arguments are named in the application (one of the `xi` is of the shape `arg = x`), their name is preserved as the first component of the pair passed to `applyDynamicNamed` (for missing names, `""` is used): + + * `e.m[Ti](argi = xi)` becomes `e.applyDynamicNamed[Ti]("m")(("argi", xi))` + +Finally: + + * `e.m(x) = y` becomes `e.selectDynamic("m").update(x, y)` + +None of these methods are actually defined in the `scala.Dynamic`, so that users are free to define them with or without type parameters, or implicit arguments. diff --git a/docs/_spec/07-implicits.md b/docs/_spec/07-implicits.md new file mode 100644 index 000000000000..2cd80f227cd4 --- /dev/null +++ b/docs/_spec/07-implicits.md @@ -0,0 +1,407 @@ +--- +title: Implicits +layout: default +chapter: 7 +--- + +# Implicits + +## The Implicit Modifier + +```ebnf +LocalModifier ::= ‘implicit’ +ParamClauses ::= {ParamClause} [nl] ‘(’ ‘implicit’ Params ‘)’ +``` + +Template members and parameters labeled with an `implicit` modifier can be passed to [implicit parameters](#implicit-parameters) and can be used as implicit conversions called [views](#views). +The `implicit` modifier is illegal for all type members, as well as for [top-level objects](09-top-level-definitions.html#packagings). 
+ +###### Example Monoid + +The following code defines an abstract class of monoids and two concrete implementations, `StringMonoid` and `IntMonoid`. +The two implementations are marked implicit. + +```scala +abstract class Monoid[A] extends SemiGroup[A] { + def unit: A + def add(x: A, y: A): A +} +object Monoids { + implicit object stringMonoid extends Monoid[String] { + def add(x: String, y: String): String = x.concat(y) + def unit: String = "" + } + implicit object intMonoid extends Monoid[Int] { + def add(x: Int, y: Int): Int = x + y + def unit: Int = 0 + } +} +``` + +## Implicit Parameters + +An _implicit parameter list_ `(implicit ´p_1´,...,´p_n´)` of a method marks the parameters ´p_1, ..., p_n´ as implicit. +A method or constructor can have only one implicit parameter list, and it must be the last parameter list given. + +A method with implicit parameters can be applied to arguments just like a normal method. +In this case the `implicit` label has no effect. +However, if such a method misses arguments for its implicit parameters, such arguments will be automatically provided. + +The actual arguments that are eligible to be passed to an implicit parameter of type ´T´ fall into two categories. +First, eligible are all identifiers ´x´ that can be accessed at the point of the method call without a prefix and that denote an [implicit definition](#the-implicit-modifier) or an implicit parameter. +To be accessible without a prefix, an identifier must be a local name, a member of an enclosing template or a name introduced by an [import clause](04-basic-declarations-and-definitions.html#import-clauses). +If there are no eligible identifiers under this rule, then, second, eligible are also all `implicit` members of some object that belongs to the implicit scope of the implicit parameter's type, ´T´. 
+ +The _implicit scope_ of a type ´T´ consists of all [companion modules](05-classes-and-objects.html#object-definitions) of classes that are associated with the implicit parameter's type. +Here, we say a class ´C´ is _associated_ with a type ´T´ if it is a [base class](05-classes-and-objects.html#class-linearization) of some part of ´T´. + +The _parts_ of a type ´T´ are: + +- if ´T´ is a compound type `´T_1´ with ... with ´T_n´`, the union of the parts of ´T_1, ..., T_n´, as well as ´T´ itself; +- if ´T´ is a parameterized type `´S´[´T_1, ..., T_n´]`, the union of the parts of ´S´ and ´T_1, ..., T_n´; +- if ´T´ is a singleton type `´p´.type`, the parts of the type of ´p´; +- if ´T´ is a type projection `´S´#´U´`, the parts of ´S´ as well as ´T´ itself; +- if ´T´ is a type alias, the parts of its expansion; +- if ´T´ is an abstract type, the parts of its upper bound; +- if ´T´ denotes an implicit conversion to a type with a method with argument types ´T_1, ..., T_n´ and result type ´U´, the union of the parts of ´T_1, ..., T_n´ and ´U´; +- in all other cases, just ´T´ itself. + +Note that packages are internally represented as classes with companion modules to hold the package members. +Thus, implicits defined in a package object are part of the implicit scope of a type prefixed by that package. + +If there are several eligible arguments which match the implicit parameter's type, a most specific one will be chosen using the rules of static [overloading resolution](06-expressions.html#overloading-resolution). +If the parameter has a default argument and no implicit argument can be found the default argument is used. + +###### Example +Assuming the classes from the [`Monoid` example](#example-monoid), here is a method which computes the sum of a list of elements using the monoid's `add` and `unit` operations. 
+ +```scala +def sum[A](xs: List[A])(implicit m: Monoid[A]): A = + if (xs.isEmpty) m.unit + else m.add(xs.head, sum(xs.tail)) +``` + +The monoid in question is marked as an implicit parameter, and can therefore be inferred based on the type of the list. +Consider for instance the call `sum(List(1, 2, 3))` in a context where `stringMonoid` and `intMonoid` are visible. +We know that the formal type parameter `a` of `sum` needs to be instantiated to `Int`. +The only eligible object which matches the implicit formal parameter type `Monoid[Int]` is `intMonoid` so this object will be passed as implicit parameter. + +This discussion also shows that implicit parameters are inferred after any type arguments are [inferred](06-expressions.html#local-type-inference). + +Implicit methods can themselves have implicit parameters. +An example is the following method from module `scala.List`, which injects lists into the `scala.Ordered` class, provided the element type of the list is also convertible to this type. + +```scala +implicit def list2ordered[A](x: List[A]) + (implicit elem2ordered: A => Ordered[A]): Ordered[List[A]] = + ... +``` + +Assume in addition a method + +```scala +implicit def int2ordered(x: Int): Ordered[Int] +``` + +that injects integers into the `Ordered` class. +We can now define a `sort` method over ordered lists: + +```scala +def sort[A](xs: List[A])(implicit a2ordered: A => Ordered[A]) = ... +``` + +We can apply `sort` to a list of lists of integers `yss: List[List[Int]]` as follows: + +```scala +sort(yss) +``` + +The call above will be completed by passing two nested implicit arguments: + +```scala +sort(yss)((xs: List[Int]) => list2ordered[Int](xs)(int2ordered)) +``` + +The possibility of passing implicit arguments to implicit arguments raises the possibility of an infinite recursion. 
+For instance, one might try to define the following method, which injects _every_ type into the `Ordered` class: + +```scala +implicit def magic[A](x: A)(implicit a2ordered: A => Ordered[A]): Ordered[A] = + a2ordered(x) +``` + +Now, if one tried to apply `sort` to an argument `arg` of a type that did not have another injection into the `Ordered` class, one would obtain an infinite expansion: + +```scala +sort(arg)(x => magic(x)(x => magic(x)(x => ... ))) +``` + +Such infinite expansions should be detected and reported as errors, however to support the deliberate implicit construction of recursive values we allow implicit arguments to be marked as by-name. +At call sites recursive uses of implicit values are permitted if they occur in an implicit by-name argument. + +Consider the following example, + +```scala +trait Foo { + def next: Foo +} + +object Foo { + implicit def foo(implicit rec: Foo): Foo = + new Foo { def next = rec } +} + +val foo = implicitly[Foo] +assert(foo eq foo.next) +``` + +As with the `magic` case above this diverges due to the recursive implicit argument `rec` of method `foo`. +If we mark the implicit argument as by-name, + +```scala +trait Foo { + def next: Foo +} + +object Foo { + implicit def foo(implicit rec: => Foo): Foo = + new Foo { def next = rec } +} + +val foo = implicitly[Foo] +assert(foo eq foo.next) +``` + +the example compiles with the assertion successful. + +When compiled, recursive by-name implicit arguments of this sort are extracted out as val members of a local synthetic object at call sites as follows, + +```scala +val foo: Foo = scala.Predef.implicitly[Foo]( + { + object LazyDefns$1 { + val rec$1: Foo = Foo.foo(rec$1) + // ^^^^^ + // recursive knot tied here + } + LazyDefns$1.rec$1 + } +) +assert(foo eq foo.next) +``` + +Note that the recursive use of `rec$1` occurs within the by-name argument of `foo` and is consequently deferred. 
+The desugaring matches what a programmer would do to construct such a recursive value explicitly. + +To prevent infinite expansions, such as the `magic` example above, the compiler keeps track of a stack of “open implicit types” for which implicit arguments are currently being searched. +Whenever an implicit argument for type ´T´ is searched, ´T´ is added to the stack paired with the implicit definition which produces it, and whether it was required to satisfy a by-name implicit argument or not. +The type is removed from the stack once the search for the implicit argument either definitely fails or succeeds. +Every time a type is about to be added to the stack, it is checked against existing entries which were produced by the same implicit definition and then, + ++ if it is equivalent to some type which is already on the stack and there is a by-name argument between that entry and the top of the stack. +In this case the search for that type succeeds immediately and the implicit argument is compiled as a recursive reference to the found argument. +That argument is added as an entry in the synthesized implicit dictionary if it has not already been added. ++ otherwise if the _core_ of the type _dominates_ the core of a type already on the stack, then the implicit expansion is said to _diverge_ and the search for that type fails immediately. ++ otherwise it is added to the stack paired with the implicit definition which produces it. +Implicit resolution continues with the implicit arguments of that definition (if any). + +Here, the _core type_ of ´T´ is ´T´ with aliases expanded, top-level type [annotations](11-annotations.html#user-defined-annotations) and [refinements](03-types.html#compound-types) removed, and occurrences of top-level existentially bound variables replaced by their upper bounds.
+ +A core type ´T´ _dominates_ a type ´U´ if ´T´ is [equivalent](03-types.html#equivalence) to ´U´, or if the top-level type constructors of ´T´ and ´U´ have a common element and ´T´ is more complex than ´U´ and the _covering sets_ of ´T´ and ´U´ are equal. + +The set of _top-level type constructors_ ´\mathit{ttcs}(T)´ of a type ´T´ depends on the form of the type: + +- For a type designator, ´\mathit{ttcs}(p.c) ~=~ \{c\}´; +- For a parameterized type, ´\mathit{ttcs}(p.c[\mathit{targs}]) ~=~ \{c\}´; +- For a singleton type, ´\mathit{ttcs}(p.type) ~=~ \mathit{ttcs}(T)´, provided ´p´ has type ´T´; +- For a compound type, `´\mathit{ttcs}(T_1´ with ... with ´T_n)´` ´~=~ \mathit{ttcs}(T_1) \cup ... \cup \mathit{ttcs}(T_n)´. + +The _complexity_ ´\operatorname{complexity}(T)´ of a core type is an integer which also depends on the form of the type: + +- For a type designator, ´\operatorname{complexity}(p.c) ~=~ 1 + \operatorname{complexity}(p)´ +- For a parameterized type, ´\operatorname{complexity}(p.c[\mathit{targs}]) ~=~ 1 + \Sigma \operatorname{complexity}(\mathit{targs})´ +- For a singleton type denoting a package ´p´, ´\operatorname{complexity}(p.type) ~=~ 0´ +- For any other singleton type, ´\operatorname{complexity}(p.type) ~=~ 1 + \operatorname{complexity}(T)´, provided ´p´ has type ´T´; +- For a compound type, `´\operatorname{complexity}(T_1´ with ... with ´T_n)´` ´= \Sigma\operatorname{complexity}(T_i)´ + +The _covering set_ ´\mathit{cs}(T)´ of a type ´T´ is the set of type designators mentioned in a type. 
+For example, given the following, + +```scala +type A = List[(Int, Int)] +type B = List[(Int, (Int, Int))] +type C = List[(Int, String)] +``` + +the corresponding covering sets are: + +- ´\mathit{cs}(A)´: List, Tuple2, Int +- ´\mathit{cs}(B)´: List, Tuple2, Int +- ´\mathit{cs}(C)´: List, Tuple2, Int, String + +###### Example +When typing `sort(xs)` for some list `xs` of type `List[List[List[Int]]]`, the sequence of types for which implicit arguments are searched is + +```scala +List[List[Int]] => Ordered[List[List[Int]]], +List[Int] => Ordered[List[Int]], +Int => Ordered[Int] +``` + +All types share the common type constructor `scala.Function1`, but the complexity of each new type is lower than the complexity of the previous types. +Hence, the code typechecks. + +###### Example +Let `ys` be a list of some type which cannot be converted to `Ordered`. +For instance: + +```scala +val ys = List(new IllegalArgumentException, new ClassCastException, new Error) +``` + +Assume that the definition of `magic` above is in scope. +Then the sequence of types for which implicit arguments are searched is + +```scala +Throwable => Ordered[Throwable], +Throwable => Ordered[Throwable], +... +``` + +Since the second type in the sequence is equal to the first, the compiler will issue an error signalling a divergent implicit expansion. + + +## Views + +Implicit parameters and methods can also define implicit conversions called views. +A _view_ from type ´S´ to type ´T´ is defined by an implicit value which has function type `´S´ => ´T´` or `(=> ´S´) => ´T´` or by a method convertible to a value of that type. + +Views are applied in three situations: + +1. If an expression ´e´ is of type ´T´, and ´T´ does not conform to the expression's expected type ´\mathit{pt}´. +In this case an implicit ´v´ is searched which is applicable to ´e´ and whose result type conforms to ´\mathit{pt}´. 
+The search proceeds as in the case of implicit parameters, where the implicit scope is the one of `´T´ => ´\mathit{pt}´`. +If such a view is found, the expression ´e´ is converted to `´v´(´e´)`. +1. In a selection ´e.m´ with ´e´ of type ´T´, if the selector ´m´ does not denote an accessible member of ´T´. +In this case, a view ´v´ is searched which is applicable to ´e´ and whose result contains a member named ´m´. +The search proceeds as in the case of implicit parameters, where the implicit scope is the one of ´T´. +If such a view is found, the selection ´e.m´ is converted to `´v´(´e´).´m´`. +1. In a selection ´e.m(\mathit{args})´ with ´e´ of type ´T´, if the selector ´m´ denotes some member(s) of ´T´, but none of these members is applicable to the arguments ´\mathit{args}´. +In this case a view ´v´ is searched which is applicable to ´e´ and whose result contains a method ´m´ which is applicable to ´\mathit{args}´. +The search proceeds as in the case of implicit parameters, where the implicit scope is the one of ´T´. If such a view is found, the selection ´e.m´ is converted to `´v´(´e´).´m(\mathit{args})´`. + +The implicit view, if it is found, can accept its argument ´e´ as a call-by-value or as a call-by-name parameter. +However, call-by-value implicits take precedence over call-by-name implicits. + +As for implicit parameters, overloading resolution is applied if there are several possible candidates (of either the call-by-value or the call-by-name category). + +###### Example Ordered + +Class `scala.Ordered[A]` contains a method + +```scala + def <= [B >: A](that: B)(implicit b2ordered: B => Ordered[B]): Boolean +``` + +Assume two lists `xs` and `ys` of type `List[Int]` and assume that the `list2ordered` and `int2ordered` methods defined [here](#implicit-parameters) are in scope. 
+Then the operation + +```scala + xs <= ys +``` + +is legal, and is expanded to: + +```scala + list2ordered(xs)(int2ordered).<= + (ys) + (xs => list2ordered(xs)(int2ordered)) +``` + +The first application of `list2ordered` converts the list `xs` to an instance of class `Ordered`, whereas the second occurrence is part of an implicit parameter passed to the `<=` method. + +## Context Bounds and View Bounds + +```ebnf + TypeParam ::= (id | ‘_’) [TypeParamClause] [‘>:’ Type] [‘<:’ Type] + {‘<%’ Type} {‘:’ Type} +``` + +A type parameter ´A´ of a method or non-trait class may have one or more view bounds `´A´ <% ´T´`. +In this case the type parameter may be instantiated to any type ´S´ which is convertible by application of a view to the bound ´T´. + +A type parameter ´A´ of a method or non-trait class may also have one or more context bounds `´A´ : ´T´`. +In this case the type parameter may be instantiated to any type ´S´ for which _evidence_ exists at the instantiation point that ´S´ satisfies the bound ´T´. +Such evidence consists of an implicit value with type ´T[S]´. + +A method or class containing type parameters with view or context bounds is treated as being equivalent to a method with implicit parameters. +Consider first the case of a single parameter with view and/or context bounds such as: + +```scala +def ´f´[´A´ <% ´T_1´ ... <% ´T_m´ : ´U_1´ : ´U_n´](´\mathit{ps}´): ´R´ = ... +``` + +Then the method definition above is expanded to + +```scala +def ´f´[´A´](´\mathit{ps}´)(implicit ´v_1´: ´A´ => ´T_1´, ..., ´v_m´: ´A´ => ´T_m´, + ´w_1´: ´U_1´[´A´], ..., ´w_n´: ´U_n´[´A´]): ´R´ = ... +``` + +where the ´v_i´ and ´w_j´ are fresh names for the newly introduced implicit parameters. +These parameters are called _evidence parameters_. 
+ +If a class or method has several view- or context-bounded type parameters, each such type parameter is expanded into evidence parameters in the order they appear and all the resulting evidence parameters are concatenated in one implicit parameter section. +Since traits do not take constructor parameters, this translation does not work for them. +Consequently, type-parameters in traits may not be view- or context-bounded. + +Evidence parameters are prepended to the existing implicit parameter section, if one exists. + +For example: + +```scala +def foo[A: M](implicit b: B): C +// expands to: +// def foo[A](implicit evidence$1: M[A], b: B): C +``` + +###### Example +The `<=` method from the [`Ordered` example](#example-ordered) can be declared more concisely as follows: + +```scala +def <= [B >: A <% Ordered[B]](that: B): Boolean +``` + +## Manifests + +Manifests are type descriptors that can be automatically generated by the Scala compiler as arguments to implicit parameters. +The Scala standard library contains a hierarchy of four manifest classes, with `OptManifest` at the top. +Their signatures follow the outline below. + +```scala +trait OptManifest[+T] +object NoManifest extends OptManifest[Nothing] +trait ClassManifest[T] extends OptManifest[T] +trait Manifest[T] extends ClassManifest[T] +``` + +If an implicit parameter of a method or constructor is of a subtype ´M[T]´ of class `OptManifest[T]`, _a manifest is determined for ´M[S]´_, according to the following rules. + +First if there is already an implicit argument that matches ´M[T]´, this argument is selected. + +Otherwise, let ´\mathit{Mobj}´ be the companion object `scala.reflect.Manifest` if ´M´ is trait `Manifest`, or be the companion object `scala.reflect.ClassManifest` otherwise. +Let ´M'´ be the trait `Manifest` if ´M´ is trait `Manifest`, or be the trait `OptManifest` otherwise. +Then the following rules apply. + +1.
If ´T´ is a value class or one of the classes `Any`, `AnyVal`, `Object`, `Null`, or `Nothing`, a manifest for it is generated by selecting the corresponding manifest value `Manifest.´T´`, which exists in the `Manifest` module. +1. If ´T´ is an instance of `Array[´S´]`, a manifest is generated with the invocation `´\mathit{Mobj}´.arrayType[S](m)`, where ´m´ is the manifest determined for ´M[S]´. +1. If ´T´ is some other class type ´S´#´C[U_1, ..., U_n]´ where the prefix type ´S´ cannot be statically determined from the class ´C´, a manifest is generated with the invocation `´\mathit{Mobj}´.classType[T](´m_0´, classOf[T], ´ms´)` where ´m_0´ is the manifest determined for ´M'[S]´ and ´ms´ are the manifests determined for ´M'[U_1], ..., M'[U_n]´. +1. If ´T´ is some other class type with type arguments ´U_1, ..., U_n´, a manifest is generated with the invocation `´\mathit{Mobj}´.classType[T](classOf[T], ´ms´)` where ´ms´ are the manifests determined for ´M'[U_1], ..., M'[U_n]´. +1. If ´T´ is a singleton type `´p´.type`, a manifest is generated with the invocation `´\mathit{Mobj}´.singleType[T](´p´)` +1. If ´T´ is a refined type ´T' \{ R \}´, a manifest is generated for ´T'´. +(That is, refinements are never reflected in manifests). +1. If ´T´ is an intersection type `´T_1´ with ... with ´T_n´` where ´n > 1´, the result depends on whether a full manifest is to be determined or not. +If ´M´ is trait `Manifest`, then a manifest is generated with the invocation `Manifest.intersectionType[T](´ms´)` where ´ms´ are the manifests determined for ´M[T_1], ..., M[T_n]´. +Otherwise, if ´M´ is trait `ClassManifest`, then a manifest is generated for the [intersection dominator](03-types.html#type-erasure) of the types ´T_1, ..., T_n´. +1. If ´T´ is some other type, then if ´M´ is trait `OptManifest`, a manifest is generated from the designator `scala.reflect.NoManifest`. +If ´M´ is a type different from `OptManifest`, a static error results. 
diff --git a/docs/_spec/08-pattern-matching.md b/docs/_spec/08-pattern-matching.md new file mode 100644 index 000000000000..1d50b814ee24 --- /dev/null +++ b/docs/_spec/08-pattern-matching.md @@ -0,0 +1,641 @@ +--- +title: Pattern Matching +layout: default +chapter: 8 +--- + +# Pattern Matching + +## Patterns + +```ebnf + Pattern ::= Pattern1 { ‘|’ Pattern1 } + Pattern1 ::= boundvarid ‘:’ TypePat + | ‘_’ ‘:’ TypePat + | Pattern2 + Pattern2 ::= id [‘@’ Pattern3] + | Pattern3 + Pattern3 ::= SimplePattern + | SimplePattern {id [nl] SimplePattern} + SimplePattern ::= ‘_’ + | varid + | Literal + | StableId + | StableId ‘(’ [Patterns] ‘)’ + | StableId ‘(’ [Patterns ‘,’] [id ‘@’] ‘_’ ‘*’ ‘)’ + | ‘(’ [Patterns] ‘)’ + | XmlPattern + Patterns ::= Pattern {‘,’ Patterns} +``` + +A pattern is built from constants, constructors, variables and type tests. +Pattern matching tests whether a given value (or sequence of values) has the shape defined by a pattern, and, if it does, binds the variables in the pattern to the corresponding components of the value (or sequence of values). +The same variable name may not be bound more than once in a pattern. + +###### Example +Some examples of patterns are: + 1. The pattern `ex: IOException` matches all instances of class `IOException`, binding variable `ex` to the instance. + 1. The pattern `Some(x)` matches values of the form `Some(´v´)`, binding `x` to the argument value ´v´ of the `Some` constructor. + 1. The pattern `(x, _)` matches pairs of values, binding `x` to the first component of the pair. The second component is matched with a wildcard pattern. + 1. The pattern `x :: y :: xs` matches lists of length ´\geq 2´, binding `x` to the list's first element, `y` to the list's second element, and `xs` to the remainder. + 1. The pattern `1 | 2 | 3` matches the integers between 1 and 3. + +Pattern matching is always done in a context which supplies an expected type of the pattern. +We distinguish the following kinds of patterns. 
+ +### Variable Patterns + +```ebnf + SimplePattern ::= ‘_’ + | varid +``` + +A _variable pattern_ ´x´ is a simple identifier which starts with a lower case letter. +It matches any value, and binds the variable name to that value. +The type of ´x´ is the expected type of the pattern as given from outside. +A special case is the wild-card pattern `_` which is treated as if it was a fresh variable on each occurrence. + +### Typed Patterns + +```ebnf + Pattern1 ::= varid ‘:’ TypePat + | ‘_’ ‘:’ TypePat +``` + +A _typed pattern_ ´x: T´ consists of a pattern variable ´x´ and a type pattern ´T´. +The type of ´x´ is the type pattern ´T´, where each type variable and wildcard is replaced by a fresh, unknown type. +This pattern matches any value matched by the [type pattern](#type-patterns) ´T´; it binds the variable name to that value. + +### Pattern Binders + +```ebnf + Pattern2 ::= varid ‘@’ Pattern3 +``` + +A _pattern binder_ `´x´@´p´` consists of a pattern variable ´x´ and a pattern ´p´. +The type of the variable ´x´ is the static type ´T´ implied by the pattern ´p´. +This pattern matches any value ´v´ matched by the pattern ´p´, and it binds the variable name to that value. + +A pattern ´p´ _implies_ a type ´T´ if the pattern matches only values of the type ´T´. + +### Literal Patterns + +```ebnf + SimplePattern ::= Literal +``` + +A _literal pattern_ ´L´ matches any value that is equal (in terms of `==`) to the literal ´L´. +The type of ´L´ must conform to the expected type of the pattern. + +### Interpolated string patterns + +```ebnf + Literal ::= interpolatedString +``` + +The expansion of interpolated string literals in patterns is the same as in expressions. +If it occurs in a pattern, a interpolated string literal of either of the forms +``` +id"text0{ pat1 }text1 ... { patn }textn" +id"""text0{ pat1 }text1 ... 
+ { patn }textn""" +``` +is equivalent to: +``` +StringContext("""text0""", ..., """textn""").id(pat1, ..., patn) +``` +You could define your own `StringContext` to shadow the default one that's in the `scala` package. + +This expansion is well-typed if the member `id` evaluates to an extractor object. +If the extractor object has `apply` as well as `unapply` or `unapplySeq` methods, processed strings can be used as either expressions or patterns. + +Taking XML as an example +```scala +implicit class XMLinterpolation(s: StringContext) = { + object xml { + def apply(exprs: Any*) = + // parse ‘s’ and build an XML tree with ‘exprs’ + //in the holes + def unapplySeq(xml: Node): Option[Seq[Node]] = + // match `s’ against `xml’ tree and produce + //subtrees in holes + } +} +``` +Then, XML pattern matching could be expressed like this: +```scala +case xml""" + + $linktext + + """ => ... +``` +where `linktext` is a variable bound by the pattern.
+ +### Constructor Patterns + +```ebnf +SimplePattern ::= StableId ‘(’ [Patterns] ‘)’ +``` + +A _constructor pattern_ is of the form ´c(p_1, ..., p_n)´ where ´n \geq 0´. It consists of a stable identifier ´c´, followed by element patterns ´p_1, ..., p_n´. +The constructor ´c´ is a simple or qualified name which denotes a [case class](05-classes-and-objects.html#case-classes). +If the case class is monomorphic, then it must conform to the expected type of the pattern, and the formal parameter types of ´x´'s [primary constructor](05-classes-and-objects.html#class-definitions) are taken as the expected types of the element patterns ´p_1, ..., p_n´. +If the case class is polymorphic, then its type parameters are instantiated so that the instantiation of ´c´ conforms to the expected type of the pattern. +The instantiated formal parameter types of ´c´'s primary constructor are then taken as the expected types of the component patterns ´p_1, ..., p_n´. +The pattern matches all objects created from constructor invocations ´c(v_1, ..., v_n)´ where each element pattern ´p_i´ matches the corresponding value ´v_i´. + +A special case arises when ´c´'s formal parameter types end in a repeated parameter. +This is further discussed [here](#pattern-sequences). + +### Tuple Patterns + +```ebnf + SimplePattern ::= ‘(’ [Patterns] ‘)’ +``` + +A _tuple pattern_ `(´p_1´, ..., ´p_n´)` where ´n \geq 2´ is equivalent to `´p_1´ *: ... *: ´p_n´ *: scala.EmptyTuple`. + +Notes: +- `()` is equivalent to `_: scala.Unit`, and not `scala.EmptyTuple`. +- `(´pat´)` is a pattern matching ´pat´, and not `´pat´ *: scala.EmptyTuple`. +- As such patterns with `*:` are slow, a more efficient translation is free to be implemented. For example, `(´p_1´, ´p_2´)` could be translated to `scala.Tuple2(´p_1´, ´p_2´)`, which is indeed equivalent to `´p_1´ *: ´p_2´ *: scala.EmptyTuple`. 
+ +### Extractor Patterns + +```ebnf + SimplePattern ::= StableId ‘(’ [Patterns] ‘)’ +``` + +An _extractor pattern_ ´x(p_1, ..., p_n)´ where ´n \geq 0´ is of the same syntactic form as a constructor pattern. +However, instead of a case class, the stable identifier ´x´ denotes an object which has a member method named `unapply` or `unapplySeq` that matches the pattern. + +An extractor pattern cannot match the value `null`. The implementation ensures that the `unapply`/`unapplySeq` method is not applied to `null`. + +A type is said to be an _extractor type_ for some type `T` if it has a method `get` with return type `T`, and a method `isEmpty` with a return type that conforms to `Boolean`. +`Option[T]` is an extractor type for type `T`. + +An `unapply` method in an object ´x´ _matches_ the pattern ´x(p_1, ..., p_n)´ if it has a single parameter (and, optionally, an implicit parameter list) and one of the following applies: + +* ´n=0´ and `unapply`'s result type conforms to `Boolean`. +In this case the extractor pattern matches all values ´v´ for which `´x´.unapply(´v´)` yields `true`. +* ´n=1´ and `unapply`'s result type is an extractor type for some type ´T´. +In this case, the (only) argument pattern ´p_1´ is typed in turn with expected type ´T´. +The extractor pattern matches then all values ´v´ for which `´x´.unapply(´v´)` yields a value ´u´ for which `´u´.isEmpty` yields `false`, `´u´.get` yields a value ´v_1´, and ´p_1´ matches ´v_1´. +* ´n>1´ and `unapply`'s result type is an extractor type for some type ´T´ with members ´\_1, ..., \_n´ returning types ´T_1, ..., T_n´. +In this case, the argument patterns ´p_1, ..., p_n´ are typed in turn with expected types ´T_1 , ..., T_n´. +The extractor pattern matches then all values ´v´ for which `´x´.unapply(´v´)` yields a value ´u´ for which `´u´.isEmpty` yields `false`, `´u´.get` yields some value ´t´, and each pattern ´p_i´ matches the corresponding value ´t._i´ from ´t._1, ..., t._n´.
+ +An `unapplySeq` method in an object ´x´ matches the pattern ´x(q_1, ..., q_m, p_1, ..., p_n)´ if it takes exactly one argument and its result type is of the form `Option[(´T_1, ..., T_m´, Seq[S])]` (if `m = 0`, the type `Option[Seq[S]]` is also accepted). +This case is further discussed [below](#pattern-sequences). + +###### Example 1 + +If we define an extractor object `Pair`: + +```scala +object Pair { + def apply[A, B](x: A, y: B) = Tuple2(x, y) + def unapply[A, B](x: Tuple2[A, B]): Option[Tuple2[A, B]] = Some(x) +} +``` + +This means that the name `Pair` can be used in place of `Tuple2` for tuple formation as well as for deconstruction of tuples in patterns. +Hence, the following is possible: + +```scala +val x = (1, 2) +val y = x match { + case Pair(i, s) => Pair(s + i, i * i) +} +``` + +###### Example 2 + +If we define a class `NameBased` + +```scala +class NameBased[A, B](a: A, b: B) { + def isEmpty = false + def get = this + def _1 = a + def _2 = b +} +``` + +Then `NameBased` is an extractor type for `NameBased` itself, since it has a member `isEmpty` returning a value of type Boolean, and it has a member `get` returning a value of type `NameBased`. + +Since it also has members `_1` and `_2`, it can be used in an extractor pattern with n = 2 as follows: + +```scala +object Extractor { + def unapply(x: Any) = new NameBased(1, "two") +} + +"anything" match { + case Extractor(a, b) => println(s"\$a, \$b") //prints "1, two" +} +``` + + +### Pattern Sequences + +```ebnf +SimplePattern ::= StableId ‘(’ [Patterns ‘,’] [varid ‘@’] ‘_’ ‘*’ ‘)’ +``` + +A _pattern sequence_ ´p_1, ..., p_n´ appears in two contexts. +First, in a constructor pattern ´c(q_1, ..., q_m, p_1, ..., p_n)´, where ´c´ is a case class which has ´m+1´ primary constructor parameters, ending in a [repeated parameter](04-basic-declarations-and-definitions.html#repeated-parameters) of type `S*`. 
+Second, in an extractor pattern ´x(q_1, ..., q_m, p_1, ..., p_n)´ if the extractor object ´x´ does not have an `unapply` method, but it does define an `unapplySeq` method with a result type that is an extractor type for type `(T_1, ... , T_m, Seq[S])` (if `m = 0`, an extractor type for the type `Seq[S]` is also accepted). The expected type for the patterns ´p_i´ is ´S´. + +The last pattern in a pattern sequence may be a _sequence wildcard_ `_*`. +Each element pattern ´p_i´ is type-checked with ´S´ as expected type, unless it is a sequence wildcard. +If a final sequence wildcard is present, the pattern matches all values ´v´ that are sequences which start with elements matching patterns ´p_1, ..., p_{n-1}´. +If no final sequence wildcard is given, the pattern matches all values ´v´ that are sequences of length ´n´ which consist of elements matching patterns ´p_1, ..., p_n´. + +### Infix Operation Patterns + +```ebnf + Pattern3 ::= SimplePattern {id [nl] SimplePattern} +``` + +An _infix operation pattern_ ´p;\mathit{op};q´ is a shorthand for the +constructor or extractor pattern ´\mathit{op}(p, q)´. +The precedence and associativity of operators in patterns is the same as in [expressions](06-expressions.html#prefix,-infix,-and-postfix-operations). + +An infix operation pattern ´p;\mathit{op};(q_1, ..., q_n)´ is a shorthand for the constructor or extractor pattern ´\mathit{op}(p, q_1, ..., q_n)´. + +### Pattern Alternatives + +```ebnf + Pattern ::= Pattern1 { ‘|’ Pattern1 } +``` + +A _pattern alternative_ `´p_1´ | ... | ´p_n´` consists of a number of alternative patterns ´p_i´. +All alternative patterns are type checked with the expected type of the pattern. +They may not bind variables other than wildcards. +The alternative pattern matches a value ´v´ if at least one of its alternatives matches ´v´. + +### XML Patterns + +XML patterns are treated [here](10-xml-expressions-and-patterns.html#xml-patterns).
+ +### Regular Expression Patterns + +Regular expression patterns have been discontinued in Scala from version 2.0. + +Later versions of Scala provide a much simplified version of regular expression patterns that cover most scenarios of non-text sequence processing. +A _sequence pattern_ is a pattern that stands in a position where either (1) a pattern of a type `T` which is conforming to `Seq[A]` for some `A` is expected, or (2) a case class constructor that has an iterated formal parameter `A*`. +A wildcard star pattern `_*` in the rightmost position stands for arbitrarily long sequences. +It can be bound to variables using `@`, as usual, in which case the variable will have the type `Seq[A]`. + +### Irrefutable Patterns + +A pattern ´p´ is _irrefutable_ for a type ´T´, if one of the following applies: + +1. ´p´ is a variable pattern, +1. ´p´ is a typed pattern ´x: T'´, and ´T <: T'´, +1. ´p´ is a constructor pattern ´c(p_1, ..., p_n)´, the type ´T´ is an instance of class ´c´, the [primary constructor](05-classes-and-objects.html#class-definitions) of type ´T´ has argument types ´T_1, ..., T_n´, and each ´p_i´ is irrefutable for ´T_i´. +1. ´p´ is an extractor pattern for which the extractor type is `Some[´T´]` for some type ´T´ +1. ´p´ is an extractor pattern for which the extractor type's `isEmpty` method is the singleton type `false` +1. ´p´ is an extractor pattern for which the return type is the singleton type `true` + +## Type Patterns + +```ebnf + TypePat ::= Type +``` + +Type patterns consist of types, type variables, and wildcards. +A type pattern ´T´ is of one of the following forms: + +* A reference to a class ´C´, ´p.C´, or `´T´#´C´`. +This type pattern matches any non-null instance of the given class. +Note that the prefix of the class, if it exists, is relevant for determining class instances. +For instance, the pattern ´p.C´ matches only instances of classes ´C´ which were created with the path ´p´ as prefix.
+This also applies to prefixes which are not given syntactically. +For example, if ´C´ refers to a class defined in the nearest enclosing class and is thus equivalent to ´this.C´, it is considered to have a prefix. + +The bottom types `scala.Nothing` and `scala.Null` cannot be used as type patterns, because they would match nothing in any case. + +* A singleton type `´p´.type`. This type pattern matches only the value denoted by the path ´p´ (the `eq` method is used to compare the matched value to ´p´). + +* A literal type `´lit´`. This type pattern matches only the value denoted by the literal ´lit´ (the `==` method is used to compare the matched value to ´lit´). + +* A compound type pattern `´T_1´ with ... with ´T_n´` where each ´T_i´ is a type pattern. +This type pattern matches all values that are matched by each of the type patterns ´T_i´. + +* A parameterized type pattern ´T[a_1, ..., a_n]´, where the ´a_i´ are type variable patterns or wildcards `_`. +This type pattern matches all values which match ´T´ for some arbitrary instantiation of the type variables and wildcards. +The bounds or alias type of these type variable are determined as described [here](#type-parameter-inference-in-patterns). + +* A parameterized type pattern `scala.Array´[T_1]´`, where ´T_1´ is a type pattern. +This type pattern matches any non-null instance of type `scala.Array´[U_1]´`, where ´U_1´ is a type matched by ´T_1´. + +Types which are not of one of the forms described above are also accepted as type patterns. +However, such type patterns will be translated to their [erasure](03-types.html#type-erasure). +The Scala compiler will issue an "unchecked" warning for these patterns to flag the possible loss of type-safety. + +A _type variable pattern_ is a simple identifier which starts with a lower case letter. + +## Type Parameter Inference in Patterns + +Type parameter inference is the process of finding bounds for the bound type variables in a typed pattern or constructor pattern. 
+Inference takes into account the expected type of the pattern. + +### Type parameter inference for typed patterns + +Assume a typed pattern ´p: T'´. Let ´T´ result from ´T'´ where all wildcards in ´T'´ are renamed to fresh variable names. +Let ´a_1, ..., a_n´ be the type variables in ´T´. +These type variables are considered bound in the pattern. +Let the expected type of the pattern be ´\mathit{pt}´. + +Type parameter inference constructs first a set of subtype constraints over the type variables ´a_i´. +The initial constraints set ´\mathcal{C}\_0´ reflects just the bounds of these type variables. +That is, assuming ´T´ has bound type variables ´a_1, ..., a_n´ which correspond to class type parameters ´a_1', ..., a_n'´ with lower bounds ´L_1, ..., L_n´ and upper bounds ´U_1, ..., U_n´, ´\mathcal{C}_0´ contains the constraints + +$$ +\begin{cases} +a_i &<: \sigma U_i & \quad (i = 1, ..., n) \\\\ +\sigma L_i &<: a_i & \quad (i = 1, ..., n) +\end{cases} +$$ + +where ´\sigma´ is the substitution ´[a_1' := a_1, ..., a_n' :=a_n]´. + +The set ´\mathcal{C}_0´ is then augmented by further subtype constraints. +There are two cases. + +###### Case 1 +If there exists a substitution ´\sigma´ over the type variables ´a_i, ..., a_n´ such that ´\sigma T´ conforms to ´\mathit{pt}´, one determines the weakest subtype constraints ´\mathcal{C}\_1´ over the type variables ´a_1, ..., a_n´ such that ´\mathcal{C}\_0 \wedge \mathcal{C}_1´ implies that ´T´ conforms to ´\mathit{pt}´. + +###### Case 2 +Otherwise, if ´T´ can not be made to conform to ´\mathit{pt}´ by instantiating its type variables, one determines all type variables in ´\mathit{pt}´ which are defined as type parameters of a method enclosing the pattern. +Let the set of such type parameters be ´b_1 , ..., b_m´. +Let ´\mathcal{C}\_0'´ be the subtype constraints reflecting the bounds of the type variables ´b_i´. 
+If ´T´ denotes an instance type of a final class, let ´\mathcal{C}\_2´ be the weakest set of subtype constraints over the type variables ´a_1, ..., a_n´ and ´b_1, ..., b_m´ such that ´\mathcal{C}\_0 \wedge \mathcal{C}\_0' \wedge \mathcal{C}\_2´ implies that ´T´ conforms to ´\mathit{pt}´.
+If ´T´ does not denote an instance type of a final class, let ´\mathcal{C}\_2´ be the weakest set of subtype constraints over the type variables ´a_1, ..., a_n´ and ´b_1, ..., b_m´ such that ´\mathcal{C}\_0 \wedge \mathcal{C}\_0' \wedge \mathcal{C}\_2´ implies that it is possible to construct a type ´T'´ which conforms to both ´T´ and ´\mathit{pt}´.
+It is a static error if there is no satisfiable set of constraints ´\mathcal{C}\_2´ with this property.
+
+The final step consists in choosing type bounds for the type variables which imply the established constraint system.
+The process is different for the two cases above.
+
+###### Case 1
+We take ´a_i >: L_i <: U_i´ where each ´L_i´ is minimal and each ´U_i´ is maximal wrt ´<:´ such that ´a_i >: L_i <: U_i´ for ´i = 1, ..., n´ implies ´\mathcal{C}\_0 \wedge \mathcal{C}\_1´.
+
+###### Case 2
+We take ´a_i >: L_i <: U_i´ and ´b_j >: L_j' <: U_j'´ where each ´L_i´ and ´L_j'´ is minimal and each ´U_i´ and ´U_j'´ is maximal such that ´a_i >: L_i <: U_i´ for ´i = 1, ..., n´ and ´b_j >: L_j' <: U_j'´ for ´j = 1, ..., m´ implies ´\mathcal{C}\_0 \wedge \mathcal{C}\_0' \wedge \mathcal{C}_2´.
+
+In both cases, local type inference is permitted to limit the complexity of inferred bounds.
+Minimality and maximality of types have to be understood relative to the set of types of acceptable complexity.
+
+### Type parameter inference for constructor patterns
+Assume a constructor pattern ´C(p_1, ..., p_n)´ where class ´C´ has type parameters ´a_1, ..., a_n´.
+These type parameters are inferred in the same way as for the typed pattern `(_: ´C[a_1, ..., a_n]´)`.
+
+###### Example
+Consider the program fragment:
+
+```scala
+val x: Any
+x match {
+  case y: List[a] => ...
+}
+```
+
+Here, the type pattern `List[a]` is matched against the expected type `Any`.
+The pattern binds the type variable `a`.
+Since `List[a]` conforms to `Any` for every type argument, there are no constraints on `a`.
+Hence, `a` is introduced as an abstract type with no bounds.
+The scope of `a` is the right-hand side of its case clause.
+
+On the other hand, if `x` is declared as
+
+```scala
+val x: List[List[String]]
+```
+
+this generates the constraint `List[a] <: List[List[String]]`, which simplifies to `a <: List[String]`, because `List` is covariant.
+Hence, `a` is introduced with upper bound `List[String]`.
+
+###### Example
+Consider the program fragment:
+
+```scala
+val x: Any
+x match {
+  case y: List[String] => ...
+}
+```
+
+Scala does not maintain information about type arguments at run-time, so there is no way to check that `x` is a list of strings.
+Instead, the Scala compiler will [erase](03-types.html#type-erasure) the pattern to `List[_]`; that is, it will only test whether the top-level runtime-class of the value `x` conforms to `List`, and the pattern match will succeed if it does.
+This might lead to a class cast exception later on, in the case where the list `x` contains elements other than strings.
+The Scala compiler will flag this potential loss of type-safety with an "unchecked" warning message.
+
+###### Example
+Consider the program fragment
+
+```scala
+class Term[A]
+class Number(val n: Int) extends Term[Int]
+def f[B](t: Term[B]): B = t match {
+  case y: Number => y.n
+}
+```
+
+The expected type of the pattern `y: Number` is `Term[B]`.
+The type `Number` does not conform to `Term[B]`; hence Case 2 of the rules above applies.
+This means that `B` is treated as another type variable for which subtype constraints are inferred.
+In our case the applicable constraint is `Number <: Term[B]`, which entails `B = Int`.
Hence, `B` is treated in the case clause as an abstract type with lower and upper bound `Int`.
+Therefore, the right hand side of the case clause, `y.n`, of type `Int`, is found to conform to the method's declared result type, `B`.
+
+## Pattern Matching Expressions
+
+```ebnf
+  Expr            ::=  PostfixExpr ‘match’ ‘{’ CaseClauses ‘}’
+  CaseClauses     ::=  CaseClause {CaseClause}
+  CaseClause      ::=  ‘case’ Pattern [Guard] ‘=>’ Block
+```
+
+A _pattern matching expression_
+
+```scala
+e match { case ´p_1´ => ´b_1´ ... case ´p_n´ => ´b_n´ }
+```
+
+consists of a selector expression ´e´ and a number ´n > 0´ of cases.
+Each case consists of a (possibly guarded) pattern ´p_i´ and a block ´b_i´.
+Each ´p_i´ might be complemented by a guard `if ´e´` where ´e´ is a boolean expression.
+The scope of the pattern variables in ´p_i´ comprises the pattern's guard and the corresponding block ´b_i´.
+
+Let ´T´ be the type of the selector expression ´e´ and let ´a_1, ..., a_m´ be the type parameters of all methods enclosing the pattern matching expression.
+For every ´a_i´, let ´L_i´ be its lower bound and ´U_i´ be its upper bound.
+Every pattern ´p \in \{p_1, ..., p_n\}´ can be typed in two ways.
+First, it is attempted to type ´p´ with ´T´ as its expected type.
+If this fails, ´p´ is instead typed with a modified expected type ´T'´ which results from ´T´ by replacing every occurrence of a type parameter ´a_i´ by
+*undefined*.
+If this second step fails also, a compile-time error results.
+If the second step succeeds, let ´T_p´ be the type of pattern ´p´ seen as an expression.
+One then determines minimal bounds ´L_1', ..., L_m'´ and maximal bounds ´U_1', ..., U_m'´ such that for all ´i´, ´L_i <: L_i'´ and ´U_i' <: U_i´ and the following constraint system is satisfied:
+
+$$
+L_1' <: a_1 <: U_1'\;\wedge\;...\;\wedge\;L_m' <: a_m <: U_m' \ \Rightarrow\ T_p <: T
+$$
+
+If no such bounds can be found, a compile-time error results.
+If such bounds are found, the pattern matching clause starting with ´p´ is then typed under the assumption that each ´a_i´ has lower bound ´L_i'´ instead of ´L_i´ and has upper bound ´U_i'´ instead of ´U_i´. + +The expected type of every block ´b_i´ is the expected type of the whole pattern matching expression. +The type of the pattern matching expression is then the [weak least upper bound](03-types.html#weak-conformance) of the types of all blocks ´b_i´. + +When applying a pattern matching expression to a selector value, patterns are tried in sequence until one is found which matches the [selector value](#patterns). +Say this case is `case ´p_i \Rightarrow b_i´`. +The result of the whole expression is the result of evaluating ´b_i´, where all pattern variables of ´p_i´ are bound to the corresponding parts of the selector value. +If no matching pattern is found, a `scala.MatchError` exception is thrown. + +The pattern in a case may also be followed by a guard suffix `if e` with a boolean expression ´e´. +The guard expression is evaluated if the preceding pattern in the case matches. +If the guard expression evaluates to `true`, the pattern match succeeds as normal. +If the guard expression evaluates to `false`, the pattern in the case is considered not to match and the search for a matching pattern continues. + +In the interest of efficiency the evaluation of a pattern matching expression may try patterns in some other order than textual sequence. +This might affect evaluation through side effects in guards. +However, it is guaranteed that a guard expression is evaluated only if the pattern it guards matches. + +If the selector of a pattern match is an instance of a [`sealed` class](05-classes-and-objects.html#modifiers), a [union type](03-types#union-and-intersection-types), or a combination thereof, the compilation of pattern matching can emit warnings which diagnose that a given set of patterns is not exhaustive, i.e. 
that there is a possibility of a `MatchError` being raised at run-time. + +###### Example + +Consider the following definitions of arithmetic terms: + +```scala +abstract class Term[T] +case class Lit(x: Int) extends Term[Int] +case class Succ(t: Term[Int]) extends Term[Int] +case class IsZero(t: Term[Int]) extends Term[Boolean] +case class If[T](c: Term[Boolean], + t1: Term[T], + t2: Term[T]) extends Term[T] +``` + +There are terms to represent numeric literals, incrementation, a zero test, and a conditional. +Every term carries as a type parameter the type of the expression it represents (either `Int` or `Boolean`). + +A type-safe evaluator for such terms can be written as follows. + +```scala +def eval[T](t: Term[T]): T = t match { + case Lit(n) => n + case Succ(u) => eval(u) + 1 + case IsZero(u) => eval(u) == 0 + case If(c, u1, u2) => eval(if (eval(c)) u1 else u2) +} +``` + +Note that the evaluator makes crucial use of the fact that type parameters of enclosing methods can acquire new bounds through pattern matching. + +For instance, the type of the pattern in the second case, `Succ(u)`, is `Int`. +It conforms to the selector type `T` only if we assume an upper and lower bound of `Int` for `T`. +Under the assumption `Int <: T <: Int` we can also verify that the type right hand side of the second case, `Int` conforms to its expected type, `T`. + +## Pattern Matching Anonymous Functions + +```ebnf + BlockExpr ::= ‘{’ CaseClauses ‘}’ +``` + +An anonymous function can be defined by a sequence of cases + +```scala +{ case ´p_1´ => ´b_1´ ... case ´p_n´ => ´b_n´ } +``` + +which appear as an expression without a prior `match`. +The expected type of such an expression must in part be defined. +It must be either `scala.Function´k´[´S_1, ..., S_k´, ´R´]` for some ´k > 0´, or `scala.PartialFunction[´S_1´, ´R´]`, where the argument type(s) ´S_1, ..., S_k´ must be fully determined, but the result type ´R´ may be undetermined. 
+ +If the expected type is [SAM-convertible](06-expressions.html#sam-conversion) to `scala.Function´k´[´S_1, ..., S_k´, ´R´]`, the expression is taken to be equivalent to the anonymous function: + +```scala +(´x_1: S_1, ..., x_k: S_k´) => (´x_1, ..., x_k´) match { + case ´p_1´ => ´b_1´ ... case ´p_n´ => ´b_n´ +} +``` + +Here, each ´x_i´ is a fresh name. +As was shown [here](06-expressions.html#anonymous-functions), this anonymous function is in turn equivalent to the following instance creation expression, where ´T´ is the weak least upper bound of the types of all ´b_i´. + +```scala +new scala.Function´k´[´S_1, ..., S_k´, ´T´] { + def apply(´x_1: S_1, ..., x_k: S_k´): ´T´ = (´x_1, ..., x_k´) match { + case ´p_1´ => ´b_1´ ... case ´p_n´ => ´b_n´ + } +} +``` + +If the expected type is `scala.PartialFunction[´S´, ´R´]`, the expression is taken to be equivalent to the following instance creation expression: + +```scala +new scala.PartialFunction[´S´, ´T´] { + def apply(´x´: ´S´): ´T´ = x match { + case ´p_1´ => ´b_1´ ... case ´p_n´ => ´b_n´ + } + def isDefinedAt(´x´: ´S´): Boolean = { + case ´p_1´ => true ... case ´p_n´ => true + case _ => false + } +} +``` + +Here, ´x´ is a fresh name and ´T´ is the weak least upper bound of the types of all ´b_i´. +The final default case in the `isDefinedAt` method is omitted if one of the patterns ´p_1, ..., p_n´ is already a variable or wildcard pattern. 
+ +###### Example +Here's an example which uses `foldLeft` to compute the scalar product of two vectors: + +```scala +def scalarProduct(xs: Array[Double], ys: Array[Double]) = + (xs zip ys).foldLeft(0.0) { + case (a, (b, c)) => a + b * c + } +``` + +The case clauses in this code are equivalent to the following anonymous function: + +```scala +(x, y) => (x, y) match { + case (a, (b, c)) => a + b * c +} +``` diff --git a/docs/_spec/09-top-level-definitions.md b/docs/_spec/09-top-level-definitions.md new file mode 100644 index 000000000000..8406c0180533 --- /dev/null +++ b/docs/_spec/09-top-level-definitions.md @@ -0,0 +1,178 @@ +--- +title: Top-Level Definitions +layout: default +chapter: 9 +--- + +# Top-Level Definitions + +## Compilation Units + +```ebnf +CompilationUnit ::= {‘package’ QualId semi} TopStatSeq +TopStatSeq ::= TopStat {semi TopStat} +TopStat ::= {Annotation} {Modifier} TmplDef + | Import + | Packaging + | PackageObject + | +QualId ::= id {‘.’ id} +``` + +A compilation unit consists of a sequence of packagings, import clauses, and class and object definitions, which may be preceded by a package clause. + +A _compilation unit_ + +```scala +package ´p_1´; +... +package ´p_n´; +´\mathit{stats}´ +``` + +starting with one or more package clauses is equivalent to a compilation unit consisting of the packaging + +```scala +package ´p_1´ { ... + package ´p_n´ { + ´\mathit{stats}´ + } ... +} +``` + +Every compilation unit implicitly imports the following packages, in the given order: + 1. the package `java.lang`, + 2. the package `scala`, and + 3. the object [`scala.Predef`](12-the-scala-standard-library.html#the-predef-object), unless there is an explicit top-level import that references `scala.Predef`. + +Members of a later import in that order hide members of an earlier import. + +The exception to the implicit import of `scala.Predef` can be useful to hide, e.g., predefined implicit conversions. 
+
+## Packagings
+
+```ebnf
+Packaging ::= ‘package’ QualId [nl] ‘{’ TopStatSeq ‘}’
+```
+
+A _package_ is a special object which defines a set of member classes, objects and packages.
+Unlike other objects, packages are not introduced by a definition.
+Instead, the set of members of a package is determined by packagings.
+
+A packaging `package ´p´ { ´\mathit{ds}´ }` injects all definitions in ´\mathit{ds}´ as members into the package whose qualified name is ´p´.
+Members of a package are called _top-level_ definitions.
+If a definition in ´\mathit{ds}´ is labeled `private`, it is visible only for other members in the package.
+
+Inside the packaging, all members of package ´p´ are visible under their simple names.
+However, this rule does not extend to members of enclosing packages of ´p´ that are designated by a prefix of the path ´p´.
+For example, given the packaging
+```scala
+package org.net.prj {
+  ...
+}
+```
+
+all members of package `org.net.prj` are visible under their simple names, but members of packages `org` or `org.net` require explicit qualification or imports.
+
+Selections ´p´.´m´ from ´p´ as well as imports from ´p´ work as for objects.
+However, unlike other objects, packages may not be used as values.
+It is illegal to have a package with the same fully qualified name as a module or a class.
+
+Top-level definitions outside a packaging are assumed to be injected into a special empty package.
+That package cannot be named and therefore cannot be imported.
+However, members of the empty package are visible to each other without qualification.
+
+## Package Objects
+
+```ebnf
+PackageObject ::= ‘package’ ‘object’ ObjectDef
+```
+
+A _package object_ `package object ´p´ extends ´t´` adds the members of template ´t´ to the package ´p´.
+There can be only one package object per package.
+The standard naming convention is to place the definition above in a file named `package.scala` that's located in the directory corresponding to package ´p´.
+ +The package object should not define a member with the same name as one of the top-level objects or classes defined in package ´p´. +If there is a name conflict, the behavior of the program is currently undefined. +It is expected that this restriction will be lifted in a future version of Scala. + +## Package References + +```ebnf +QualId ::= id {‘.’ id} +``` + +A reference to a package takes the form of a qualified identifier. +Like all other references, package references are relative. +That is, a package reference starting in a name ´p´ will be looked up in the closest enclosing scope that defines a member named ´p´. + +If a package name is shadowed, it's possible to refer to its fully-qualified name by prefixing it with the special predefined name `_root_`, which refers to the outermost root package that contains all top-level packages. + +The name `_root_` has this special denotation only when used as the first element of a qualifier; it is an ordinary identifier otherwise. + +###### Example +Consider the following program: + +```scala +package b { + class B +} + +package a { + package b { + class A { + val x = new _root_.b.B + } + class C { + import _root_.b._ + def y = new B + } + } +} + +``` + +Here, the reference `_root_.b.B` refers to class `B` in the toplevel package `b`. +If the `_root_` prefix had been omitted, the name `b` would instead resolve to the package `a.b`, and, provided that package does not also contain a class `B`, a compiler-time error would result. + +## Programs + +A _program_ is a top-level object that has a member method _main_ of type `(Array[String])Unit`. Programs can be executed from a command shell. +The program's command arguments are passed to the `main` method as a parameter of type `Array[String]`. + +The `main` method of a program can be directly defined in the object, or it can be inherited. +The scala library defines a special class `scala.App` whose body acts as a `main` method. 
+An object ´m´ inheriting from this class is thus a program, which executes the initialization code of the object ´m´.
+
+###### Example
+The following example will create a hello world program by defining a method `main` in module `test.HelloWorld`.
+
+```scala
+package test
+object HelloWorld {
+  def main(args: Array[String]) { println("Hello World") }
+}
+```
+
+This program can be started by the command
+
+```scala
+scala test.HelloWorld
+```
+
+In a Java environment, the command
+
+```scala
+java test.HelloWorld
+```
+
+would work as well.
+
+`HelloWorld` can also be defined without a `main` method by inheriting from `App` instead:
+
+```scala
+package test
+object HelloWorld extends App {
+  println("Hello World")
+}
+```
diff --git a/docs/_spec/10-xml-expressions-and-patterns.md b/docs/_spec/10-xml-expressions-and-patterns.md
new file mode 100644
index 000000000000..c929e24fe93d
--- /dev/null
+++ b/docs/_spec/10-xml-expressions-and-patterns.md
@@ -0,0 +1,124 @@
+---
+title: XML
+layout: default
+chapter: 10
+---
+
+# XML Expressions and Patterns
+
+__By Burak Emir__
+
+This chapter describes the syntactic structure of XML expressions and patterns.
+It follows as closely as possible the XML 1.0 specification, changes being mandated by the possibility of embedding Scala code fragments.
+
+## XML expressions
+
+XML expressions are expressions generated by the following production, where the opening bracket `<` of the first element must be in a position to start the lexical [XML mode](01-lexical-syntax.html#xml-mode).
+
+```ebnf
+XmlExpr ::= XmlContent {Element}
+```
+
+Well-formedness constraints of the XML specification apply, which means for instance that start tags and end tags must match, and attributes may only be defined once, except for constraints related to entity resolution.
+
+The following productions describe Scala's extensible markup language, designed as close as possible to the W3C extensible markup language standard.
+Only the productions for attribute values and character data are changed.
+Scala does not support declarations.
+Entity references are not resolved at runtime.
+
+```ebnf
+Element       ::= EmptyElemTag
+                | STag Content ETag
+
+EmptyElemTag  ::= ‘<’ Name {S Attribute} [S] ‘/>’
+
+STag          ::= ‘<’ Name {S Attribute} [S] ‘>’
+ETag          ::= ‘</’ Name [S] ‘>’
+Content       ::= [CharData] {Content1 [CharData]}
+Content1      ::= XmlContent
+                | Reference
+                | ScalaExpr
+XmlContent    ::= Element
+                | CDSect
+                | PI
+                | Comment
+```
+
+If an XML expression is a single element, its value is a runtime representation of an XML node (an instance of a subclass of `scala.xml.Node`).
+If the XML expression consists of more than one element, then its value is a runtime representation of a sequence of XML nodes (an instance of a subclass of `scala.Seq[scala.xml.Node]`).
+
+If an XML expression is an entity reference, CDATA section, processing instruction, or a comment, it is represented by an instance of the corresponding Scala runtime class.
+
+By default, beginning and trailing whitespace in element content is removed, and consecutive occurrences of whitespace are replaced by a single space character `\u0020`.
+This behavior can be changed to preserve all whitespace with a compiler option.
+
+```ebnf
+Attribute  ::= Name Eq AttValue
+
+AttValue   ::= ‘"’ {CharQ | CharRef} ‘"’
+             | ‘'’ {CharA | CharRef} ‘'’
+             | ScalaExpr
+
+ScalaExpr  ::= Block
+
+CharData   ::= { CharNoRef } ´\textit{ without}´ {CharNoRef}‘{’CharB {CharNoRef}
+               ´\textit{ and without}´ {CharNoRef}‘]]>’{CharNoRef}
+```
+
+
+XML expressions may contain Scala expressions as attribute values or within nodes.
+In the latter case, these are embedded using a single opening brace `{` and ended by a closing brace `}`.
+To express a single opening brace within XML text as generated by CharData, it must be doubled.
+Thus, `{{` represents the XML text `{` and does not introduce an embedded Scala expression.
+ + +```ebnf +BaseChar, CDSect, Char, Comment, CombiningChar, Ideographic, NameChar, PI, S, Reference + ::= ´\textit{“as in W3C XML”}´ + +Char1 ::= Char ´\textit{ without}´ ‘<’ | ‘&’ +CharQ ::= Char1 ´\textit{ without}´ ‘"’ +CharA ::= Char1 ´\textit{ without}´ ‘'’ +CharB ::= Char1 ´\textit{ without}´ ‘{’ + +Name ::= XNameStart {NameChar} + +XNameStart ::= ‘_’ | BaseChar | Ideographic + ´\textit{ (as in W3C XML, but without }´ ‘:’´)´ +``` + +## XML patterns + +XML patterns are patterns generated by the following production, where the opening bracket `<` of the element patterns must be in a position to start the lexical [XML mode](01-lexical-syntax.html#xml-mode). + +```ebnf +XmlPattern ::= ElementPattern +``` + +Well-formedness constraints of the XML specification apply. + +An XML pattern has to be a single element pattern. +It matches exactly those runtime representations of an XML tree that have the same structure as described by the pattern. +XML patterns may contain [Scala patterns](08-pattern-matching.html#pattern-matching-expressions). + +Whitespace is treated the same way as in XML expressions. + +By default, beginning and trailing whitespace in element content is removed, and consecutive occurrences of whitespace are replaced by a single space character `\u0020`. +This behavior can be changed to preserve all whitespace with a compiler option. 
+
+```ebnf
+ElemPattern    ::= EmptyElemTagP
+                 | STagP ContentP ETagP
+
+EmptyElemTagP  ::= ‘<’ Name [S] ‘/>’
+STagP          ::= ‘<’ Name [S] ‘>’
+ETagP          ::= ‘</’ Name [S] ‘>’
+ContentP       ::= [CharData] {(ElemPattern|ScalaPatterns) [CharData]}
+ContentP1      ::= ElemPattern
+                 | Reference
+                 | CDSect
+                 | PI
+                 | Comment
+                 | ScalaPatterns
+ScalaPatterns  ::= ‘{’ Patterns ‘}’
+```
diff --git a/docs/_spec/11-annotations.md b/docs/_spec/11-annotations.md
new file mode 100644
index 000000000000..3388d55318ea
--- /dev/null
+++ b/docs/_spec/11-annotations.md
@@ -0,0 +1,126 @@
+---
+title: Annotations
+layout: default
+chapter: 11
+---
+
+# Annotations
+
+```ebnf
+  Annotation       ::= ‘@’ SimpleType {ArgumentExprs}
+  ConstrAnnotation ::= ‘@’ SimpleType ArgumentExprs
+```
+
+## Definition
+
+Annotations associate meta-information with definitions.
+A simple annotation has the form `@´c´` or `@´c(a_1, ..., a_n)´`.
+Here, ´c´ is a constructor of a class ´C´, which must conform to the class `scala.Annotation`.
+
+Annotations may apply to definitions or declarations, types, or expressions.
+An annotation of a definition or declaration appears in front of that definition.
+An annotation of a type appears after that type.
+An annotation of an expression ´e´ appears after the expression ´e´, separated by a colon.
+More than one annotation clause may apply to an entity.
+The order in which these annotations are given does not matter.
+
+Examples:
+
+```scala
+@deprecated("Use D", "1.0") class C { ... } // Class annotation
+@transient @volatile var m: Int             // Variable annotation
+String @local                               // Type annotation
+(e: @unchecked) match { ... }               // Expression annotation
+```
+
+## Predefined Annotations
+
+### Java Platform Annotations
+
+The meaning of annotation clauses is implementation-dependent.
+On the Java platform, the following annotations have a standard meaning.
+
+* `@transient` Marks a field to be non-persistent; this is equivalent to the `transient` modifier in Java.
+
+* `@volatile` Marks a field which can change its value outside the control of the program; this is equivalent to the `volatile` modifier in Java.
+
+* `@SerialVersionUID(<longlit>)` Attaches a serial version identifier (a `long` constant) to a class.
+This is equivalent to the following field definition in Java:
+
+```java
+private final static long SerialVersionUID = <longlit>
+```
+
+* `@throws(<classname>)` A Java compiler checks that a program contains handlers for checked exceptions by analyzing which checked exceptions can result from the execution of a method or constructor.
+For each checked exception which is a possible result, the `throws` clause for the method or constructor must mention the class of that exception or one of the superclasses of the class of that exception.
+
+### Java Beans Annotations
+
+* `@scala.beans.BeanProperty` When prefixed to a definition of some variable `X`, this annotation causes getter and setter methods `getX`, `setX` in the Java bean style to be added in the class containing the variable.
+The first letter of the variable appears capitalized after the `get` or `set`.
+When the annotation is added to the definition of an immutable value definition `X`, only a getter is generated.
+The construction of these methods is part of code-generation; therefore, these methods become visible only once a classfile for the containing class is generated.
+
+* `@scala.beans.BooleanBeanProperty` This annotation is equivalent to `scala.beans.BeanProperty`, but the generated getter method is named `isX` instead of `getX`.
+
+### Deprecation Annotations
+
+* `@deprecated(message: <message>, since: <since>)`<br/>
+Marks a definition as deprecated.
+Accesses to the defined entity will then cause a deprecated warning mentioning the _message_ `<message>` to be issued from the compiler.
+The argument _since_ documents since when the definition should be considered deprecated.
+Deprecated warnings are suppressed in code that belongs itself to a definition that is labeled deprecated.
+
+* `@deprecatedName(name: <name>, since: <since>)`<br/>
+Marks a formal parameter name as deprecated.
+Invocations of this entity using named parameter syntax referring to the deprecated parameter name cause a deprecation warning.
+
+### Scala Compiler Annotations
+
+* `@unchecked` When applied to the selector of a `match` expression, this attribute suppresses any warnings about non-exhaustive pattern matches that would otherwise be emitted.
+For instance, no warnings would be produced for the method definition below.
+```scala
+def f(x: Option[Int]) = (x: @unchecked) match {
+  case Some(y) => y
+}
+```
+Without the `@unchecked` annotation, a Scala compiler could infer that the pattern match is non-exhaustive, and could produce a warning because `Option` is a `sealed` class.
+
+* `@uncheckedStable` When applied to a value declaration or definition, it allows the defined value to appear in a path, even if its type is [volatile](03-types.html#volatile-types).
+For instance, the following member definitions are legal:
+```scala
+type A { type T }
+type B
+@uncheckedStable val x: A with B // volatile type
+val y: x.T // OK since `x' is still a path
+```
+Without the `@uncheckedStable` annotation, the designator `x` would not be a path since its type `A with B` is volatile.
+Hence, the reference `x.T` would be malformed.
+
+When applied to value declarations or definitions that have non-volatile types, the annotation has no effect.
+
+* `@specialized` When applied to the definition of a type parameter, this annotation causes the compiler to generate specialized definitions for primitive types.
+An optional list of primitive types may be given, in which case specialization takes into account only those types.
+For instance, the following code would generate specialized traits for `Unit`, `Int` and `Double` +```scala +trait Function0[@specialized(Unit, Int, Double) T] { + def apply: T +} +``` +Whenever the static type of an expression matches a specialized variant of a definition, the compiler will instead use the specialized version. +See the [specialization sid](https://docs.scala-lang.org/sips/scala-specialization.html) for more details of the implementation. + + +## User-defined Annotations + +Other annotations may be interpreted by platform- or application-dependent tools. +The class `scala.annotation.Annotation` is the base class for user-defined annotations. It has two sub-traits: +- `scala.annotation.StaticAnnotation`: Instances of a subclass of this trait will be stored in the generated class files, and therefore accessible to runtime reflection and later compilation runs. +- `scala.annotation.ConstantAnnotation`: Instances of a subclass of this trait may only have arguments which are [constant expressions](06-expressions.html#constant-expressions), and are also stored in the generated class files. +- If an annotation class inherits from neither `scala.ConstantAnnotation` nor `scala.StaticAnnotation`, its instances are visible only locally during the compilation run that analyzes them. + +## Host-platform Annotations + +The host platform may define its own annotation format. +These annotations do not extend any of the classes in the `scala.annotation` package, but can generally be used in the same way as Scala annotations. +The host platform may impose additional restrictions on the expressions which are valid as annotation arguments. 
diff --git a/docs/_spec/12-the-scala-standard-library.md b/docs/_spec/12-the-scala-standard-library.md new file mode 100644 index 000000000000..441955df9b4f --- /dev/null +++ b/docs/_spec/12-the-scala-standard-library.md @@ -0,0 +1,726 @@ +--- +title: Standard Library +layout: default +chapter: 12 +--- + +# The Scala Standard Library + +The Scala standard library consists of the package `scala` with a number of classes and modules. +Some of these classes are described in the following. + +![Class hierarchy of Scala](public/images/classhierarchy.png) + + +## Root Classes + +The root of this hierarchy is formed by class `Any`. +Every class in a Scala execution environment inherits directly or indirectly from this class. +Class `Any` has two direct subclasses: `AnyRef` and `AnyVal`. + +The subclass `AnyRef` represents all values which are represented as objects in the underlying host system. +Classes written in other languages inherit from `scala.AnyRef`. + +The predefined subclasses of class `AnyVal` describe values which are not implemented as objects in the underlying host system. + +User-defined Scala classes which do not explicitly inherit from `AnyVal` inherit directly or indirectly from `AnyRef`. +They cannot inherit from both `AnyRef` and `AnyVal`. + +Classes `AnyRef` and `AnyVal` are required to provide only the members declared in class `Any`, but implementations may add host-specific methods to these classes (for instance, an implementation may identify class `AnyRef` with its own root class for objects). + +The signatures of these root classes are described by the following definitions. 
+
+```scala
+package scala
+/** The universal root class */
+abstract class Any {
+
+  /** Defined equality; abstract here */
+  def equals(that: Any): Boolean
+
+  /** Semantic equality between values */
+  final def == (that: Any): Boolean =
+    if (null eq this) null eq that else this equals that
+
+  /** Semantic inequality between values */
+  final def != (that: Any): Boolean = !(this == that)
+
+  /** Hash code; abstract here */
+  def hashCode: Int = ...
+
+  /** Textual representation; abstract here */
+  def toString: String = ...
+
+  /** Type test; needs to be inlined to work as given */
+  def isInstanceOf[a]: Boolean
+
+  /** Type cast; needs to be inlined to work as given */
+  def asInstanceOf[A]: A = this match {
+    case x: A => x
+    case _ => if (this eq null) this
+              else throw new ClassCastException()
+  }
+}
+
+/** The root class of all value types */
+final class AnyVal extends Any
+
+/** The root class of all reference types */
+class AnyRef extends Any {
+  def equals(that: Any): Boolean = this eq that
+  final def eq(that: AnyRef): Boolean = ... // reference equality
+  final def ne(that: AnyRef): Boolean = !(this eq that)
+
+  def hashCode: Int = ... // hashCode computed from allocation address
+  def toString: String = ... // toString computed from hashCode and class name
+
+  def synchronized[T](body: => T): T // execute `body` while locking `this`.
+}
+```
+
+The type test `´x´.isInstanceOf[´T´]` is equivalent to a typed pattern match
+
+```scala
+´x´ match {
+  case _: ´T'´ => true
+  case _ => false
+}
+```
+
+where the type ´T'´ is the same as ´T´ except if ´T´ is of the form ´D´ or ´D[\mathit{tps}]´ where ´D´ is a type member of some outer class ´C´.
+In this case ´T'´ is `´C´#´D´` (or `´C´#´D[tps]´`, respectively), whereas ´T´ itself would expand to `´C´.this.´D[tps]´`.
+In other words, an `isInstanceOf` test does not check that types have the same enclosing instance.
+ +The test `´x´.asInstanceOf[´T´]` is treated specially if ´T´ is a [numeric value type](#value-classes). +In this case the cast will be translated to an application of a [conversion method](#numeric-value-types) `x.to´T´`. +For non-numeric values ´x´ the operation will raise a `ClassCastException`. + +## Value Classes + +Value classes are classes whose instances are not represented as objects by the underlying host system. +All value classes inherit from class `AnyVal`. +Scala implementations need to provide the value classes `Unit`, `Boolean`, `Double`, `Float`, `Long`, `Int`, `Char`, `Short`, and `Byte` (but are free to provide others as well). +The signatures of these classes are defined in the following. + +### Numeric Value Types + +Classes `Double`, `Float`, `Long`, `Int`, `Char`, `Short`, and `Byte` are together called _numeric value types_. +Classes `Byte`, `Short`, or `Char` are called _subrange types_. +Subrange types, as well as `Int` and `Long` are called _integer types_, whereas `Float` and `Double` are called _floating point types_. + +Numeric value types are ranked in the following partial order: + +```scala +Byte - Short + \ + Int - Long - Float - Double + / + Char +``` + +`Byte` and `Short` are the lowest-ranked types in this order, whereas `Double` is the highest-ranked. +Ranking does _not_ +imply a [conformance relationship](03-types.html#conformance); for instance `Int` is not a subtype of `Long`. +However, object [`Predef`](#the-predef-object) defines [views](07-implicits.html#views) from every numeric value type to all higher-ranked numeric value types. +Therefore, lower-ranked types are implicitly converted to higher-ranked types when required by the [context](06-expressions.html#implicit-conversions). + +Given two numeric value types ´S´ and ´T´, the _operation type_ of ´S´ and ´T´ is defined as follows: If both ´S´ and ´T´ are subrange types then the operation type of ´S´ and ´T´ is `Int`. 
+Otherwise the operation type of ´S´ and ´T´ is the larger of the two types wrt +ranking. +Given two numeric values ´v´ and ´w´ the operation type of ´v´ and ´w´ is the operation type of their run-time types. + +Any numeric value type ´T´ supports the following methods. + +* Comparison methods for equals (`==`), not-equals (`!=`), less-than (`<`), greater-than (`>`), less-than-or-equals (`<=`), greater-than-or-equals (`>=`), which each exist in 7 overloaded alternatives. +Each alternative takes a parameter of some numeric value type. +Its result type is type `Boolean`. +The operation is evaluated by converting the receiver and its argument to their operation type and performing the given comparison operation of that type. +* Arithmetic methods addition (`+`), subtraction (`-`), multiplication (`*`), division (`/`), and remainder (`%`), which each exist in 7 overloaded alternatives. +Each alternative takes a parameter of some numeric value type ´U´. +Its result type is the operation type of ´T´ and ´U´. +The operation is evaluated by converting the receiver and its argument to their operation type and performing the given arithmetic operation of that type. +* Parameterless arithmetic methods identity (`+`) and negation (`-`), with result type ´T´. +The first of these returns the receiver unchanged, whereas the second returns its negation. +* Conversion methods `toByte`, `toShort`, `toChar`, `toInt`, `toLong`, `toFloat`, `toDouble` which convert the receiver object to the target type, using the rules of Java's numeric type cast operation. +The conversion might truncate the numeric value (as when going from `Long` to `Int` or from `Int` to `Byte`) or it might lose precision (as when going from `Double` to `Float` or when converting between `Long` and `Float`). 
+ +Integer numeric value types support in addition the following operations: + +* Bit manipulation methods bitwise-and (`&`), bitwise-or {`|`}, and bitwise-exclusive-or (`^`), which each exist in 5 overloaded alternatives. +Each alternative takes a parameter of some integer numeric value type. +Its result type is the operation type of ´T´ and ´U´. +The operation is evaluated by converting the receiver and its argument to their operation type and performing the given bitwise operation of that type. + +* A parameterless bit-negation method (`~`). +Its result type is the receiver type ´T´ or `Int`, whichever is larger. +The operation is evaluated by converting the receiver to the result type and negating every bit in its value. +* Bit-shift methods left-shift (`<<`), arithmetic right-shift (`>>`), and unsigned right-shift (`>>>`). +Each of these methods has two overloaded alternatives, which take a parameter ´n´ of type `Int`, respectively `Long`. +The result type of the operation is the receiver type ´T´, or `Int`, whichever is larger. +The operation is evaluated by converting the receiver to the result type and performing the specified shift by ´n´ bits. + +Numeric value types also implement operations `equals`, `hashCode`, and `toString` from class `Any`. + +The `equals` method tests whether the argument is a numeric value type. +If this is true, it will perform the `==` operation which is appropriate for that type. +That is, the `equals` method of a numeric value type can be thought of being defined as follows: + +```scala +def equals(other: Any): Boolean = other match { + case that: Byte => this == that + case that: Short => this == that + case that: Char => this == that + case that: Int => this == that + case that: Long => this == that + case that: Float => this == that + case that: Double => this == that + case _ => false +} +``` + +The `hashCode` method returns an integer hashcode that maps equal numeric values to equal results. 
+It is guaranteed to be the identity for type `Int` and for all subrange types. + +The `toString` method displays its receiver as an integer or floating point number. + +###### Example + +This is the signature of the numeric value type `Int`: + +```scala +package scala +abstract sealed class Int extends AnyVal { + def == (that: Double): Boolean // double equality + def == (that: Float): Boolean // float equality + def == (that: Long): Boolean // long equality + def == (that: Int): Boolean // int equality + def == (that: Short): Boolean // int equality + def == (that: Byte): Boolean // int equality + def == (that: Char): Boolean // int equality + /* analogous for !=, <, >, <=, >= */ + + def + (that: Double): Double // double addition + def + (that: Float): Double // float addition + def + (that: Long): Long // long addition + def + (that: Int): Int // int addition + def + (that: Short): Int // int addition + def + (that: Byte): Int // int addition + def + (that: Char): Int // int addition + /* analogous for -, *, /, % */ + + def & (that: Long): Long // long bitwise and + def & (that: Int): Int // int bitwise and + def & (that: Short): Int // int bitwise and + def & (that: Byte): Int // int bitwise and + def & (that: Char): Int // int bitwise and + /* analogous for |, ^ */ + + def << (cnt: Int): Int // int left shift + def << (cnt: Long): Int // long left shift + /* analogous for >>, >>> */ + + def unary_+ : Int // int identity + def unary_- : Int // int negation + def unary_~ : Int // int bitwise negation + + def toByte: Byte // convert to Byte + def toShort: Short // convert to Short + def toChar: Char // convert to Char + def toInt: Int // convert to Int + def toLong: Long // convert to Long + def toFloat: Float // convert to Float + def toDouble: Double // convert to Double +} +``` + +### Class `Boolean` + +Class `Boolean` has only two values: `true` and `false`. +It implements operations as given in the following class definition. 
+ +```scala +package scala +abstract sealed class Boolean extends AnyVal { + def && (p: => Boolean): Boolean = // boolean and + if (this) p else false + def || (p: => Boolean): Boolean = // boolean or + if (this) true else p + def & (x: Boolean): Boolean = // boolean strict and + if (this) x else false + def | (x: Boolean): Boolean = // boolean strict or + if (this) true else x + def == (x: Boolean): Boolean = // boolean equality + if (this) x else x.unary_! + def != (x: Boolean): Boolean = // boolean inequality + if (this) x.unary_! else x + def unary_!: Boolean = // boolean negation + if (this) false else true +} +``` + +The class also implements operations `equals`, `hashCode`, and `toString` from class `Any`. + +The `equals` method returns `true` if the argument is the same boolean value as the receiver, `false` otherwise. +The `hashCode` method returns a fixed, implementation-specific hash-code when invoked on `true`, and a different, fixed, implementation-specific hash-code when invoked on `false`. +The `toString` method returns the receiver converted to a string, i.e. either `"true"` or `"false"`. + +### Class `Unit` + +Class `Unit` has only one value: `()`. +It implements only the three methods `equals`, `hashCode`, and `toString` from class `Any`. + +The `equals` method returns `true` if the argument is the unit value `()`, `false` otherwise. +The `hashCode` method returns a fixed, implementation-specific hash-code. +The `toString` method returns `"()"`. + +## Standard Reference Classes + +This section presents some standard Scala reference classes which are treated in a special way by the Scala compiler – either Scala provides syntactic sugar for them, or the Scala compiler generates special code for their operations. +Other classes in the standard Scala library are documented in the Scala library documentation by HTML pages. 
+ +### Class `String` + +Scala's `String` class is usually derived from the standard String class of the underlying host system (and may be identified with it). +For Scala clients the class is taken to support in each case a method + +```scala +def + (that: Any): String +``` + +which concatenates its left operand with the textual representation of its right operand. + + +### The `Function` Classes + +For each class type `Function´n´` where ´n = 0, ..., 22´, Scala defines the following function class: + +```scala +package scala +trait Function´_n´[-´T_1´, ..., -´T_n´, +´R´]: + def apply(´x_1´: ´T_1´, ..., ´x_n´: ´T_n´): ´R´ + override def toString = "" + def curried: ´T_1´ => ... => ´T_n´ => R = ... + def tupled: ((´T_1´, ..., ´T_n´)) => R = ... +``` + +For function types `Function´n´` where ´n > 22´, Scala defines a unique function class: + +```scala +package scala +trait FunctionXXL: + def apply(xs: IArray[Object]): Object + override def toString = "" +``` + +There is no loss of type safety, as the internal representation is still `Function´n´` for all ´n´. +However this means methods `curried` and `tupled` are not available on functions with more than 22 parameters. + +The implicitly imported [`Predef`](#the-predef-object) object defines the name +`Function` as an alias of `Function1`. + + +The `PartialFunction` subclass of `Function1` represents functions that (indirectly) specify their domain. +Use the `isDefined` method to query whether the partial function is defined for a given input (i.e., whether the input is part of the function's domain). + +```scala +class PartialFunction[-A, +B] extends Function1[A, B] { + def isDefinedAt(x: A): Boolean +} +``` + +### Trait `Product` + + +All case classes automatically extend the `Product` trait (and generate synthetic methods to conform to it) (but not `Product´n´`), and define a `_´n´` method for each of their arguments. 
+ +### Trait `Enum` + +All enum definitions automatically extend the `reflect.Enum` trait (and generate synthetic methods to conform to it). + +### Class `Array` + +All operations on arrays desugar to the corresponding operations of the underlying platform. +Therefore, the following class definition is given for informational purposes only: + +```scala +final class Array[T](_length: Int) +extends java.io.Serializable with java.lang.Cloneable { + def length: Int = ... + def apply(i: Int): T = ... + def update(i: Int, x: T): Unit = ... + override def clone(): Array[T] = ... +} +``` + +If ´T´ is not a type parameter or abstract type, the type `Array[T]` is represented as the array type `|T|[]` in the underlying host system, where `|T|` is the erasure of `T`. +If ´T´ is a type parameter or abstract type, a different representation might be used (it is `Object` on the Java platform). + +#### Operations + +`length` returns the length of the array, `apply` means subscripting, and `update` means element update. + +Because of the syntactic sugar for `apply` and `update` operations, we have the following correspondences between Scala and Java code for operations on an array `xs`: + +|_Scala_ |_Java_ | +|------------------|------------| +|`xs.length` |`xs.length` | +|`xs(i)` |`xs[i]` | +|`xs(i) = e` |`xs[i] = e` | + +Two implicit conversions exist in `Predef` that are frequently applied to arrays: a conversion to `scala.collection.mutable.ArrayOps` and a conversion to `scala.collection.mutable.ArraySeq` (a subtype of `scala.collection.Seq`). + +Both types make many of the standard operations found in the Scala collections API available. +The conversion to `ArrayOps` is temporary, as all operations defined on `ArrayOps` return a value of type `Array`, while the conversion to `ArraySeq` is permanent as all operations return a value of type `ArraySeq`. +The conversion to `ArrayOps` takes priority over the conversion to `ArraySeq`. 
+ +Because of the tension between parametrized types in Scala and the ad-hoc implementation of arrays in the host-languages, some subtle points need to be taken into account when dealing with arrays. +These are explained in the following. + +#### Variance + +Unlike arrays in Java, arrays in Scala are _not_ co-variant; That is, ´S <: T´ does not imply `Array[´S´] ´<:´ Array[´T´]` in Scala. +However, it is possible to cast an array of ´S´ to an array of ´T´ if such a cast is permitted in the host environment. + +For instance `Array[String]` does not conform to `Array[Object]`, even though `String` conforms to `Object`. +However, it is possible to cast an expression of type `Array[String]` to `Array[Object]`, and this cast will succeed without raising a `ClassCastException`. Example: + +```scala +val xs = new Array[String](2) +// val ys: Array[Object] = xs // **** error: incompatible types +val ys: Array[Object] = xs.asInstanceOf[Array[Object]] // OK +``` + +The instantiation of an array with a polymorphic element type ´T´ requires information about type ´T´ at runtime. +This information is synthesized by adding a [context bound](07-implicits.html#context-bounds-and-view-bounds) of `scala.reflect.ClassTag` to type ´T´. +An example is the following implementation of method `mkArray`, which creates an array of an arbitrary type ´T´, given a sequence of ´T´`s which defines its elements: + +```scala +import reflect.ClassTag +def mkArray[T : ClassTag](elems: Seq[T]): Array[T] = { + val result = new Array[T](elems.length) + var i = 0 + for (elem <- elems) { + result(i) = elem + i += 1 + } + result +} +``` + +If type ´T´ is a type for which the host platform offers a specialized array representation, this representation is used. + +###### Example +On the Java Virtual Machine, an invocation of `mkArray(List(1,2,3))` will return a primitive array of `int`s, written as `int[]` in Java. 
+ +#### Companion object + +`Array`'s companion object provides various factory methods for the instantiation of single- and multi-dimensional arrays, an extractor method [`unapplySeq`](08-pattern-matching.html#extractor-patterns) which enables pattern matching over arrays and additional utility methods: + +```scala +package scala +object Array { + /** copies array elements from `src` to `dest`. */ + def copy(src: AnyRef, srcPos: Int, + dest: AnyRef, destPos: Int, length: Int): Unit = ... + + /** Returns an array of length 0 */ + def empty[T: ClassTag]: Array[T] = + + /** Create an array with given elements. */ + def apply[T: ClassTag](xs: T*): Array[T] = ... + + /** Creates array with given dimensions */ + def ofDim[T: ClassTag](n1: Int): Array[T] = ... + /** Creates a 2-dimensional array */ + def ofDim[T: ClassTag](n1: Int, n2: Int): Array[Array[T]] = ... + ... + + /** Concatenate all argument arrays into a single array. */ + def concat[T: ClassTag](xss: Array[T]*): Array[T] = ... + + /** Returns an array that contains the results of some element computation a number + * of times. */ + def fill[T: ClassTag](n: Int)(elem: => T): Array[T] = ... + /** Returns a two-dimensional array that contains the results of some element + * computation a number of times. */ + def fill[T: ClassTag](n1: Int, n2: Int)(elem: => T): Array[Array[T]] = ... + ... + + /** Returns an array containing values of a given function over a range of integer + * values starting from 0. */ + def tabulate[T: ClassTag](n: Int)(f: Int => T): Array[T] = ... + /** Returns a two-dimensional array containing values of a given function + * over ranges of integer values starting from `0`. */ + def tabulate[T: ClassTag](n1: Int, n2: Int)(f: (Int, Int) => T): Array[Array[T]] = ... + ... + + /** Returns an array containing a sequence of increasing integers in a range. */ + def range(start: Int, end: Int): Array[Int] = ... + /** Returns an array containing equally spaced values in some integer interval. 
*/ + def range(start: Int, end: Int, step: Int): Array[Int] = ... + + /** Returns an array containing repeated applications of a function to a start value. */ + def iterate[T: ClassTag](start: T, len: Int)(f: T => T): Array[T] = ... + + /** Enables pattern matching over arrays */ + def unapplySeq[A](x: Array[A]): Option[IndexedSeq[A]] = Some(x) +} +``` + +## Class Node + +```scala +package scala.xml + +trait Node { + + /** the label of this node */ + def label: String + + /** attribute axis */ + def attribute: Map[String, String] + + /** child axis (all children of this node) */ + def child: Seq[Node] + + /** descendant axis (all descendants of this node) */ + def descendant: Seq[Node] = child.toList.flatMap { + x => x::x.descendant.asInstanceOf[List[Node]] + } + + /** descendant axis (all descendants of this node) */ + def descendant_or_self: Seq[Node] = this::child.toList.flatMap { + x => x::x.descendant.asInstanceOf[List[Node]] + } + + override def equals(x: Any): Boolean = x match { + case that:Node => + that.label == this.label && + that.attribute.sameElements(this.attribute) && + that.child.sameElements(this.child) + case _ => false + } + + /** XPath style projection function. Returns all children of this node + * that are labeled with 'that'. The document order is preserved. + */ + def \(that: Symbol): NodeSeq = { + new NodeSeq({ + that.name match { + case "_" => child.toList + case _ => + var res:List[Node] = Nil + for (x <- child.elements if x.label == that.name) { + res = x::res + } + res.reverse + } + }) + } + + /** XPath style projection function. Returns all nodes labeled with the + * name 'that' from the 'descendant_or_self' axis. Document order is preserved. + */ + def \\(that: Symbol): NodeSeq = { + new NodeSeq( + that.name match { + case "_" => this.descendant_or_self + case _ => this.descendant_or_self.asInstanceOf[List[Node]]. 
+ filter(x => x.label == that.name) + }) + } + + /** hashcode for this XML node */ + override def hashCode = + Utility.hashCode(label, attribute.toList.hashCode, child) + + /** string representation of this node */ + override def toString = Utility.toXML(this) + +} +``` + +## The `Predef` Object + +The `Predef` object defines standard methods and type aliases for Scala programs. +It is implicitly imported, as described in [the chapter on name binding](02-identifiers-names-and-scopes.html), so that all its defined members are available without qualification. +Its definition for the JVM environment conforms to the following signature: + +```scala +package scala +object Predef { + + // classOf --------------------------------------------------------- + + /** Returns the runtime representation of a class type. */ + def classOf[T]: Class[T] = null + // this is a dummy, classOf is handled by compiler. + + // valueOf ----------------------------------------------------------- + + /** Retrieve the single value of a type with a unique inhabitant. */ + @inline def valueOf[T](implicit vt: ValueOf[T]): T = vt.value + // instances of the ValueOf type class are provided by the compiler.
+ + // Standard type aliases --------------------------------------------- + + type String = java.lang.String + type Class[T] = java.lang.Class[T] + + // Miscellaneous ----------------------------------------------------- + + type Function[-A, +B] = Function1[A, B] + + type Map[A, +B] = collection.immutable.Map[A, B] + type Set[A] = collection.immutable.Set[A] + + val Map = collection.immutable.Map + val Set = collection.immutable.Set + + // Manifest types, companions, and incantations for summoning --------- + + type ClassManifest[T] = scala.reflect.ClassManifest[T] + type Manifest[T] = scala.reflect.Manifest[T] + type OptManifest[T] = scala.reflect.OptManifest[T] + val ClassManifest = scala.reflect.ClassManifest + val Manifest = scala.reflect.Manifest + val NoManifest = scala.reflect.NoManifest + + def manifest[T](implicit m: Manifest[T]) = m + def classManifest[T](implicit m: ClassManifest[T]) = m + def optManifest[T](implicit m: OptManifest[T]) = m + + // Minor variations on identity functions ----------------------------- + def identity[A](x: A): A = x + def implicitly[T](implicit e: T) = e // for summoning implicit values from the nether world + @inline def locally[T](x: T): T = x // to communicate intent and avoid unmoored statements + + // Asserts, Preconditions, Postconditions ----------------------------- + + def assert(assertion: Boolean) { + if (!assertion) + throw new java.lang.AssertionError("assertion failed") + } + + def assert(assertion: Boolean, message: => Any) { + if (!assertion) + throw new java.lang.AssertionError("assertion failed: " + message) + } + + def assume(assumption: Boolean) { + if (!assumption) + throw new IllegalArgumentException("assumption failed") + } + + def assume(assumption: Boolean, message: => Any) { + if (!assumption) + throw new IllegalArgumentException("assumption failed: " + message.toString) + } + + def require(requirement: Boolean) { + if (!requirement) + throw new IllegalArgumentException("requirement failed") + } + 
+ def require(requirement: Boolean, message: => Any) { + if (!requirement) + throw new IllegalArgumentException("requirement failed: "+ message) + } +``` + +```scala + // Printing and reading ----------------------------------------------- + + def print(x: Any) = Console.print(x) + def println() = Console.println() + def println(x: Any) = Console.println(x) + def printf(text: String, xs: Any*) = Console.printf(text.format(xs: _*)) + + // Implicit conversions ------------------------------------------------ + + ... +} +``` + +### Predefined Implicit Definitions + +The `Predef` object also contains a number of implicit definitions, which are available by default (because `Predef` is implicitly imported). +Implicit definitions come in two priorities. +High-priority implicits are defined in the `Predef` class itself whereas low priority implicits are defined in a class inherited by `Predef`. +The rules of static [overloading resolution](06-expressions.html#overloading-resolution) stipulate that, all other things being equal, implicit resolution prefers high-priority implicits over low-priority ones. + +The available low-priority implicits include definitions falling into the following categories. + +1. For every primitive type, a wrapper that takes values of that type to instances of a `runtime.Rich*` class. +For instance, values of type `Int` can be implicitly converted to instances of class `runtime.RichInt`. + +1. For every array type with elements of primitive type, a wrapper that takes the arrays of that type to instances of a `ArraySeq` class. +For instance, values of type `Array[Float]` can be implicitly converted to instances of class `ArraySeq[Float]`. +There are also generic array wrappers that take elements of type `Array[T]` for arbitrary `T` to `ArraySeq`s. + +1. An implicit conversion from `String` to `WrappedString`. + +The available high-priority implicits include definitions falling into the following categories. 
+ +* An implicit wrapper that adds `ensuring` methods with the following overloaded variants to type `Any`. +```scala +def ensuring(cond: Boolean): A = { assert(cond); x } +def ensuring(cond: Boolean, msg: Any): A = { assert(cond, msg); x } +def ensuring(cond: A => Boolean): A = { assert(cond(x)); x } +def ensuring(cond: A => Boolean, msg: Any): A = { assert(cond(x), msg); x } +``` + +* An implicit wrapper that adds a `->` method with the following implementation to type `Any`. +```scala +def -> [B](y: B): (A, B) = (x, y) +``` + +* For every array type with elements of primitive type, a wrapper that takes the arrays of that type to instances of a `runtime.ArrayOps` class. +For instance, values of type `Array[Float]` can be implicitly converted to instances of class `runtime.ArrayOps[Float]`. +There are also generic array wrappers that take elements of type `Array[T]` for arbitrary `T` to `ArrayOps`s. + +* An implicit wrapper that adds `+` and `formatted` method with the following implementations to type `Any`. +```scala +def +(other: String) = String.valueOf(self) + other +def formatted(fmtstr: String): String = fmtstr format self +``` + +* Numeric primitive conversions that implement the transitive closure of the following mappings: +``` +Byte -> Short +Short -> Int +Char -> Int +Int -> Long +Long -> Float +Float -> Double +``` + +* Boxing and unboxing conversions between primitive types and their boxed versions: +``` +Byte <-> java.lang.Byte +Short <-> java.lang.Short +Char <-> java.lang.Character +Int <-> java.lang.Integer +Long <-> java.lang.Long +Float <-> java.lang.Float +Double <-> java.lang.Double +Boolean <-> java.lang.Boolean +``` + +* An implicit definition that generates instances of type `T <:< T`, for any type `T`. Here, `<:<` is a class defined as follows. +```scala +sealed abstract class <:<[-From, +To] extends (From => To) +``` +Implicit parameters of `<:<` types are typically used to implement type constraints. 
diff --git a/docs/_spec/13-syntax-summary.md b/docs/_spec/13-syntax-summary.md new file mode 100644 index 000000000000..2dc971fc9840 --- /dev/null +++ b/docs/_spec/13-syntax-summary.md @@ -0,0 +1,185 @@ +--- +title: Syntax Summary +layout: default +chapter: 13 +--- + +# Syntax Summary + +The following descriptions of Scala tokens uses literal characters `‘c’` when referring to the ASCII fragment `\u0000` – `\u007F`. + +Informal descriptions are typeset as `“some comment”`. + +## Lexical Syntax + +The lexical syntax of Scala is given by the following grammar in EBNF form: + +```ebnf +whiteSpace ::= ‘\u0020’ | ‘\u0009’ | ‘\u000D’ | ‘\u000A’ +upper ::= ‘A’ | ... | ‘Z’ | ‘$’ and any character in Unicode categories Lu, Lt or Nl, + and any character in Unicode categories Lo and Lm that doesn't have + contributory property Other_Lowercase +lower ::= ‘a’ | ... | ‘z’ | ‘_’ and any character in Unicode category Ll, + and any character in Unicode categories Lo or Lm that has contributory + property Other_Lowercase +letter ::= upper | lower +digit ::= ‘0’ | ... | ‘9’ +paren ::= ‘(’ | ‘)’ | ‘[’ | ‘]’ | ‘{’ | ‘}’ +delim ::= ‘`’ | ‘'’ | ‘"’ | ‘.’ | ‘;’ | ‘,’ +opchar ::= ‘!’ | ‘#’ | ‘%’ | ‘&’ | ‘*’ | ‘+’ | ‘-’ | ‘/’ | ‘:’ | + ‘<’ | ‘=’ | ‘>’ | ‘?’ | ‘@’ | ‘\’ | ‘^’ | ‘|’ | ‘~’ + and any character in Unicode categories Sm or So +printableChar ::= all characters in [\u0020, \u007E] inclusive +UnicodeEscape ::= ‘\’ ‘u’ {‘u’} hexDigit hexDigit hexDigit hexDigit +hexDigit ::= ‘0’ | ... | ‘9’ | ‘A’ | ... | ‘F’ | ‘a’ | ... 
| ‘f’ +charEscapeSeq ::= ‘\’ (‘b’ | ‘t’ | ‘n’ | ‘f’ | ‘r’ | ‘"’ | ‘'’ | ‘\’) +escapeSeq ::= UnicodeEscape | charEscapeSeq + +op ::= opchar {opchar} +varid ::= lower idrest +boundvarid ::= varid + | ‘`’ varid ‘`’ +alphaid ::= upper idrest + | varid +plainid ::= alphaid + | op +id ::= plainid + | ‘`’ { charNoBackQuoteOrNewline | escapeSeq } ‘`’ +idrest ::= {letter | digit} [‘_’ op] +quoteId ::= ‘'’ alphaid +spliceId ::= ‘$’ alphaid ; + +integerLiteral ::= (decimalNumeral | hexNumeral) [‘L’ | ‘l’] +decimalNumeral ::= ‘0’ | digit [{digit | ‘_’} digit] +hexNumeral ::= ‘0’ (‘x’ | ‘X’) hexDigit [{hexDigit | ‘_’} hexDigit] + +floatingPointLiteral + ::= [decimalNumeral] ‘.’ digit [{digit | ‘_’} digit] [exponentPart] [floatType] + | decimalNumeral exponentPart [floatType] + | decimalNumeral floatType +exponentPart ::= (‘E’ | ‘e’) [‘+’ | ‘-’] digit [{digit | ‘_’} digit] +floatType ::= ‘F’ | ‘f’ | ‘D’ | ‘d’ + +booleanLiteral ::= ‘true’ | ‘false’ + +characterLiteral ::= ‘'’ (charNoQuoteOrNewline | escapeSeq) ‘'’ + +stringLiteral ::= ‘"’ {stringElement} ‘"’ + | ‘"""’ multiLineChars ‘"""’ +stringElement ::= charNoDoubleQuoteOrNewline + | escapeSeq +multiLineChars ::= {[‘"’] [‘"’] charNoDoubleQuote} {‘"’} + +interpolatedString + ::= alphaid ‘"’ {[‘\’] interpolatedStringPart | ‘\\’ | ‘\"’} ‘"’ + | alphaid ‘"""’ {[‘"’] [‘"’] char \ (‘"’ | ‘\$’) | escape} {‘"’} ‘"""’ +interpolatedStringPart + ::= printableChar \ (‘"’ | ‘$’ | ‘\’) | escape +escape ::= ‘\$\$’ + | ‘\$"’ + | ‘\$’ alphaid + | ‘\$’ BlockExpr + +comment ::= ‘/*’ “any sequence of characters; nested comments are allowed” ‘*/’ + | ‘//’ “any sequence of characters up to end of line” + +nl ::= ´\mathit{“new line character”}´ +semi ::= ‘;’ | nl {nl} +``` + +## Optional Braces + +``` +colon ::= ':' -- with side conditions explained in 01-literal-syntax.md + <<< ts >>> ::= ‘{’ ts ‘}’ + | indent ts outdent +:<<< ts >>> ::= [nl] ‘{’ ts ‘}’ + | colon indent ts outdent +``` + +## Context-free Syntax + +´\color{red}{\text{TODO SCALA3: 
Once we're done porting the spec, make sure that +the references to grammar productions in the rest of the spec match this.}}´ + +The context-free syntax of Scala is given by the following EBNF grammar: + +```ebnf +RefineDcl ::= ‘val’ ValDcl + | ‘def’ DefDcl + | ‘type’ {nl} TypeDcl +Dcl ::= RefineDcl + | ‘var’ VarDcl +ValDcl ::= ids ‘:’ Type +VarDcl ::= ids ‘:’ Type +DefDcl ::= DefSig ‘:’ Type +DefSig ::= id [DefTypeParamClause] [TypelessClauses] [DefImplicitClause] +TypeDcl ::= id [TypeParamClause] {FunParamClause} TypeBounds + +Def ::= ‘val’ PatDef + | ‘var’ PatDef + | ‘def’ DefDef + | ‘type’ {nl} TypeDcl + | TmplDef +PatDef ::= ids [‘:’ Type] ‘=’ Expr + | Pattern2 [‘:’ Type] ‘=’ Expr +DefDef ::= DefSig [‘:’ Type] ‘=’ Expr + | ‘this’ TypelessClauses [DefImplicitClause] ‘=’ ConstrExpr + +TmplDef ::= ([‘case’] ‘class’ | ‘trait’) ClassDef + | [‘case’] ‘object’ ObjectDef + | ‘enum’ EnumDef + | ‘given’ GivenDef +ClassDef ::= id ClassConstr [Template] +ClassConstr ::= [ClsTypeParamClause] [ConstrMods] ClsParamClauses +ConstrMods ::= {Annotation} [AccessModifier] +ObjectDef ::= id [Template] +EnumDef ::= id ClassConstr InheritClauses EnumBody +GivenDef ::= [GivenSig] (AnnotType [‘=’ Expr] | StructuralInstance) +GivenSig ::= [id] [DefTypeParamClause] {UsingParamClause} ‘:’ -- one of `id`, `DefTypeParamClause`, `UsingParamClause` must be present +StructuralInstance ::= ConstrApp {‘with’ ConstrApp} [‘with’ WithTemplateBody] +Extension ::= ‘extension’ [DefTypeParamClause] {UsingParamClause} + ‘(’ DefTermParam ‘)’ {UsingParamClause} ExtMethods +ExtMethods ::= ExtMethod | [nl] <<< ExtMethod {semi ExtMethod} >>> +ExtMethod ::= {Annotation [nl]} {Modifier} ‘def’ DefDef + | Export +Template ::= InheritClauses [TemplateBody] +InheritClauses ::= [‘extends’ ConstrApps] [‘derives’ QualId {‘,’ QualId}] +ConstrApps ::= ConstrApp ({‘,’ ConstrApp} | {‘with’ ConstrApp}) +ConstrApp ::= SimpleType1 {Annotation} {ParArgumentExprs} +ConstrExpr ::= SelfInvocation + | <<< SelfInvocation {semi 
BlockStat} >>> +SelfInvocation ::= ‘this’ ArgumentExprs {ArgumentExprs} + +WithTemplateBody ::= <<< [SelfType] TemplateStat {semi TemplateStat} >>> +TemplateBody ::= :<<< [SelfType] TemplateStat {semi TemplateStat} >>> +TemplateStat ::= Import + | Export + | {Annotation [nl]} {Modifier} Def + | {Annotation [nl]} {Modifier} Dcl + | Extension + | Expr1 + | EndMarker + | +SelfType ::= id [‘:’ InfixType] ‘=>’ + | ‘this’ ‘:’ InfixType ‘=>’ + +EnumBody ::= :<<< [SelfType] EnumStat {semi EnumStat} >>> +EnumStat ::= TemplateStat + | {Annotation [nl]} {Modifier} EnumCase +EnumCase ::= ‘case’ (id ClassConstr [‘extends’ ConstrApps] | ids) + +TopStats ::= TopStat {semi TopStat} +TopStat ::= Import + | Export + | {Annotation [nl]} {Modifier} Def + | Extension + | Packaging + | PackageObject + | EndMarker + | +Packaging ::= ‘package’ QualId :<<< TopStats >>> +PackageObject ::= ‘package’ ‘object’ ObjectDef + +CompilationUnit ::= {‘package’ QualId semi} TopStats +``` diff --git a/docs/_spec/A1-deprecated.md b/docs/_spec/A1-deprecated.md new file mode 100644 index 000000000000..649c2d7d92e6 --- /dev/null +++ b/docs/_spec/A1-deprecated.md @@ -0,0 +1,21 @@ + +### Symbol Literals + +Symbol literals are no longer supported. + +The [`scala.Symbol`](https://scala-lang.org/api/3.x/scala/Symbol.html) class still exists, so a literal translation of the symbol literal `'xyz` is `Symbol("xyz")`. However, it is recommended to use a plain string literal `"xyz"` instead. (The `Symbol` class will be deprecated and removed in the future). Example: + + +``` +scalac Test.scala +-- Error: Test.scala:1:25 ------------------------------------------------------------------------------------------------ + +1 |@main def test = println('abc) + | ^ + | symbol literal 'abc is no longer supported, + | use a string literal "abc" or an application Symbol("abc") instead, + | or enclose in braces '{abc} if you want a quoted expression.
### Compound Types (`with`)
This is a simple list of features that are not deprecated yet, but will be in the future.
Consequently, Scala encourages to define such "property" +methods without a `()` parameter list whereas side-effecting methods +should be defined with it. Methods defined in Java cannot make this +distinction; for them a `()` is always mandatory. So Scala fixes the +problem on the client side, by allowing the parameterless references. +But where Scala allows that freedom for all method references, Scala 3 +restricts it to references of external methods that are not defined +themselves in Scala 3. + +For reasons of backwards compatibility, Scala 3 for the moment also +auto-inserts `()` for nullary methods that are defined in Scala 2, or +that override a method defined in Scala 2. It turns out that, because +the correspondence between definition and call was not enforced in +Scala so far, there are quite a few method definitions in Scala 2 +libraries that use `()` in an inconsistent way. For instance, we +find in `scala.math.Numeric` + +```scala +def toInt(): Int +``` + +whereas `toInt` is written without parameters everywhere +else. Enforcing strict parameter correspondence for references to +such methods would project the inconsistencies to client code, which +is undesirable. So Scala 3 opts for more leniency when type-checking +references to such methods until most core libraries in Scala 2 have +been cleaned up. + +Stricter conformance rules also apply to overriding of nullary +methods. It is no longer allowed to override a parameterless method +by a nullary method or _vice versa_. Instead, both methods must agree +exactly in their parameter lists. + +```scala +class A: + def next(): Int + +class B extends A: + def next: Int // overriding error: incompatible type +``` + +Methods overriding Java or Scala 2 methods are again exempted from this +requirement. + +## Migrating code + +Existing Scala code with inconsistent parameters can still be compiled +in Scala 3 under `-source 3.0-migration`. 
When paired with the `-rewrite` +option, the code will be automatically rewritten to conform to Scala 3's +stricter checking. + +## Reference + +For more information, see [Issue #2570](https://github.com/lampepfl/dotty/issues/2570) and [PR #2716](https://github.com/lampepfl/dotty/pull/2716). diff --git a/docs/_spec/APPLIEDreference/dropped-features/class-shadowing.md b/docs/_spec/APPLIEDreference/dropped-features/class-shadowing.md new file mode 100644 index 000000000000..a27b53db7cce --- /dev/null +++ b/docs/_spec/APPLIEDreference/dropped-features/class-shadowing.md @@ -0,0 +1,31 @@ +--- +layout: doc-page +title: "Dropped: Class Shadowing" +nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/class-shadowing.html +--- + +Scala 2 so far allowed patterns like this: + +```scala +class Base { + class Ops { ... } +} + +class Sub extends Base { + class Ops { ... } +} +``` + +Scala 3 rejects this with the error message: + +```scala +6 | class Ops { } + | ^ + |class Ops cannot have the same name as class Ops in class Base + | -- class definitions cannot be overridden +``` + +The issue is that the two `Ops` classes _look_ like one overrides the +other, but classes in Scala 2 cannot be overridden. To keep things clean +(and its internal operations consistent) the Scala 3 compiler forces you +to rename the inner classes so that their names are different. diff --git a/docs/_spec/APPLIEDreference/dropped-features/delayed-init.md b/docs/_spec/APPLIEDreference/dropped-features/delayed-init.md new file mode 100644 index 000000000000..5d4f614ce951 --- /dev/null +++ b/docs/_spec/APPLIEDreference/dropped-features/delayed-init.md @@ -0,0 +1,32 @@ +--- +layout: doc-page +title: "Dropped: DelayedInit" +nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/delayed-init.html +--- + +The special handling of the [`DelayedInit`](https://scala-lang.org/api/3.x/scala/DelayedInit.html) +trait is no longer supported. 
The syntax construct
```scala
do <body> while (<cond>)
```
is no longer supported. Instead, it is recommended to use the equivalent `while` loop
below:
```scala
while ({ <body> ; <cond> }) ()
```
So there seems to be little point in having it as a separate syntax construct. + - Under the [new syntax rules](../other-new-features/control-syntax.md) `do` is used as a statement continuation, which would clash with its meaning as a statement introduction. diff --git a/docs/_spec/APPLIEDreference/dropped-features/dropped-features.md b/docs/_spec/APPLIEDreference/dropped-features/dropped-features.md new file mode 100644 index 000000000000..f6a13d9fa5da --- /dev/null +++ b/docs/_spec/APPLIEDreference/dropped-features/dropped-features.md @@ -0,0 +1,7 @@ +--- +layout: index +title: "Dropped Features" +nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features.html +--- + +The following pages document the features of Scala 2 that have been dropped in Scala 3. diff --git a/docs/_spec/APPLIEDreference/dropped-features/early-initializers.md b/docs/_spec/APPLIEDreference/dropped-features/early-initializers.md new file mode 100644 index 000000000000..6f7c59c4f031 --- /dev/null +++ b/docs/_spec/APPLIEDreference/dropped-features/early-initializers.md @@ -0,0 +1,16 @@ +--- +layout: doc-page +title: "Dropped: Early Initializers" +nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/early-initializers.html +--- + +Early initializers of the form + +```scala +class C extends { ... } with SuperClass ... +``` + +have been dropped. They were rarely used, and mostly to compensate for the lack of +[trait parameters](../other-new-features/trait-parameters.md), which are now directly supported in Scala 3. + +For more information, see [SLS §5.1.6](https://www.scala-lang.org/files/archive/spec/2.13/05-classes-and-objects.html#early-definitions). 
prefix (`p`, respectively `S`) of a type selection `p.T` or `S#T`
are erased to a new class [`scala.runtime.TupleXXL`](https://scala-lang.org/api/3.x/scala/runtime/TupleXXL.html) (which extends the trait [`scala.Product`](https://scala-lang.org/api/3.x/scala/Product.html)). Furthermore, they support generic
operations such as concatenation and indexing.
dually `${ ... }` evaluates an expression which produces code and inserts it in the surrounding `'{ ... }`.
diff --git a/docs/_spec/APPLIEDreference/dropped-features/symlits.md b/docs/_spec/APPLIEDreference/dropped-features/symlits.md new file mode 100644 index 000000000000..d3c0180b16e6 --- /dev/null +++ b/docs/_spec/APPLIEDreference/dropped-features/symlits.md @@ -0,0 +1,24 @@ +--- +layout: doc-page +title: "Dropped: Symbol Literals" +nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/symlits.html +--- + +Symbol literals are no longer supported. + +The [`scala.Symbol`](https://scala-lang.org/api/3.x/scala/Symbol.html) class still exists, so a literal translation of the symbol literal `'xyz` is `Symbol("xyz")`. However, it is recommended to use a plain string literal `"xyz"` instead. (The `Symbol` class will be deprecated and removed in the future). Example: + + +``` +scalac Test.scala +-- Error: Test.scala:1:25 ------------------------------------------------------------------------------------------------ + +1 |@main def test = println('abc) + | ^ + | symbol literal 'abc is no longer supported, + | use a string literal "abc" or an application Symbol("abc") instead, + | or enclose in braces '{abc} if you want a quoted expression. + | For now, you can also `import language.deprecated.symbolLiterals` to accept + | the idiom, but this possibility might no longer be available in the future. +1 error found +``` diff --git a/docs/_spec/APPLIEDreference/dropped-features/this-qualifier.md b/docs/_spec/APPLIEDreference/dropped-features/this-qualifier.md new file mode 100644 index 000000000000..3fcaefb7e0d8 --- /dev/null +++ b/docs/_spec/APPLIEDreference/dropped-features/this-qualifier.md @@ -0,0 +1,33 @@ +--- +layout: doc-page +title: "Dropped: private[this] and protected[this]" +nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/this-qualifier.html +--- + +The `private[this]` and `protected[this]` access modifiers will be deprecated and phased out. 
+ +Previously, these modifiers were needed for + + - avoiding the generation of getters and setters + - excluding code under a `private[this]` from variance checks. (Scala 2 also excludes `protected[this]` but this was found to be unsound and was therefore removed). + - avoiding the generation of fields, if a `private[this] val` is not accessed + by a class method. + +The compiler now infers for `private` members the fact that they are only accessed via `this`. Such members are treated as if they had been declared `private[this]`. `protected[this]` is dropped without a replacement. + +This change can in some cases change the semantics of a Scala program, since a +`private` val is no longer guaranteed to generate a field. The field +is omitted if + + - the `val` is only accessed via `this`, and + - the `val` is not accessed from a method in the current class. + +This can cause problems if a program tries to access the missing private field via reflection. The recommended fix is to declare the field instead to be qualified private with the enclosing class as qualifier. Example: +```scala + class C(x: Int): + private[C] val field = x + 1 + // [C] needed if `field` is to be accessed through reflection + val retained = field * field +``` + + diff --git a/docs/_spec/APPLIEDreference/enums/adts.md b/docs/_spec/APPLIEDreference/enums/adts.md new file mode 100644 index 000000000000..23599e49dc5b --- /dev/null +++ b/docs/_spec/APPLIEDreference/enums/adts.md @@ -0,0 +1,90 @@ +--- +layout: doc-page +title: "Algebraic Data Types" +nightlyOf: https://docs.scala-lang.org/scala3/reference/enums/adts.html +--- + +The [`enum` concept](./enums.md) is general enough to also support algebraic data types (ADTs) and their generalized version (GADTs). 
+Here is an example how an `Option` type can be represented as an ADT: + +```scala +enum Option[+T]: + case Some(x: T) + case None +``` + +This example introduces an `Option` enum with a covariant type parameter `T` consisting of two cases, `Some` and `None`. +`Some` is parameterized with a value parameter `x`. +It is a shorthand for writing a case class that extends `Option`. +Since `None` is not parameterized, it is treated as a normal enum value. + +The `extends` clauses that were omitted in the example above can also be given explicitly: + +```scala +enum Option[+T]: + case Some(x: T) extends Option[T] + case None extends Option[Nothing] +``` + +Note that the parent type of the `None` value is inferred as `Option[Nothing]`. +Generally, all covariant type parameters of the enum class are minimized in a compiler-generated `extends` clause whereas all contravariant type parameters are maximized. +If `Option` was non-variant, you would need to give the extends clause of `None` explicitly. + +As for normal enum values, the cases of an `enum` are all defined in the `enum`'s companion object. +So it's `Option.Some` and `Option.None` unless the definitions are "pulled out" with an import. + + +## Widening of Constructor Application + +Observe here the inferred result types of the following expressions: +```scala +scala> Option.Some("hello") +val res1: t2.Option[String] = Some(hello) + +scala> Option.None +val res2: t2.Option[Nothing] = None +``` + +Note that the type of the expressions above is always `Option`. +Generally, the type of a enum case constructor application will be widened to the underlying enum type, unless a more specific type is expected. +This is a subtle difference with respect to normal case classes. +The classes making up the cases do exist, and can be unveiled, either by constructing them directly with a `new`, or by explicitly providing an expected type. 
+ +```scala +scala> new Option.Some(2) +val res3: Option.Some[Int] = Some(2) +scala> val x: Option.Some[Int] = Option.Some(3) +val res4: Option.Some[Int] = Some(3) +``` + +As all other enums, ADTs can define methods. +For instance, here is `Option` again, with an `isDefined` method and an `Option(...)` constructor in its companion object. + +```scala +enum Option[+T]: + case Some(x: T) + case None + + def isDefined: Boolean = this match + case None => false + case _ => true + +object Option: + + def apply[T >: Null](x: T): Option[T] = + if x == null then None else Some(x) + +end Option +``` + +Enumerations and ADTs have been presented as two different concepts. +But since they share the same syntactic construct, they can be seen simply as two ends of a spectrum and it is perfectly possible to construct hybrids. +For instance, the code below gives an implementation of `Color` either with three enum values or with a parameterized case that takes an RGB value. + +```scala +enum Color(val rgb: Int): + case Red extends Color(0xFF0000) + case Green extends Color(0x00FF00) + case Blue extends Color(0x0000FF) + case Mix(mix: Int) extends Color(mix) +``` diff --git a/docs/_spec/APPLIEDreference/enums/enums-index.md b/docs/_spec/APPLIEDreference/enums/enums-index.md new file mode 100644 index 000000000000..80d703c3e897 --- /dev/null +++ b/docs/_spec/APPLIEDreference/enums/enums-index.md @@ -0,0 +1,7 @@ +--- +layout: index +title: "Enums" +nightlyOf: https://docs.scala-lang.org/scala3/reference/enums/index.html +--- + +This chapter documents enums in Scala 3. 
The color values are members of `Color`'s companion object.
+For example: + +```scala +enum Planet(mass: Double, radius: Double): + private final val G = 6.67300E-11 + def surfaceGravity = G * mass / (radius * radius) + def surfaceWeight(otherMass: Double) = otherMass * surfaceGravity + + case Mercury extends Planet(3.303e+23, 2.4397e6) + case Venus extends Planet(4.869e+24, 6.0518e6) + case Earth extends Planet(5.976e+24, 6.37814e6) + case Mars extends Planet(6.421e+23, 3.3972e6) + case Jupiter extends Planet(1.9e+27, 7.1492e7) + case Saturn extends Planet(5.688e+26, 6.0268e7) + case Uranus extends Planet(8.686e+25, 2.5559e7) + case Neptune extends Planet(1.024e+26, 2.4746e7) +end Planet +``` + +## User-defined companion object of enums +It is also possible to define an explicit companion object for an enum: + +```scala +object Planet: + def main(args: Array[String]) = + val earthWeight = args(0).toDouble + val mass = earthWeight / Earth.surfaceGravity + for p <- values do + println(s"Your weight on $p is ${p.surfaceWeight(mass)}") +end Planet +``` + +## Restrictions on Enum Cases + +Enum case declarations are similar to secondary constructors: +they are scoped outside of the enum template, despite being declared within it. +This means that enum case declarations cannot access inner members of the enum class. + +Similarly, enum case declarations may not directly reference members of the enum's companion object, even if they are imported (directly, or by renaming). 
+For example: + +```scala +import Planet.* +enum Planet(mass: Double, radius: Double): + private final val (mercuryMass, mercuryRadius) = (3.303e+23, 2.4397e6) + + case Mercury extends Planet(mercuryMass, mercuryRadius) // Not found + case Venus extends Planet(venusMass, venusRadius) // illegal reference + case Earth extends Planet(Planet.earthMass, Planet.earthRadius) // ok +object Planet: + private final val (venusMass, venusRadius) = (4.869e+24, 6.0518e6) + private final val (earthMass, earthRadius) = (5.976e+24, 6.37814e6) +end Planet +``` +The fields referenced by `Mercury` are not visible, and the fields referenced by `Venus` may not be referenced directly (using `import Planet.*`). +You must use an indirect reference, such as demonstrated with `Earth`. + +## Deprecation of Enum Cases + +As a library author, you may want to signal that an enum case is no longer intended for use. +However you could still want to gracefully handle the removal of a case from your public API, such as special casing deprecated cases. + +To illustrate, say that the `Planet` enum originally had an additional case: + +```diff + enum Planet(mass: Double, radius: Double): + ... + case Neptune extends Planet(1.024e+26, 2.4746e7) ++ case Pluto extends Planet(1.309e+22, 1.1883e3) + end Planet +``` + +We now want to deprecate the `Pluto` case. +First we add the `scala.deprecated` annotation to `Pluto`: + +```diff + enum Planet(mass: Double, radius: Double): + ... + case Neptune extends Planet(1.024e+26, 2.4746e7) +- case Pluto extends Planet(1.309e+22, 1.1883e3) ++ ++ @deprecated("refer to IAU definition of planet") ++ case Pluto extends Planet(1.309e+22, 1.1883e3) + end Planet +``` + +Outside the lexical scopes of `enum Planet` or `object Planet`, references to `Planet.Pluto` will produce a deprecation warning. 
+Within those scopes however, we can still reference it to implement introspection over the deprecated cases: + +```scala +trait Deprecations[T <: reflect.Enum] { + extension (t: T) def isDeprecatedCase: Boolean +} + +object Planet { + given Deprecations[Planet] with { + extension (p: Planet) + def isDeprecatedCase = p == Pluto + } +} +``` + +We could imagine that a library may use [type class derivation](../contextual/derivation.md) to automatically provide an instance for `Deprecations`. + +## Compatibility with Java Enums + +If you want to use the Scala-defined enums as [Java enums](https://docs.oracle.com/javase/tutorial/java/javaOO/enum.html), you can do so by extending the class `java.lang.Enum`, which is imported by default, as follows: + +```scala +enum Color extends Enum[Color] { case Red, Green, Blue } +``` + +The type parameter comes from the Java enum [definition](https://docs.oracle.com/javase/8/docs/api/index.html?java/lang/Enum.html) and should be the same as the type of the enum. +There is no need to provide constructor arguments (as defined in the Java API docs) to `java.lang.Enum` when extending it – the compiler will generate them automatically. + +After defining `Color` like that, you can use it like you would a Java enum: + +```scala +scala> Color.Red.compareTo(Color.Green) +val res15: Int = -1 +``` + +For a more in-depth example of using Scala 3 enums from Java, see [this test](https://github.com/lampepfl/dotty/tree/main/tests/run/enum-java). +In the test, the enums are defined in the `MainScala.scala` file and used from a Java source, `Test.java`. 
diff --git a/docs/_spec/APPLIEDreference/new-types/intersection-types.md b/docs/_spec/APPLIEDreference/new-types/intersection-types.md new file mode 100644 index 000000000000..4720649e16a9 --- /dev/null +++ b/docs/_spec/APPLIEDreference/new-types/intersection-types.md @@ -0,0 +1,65 @@ +--- +layout: doc-page +title: "Intersection Types" +nightlyOf: https://docs.scala-lang.org/scala3/reference/new-types/intersection-types.html +--- + +Used on types, the `&` operator creates an intersection type. + +## Type Checking + +The type `S & T` represents values that are of the type `S` and `T` at the same time. + +```scala +trait Resettable: + def reset(): Unit + +trait Growable[T]: + def add(t: T): Unit + +def f(x: Resettable & Growable[String]) = + x.reset() + x.add("first") +``` + +The parameter `x` is required to be _both_ a `Resettable` and a +`Growable[String]`. + +The members of an intersection type `A & B` are all the members of `A` and all +the members of `B`. For instance `Resettable & Growable[String]` +has member methods `reset` and `add`. + +`&` is _commutative_: `A & B` is the same type as `B & A`. + +If a member appears in both `A` and `B`, its type in `A & B` is the intersection +of its type in `A` and its type in `B`. For instance, assume the definitions: + +```scala +trait A: + def children: List[A] + +trait B: + def children: List[B] + +val x: A & B = new C +val ys: List[A & B] = x.children +``` + +The type of `children` in `A & B` is the intersection of `children`'s +type in `A` and its type in `B`, which is `List[A] & List[B]`. This +can be further simplified to `List[A & B]` because `List` is +covariant. + +One might wonder how the compiler could come up with a definition for +`children` of type `List[A & B]` since what is given are `children` +definitions of type `List[A]` and `List[B]`. The answer is the compiler does not +need to. `A & B` is just a type that represents a set of requirements for +values of the type. 
At the point where a value is _constructed_, one +must make sure that all inherited members are correctly defined. +So if one defines a class `C` that inherits `A` and `B`, one needs +to give at that point a definition of a `children` method with the required type. + +```scala +class C extends A, B: + def children: List[A & B] = ??? +``` diff --git a/docs/_spec/APPLIEDreference/new-types/type-lambdas.md b/docs/_spec/APPLIEDreference/new-types/type-lambdas.md new file mode 100644 index 000000000000..4de3b260c0a2 --- /dev/null +++ b/docs/_spec/APPLIEDreference/new-types/type-lambdas.md @@ -0,0 +1,15 @@ +--- +layout: doc-page +title: "Type Lambdas" +nightlyOf: https://docs.scala-lang.org/scala3/reference/new-types/type-lambdas.html +--- + +A _type lambda_ lets one express a higher-kinded type directly, without +a type definition. + +```scala +[X, Y] =>> Map[Y, X] +``` + +For instance, the type above defines a binary type constructor, which maps arguments `X` and `Y` to `Map[Y, X]`. +Type parameters of type lambdas can have bounds, but they cannot carry `+` or `-` variance annotations. diff --git a/docs/_spec/APPLIEDreference/new-types/union-types.md b/docs/_spec/APPLIEDreference/new-types/union-types.md new file mode 100644 index 000000000000..152505d7fc8d --- /dev/null +++ b/docs/_spec/APPLIEDreference/new-types/union-types.md @@ -0,0 +1,77 @@ +--- +layout: doc-page +title: "Union Types" +nightlyOf: https://docs.scala-lang.org/scala3/reference/new-types/union-types.html +--- + +A union type `A | B` has as values all values of type `A` and also all values of type `B`. + + +```scala +case class UserName(name: String) +case class Password(hash: Hash) + +def help(id: UserName | Password) = + val user = id match + case UserName(name) => lookupName(name) + case Password(hash) => lookupPassword(hash) + ... +``` + +Union types are duals of intersection types. `|` is _commutative_: +`A | B` is the same type as `B | A`. 
+ +The compiler will assign a union type to an expression only if such a +type is explicitly given. This can be seen in the following [REPL](https://docs.scala-lang.org/overviews/repl/overview.html) transcript: + +```scala +scala> val password = Password(123) +val password: Password = Password(123) + +scala> val name = UserName("Eve") +val name: UserName = UserName(Eve) + +scala> if true then name else password +val res2: Object = UserName(Eve) + +scala> val either: Password | UserName = if true then name else password +val either: Password | UserName = UserName(Eve) +``` + +The type of `res2` is `Object & Product`, which is a supertype of +`UserName` and `Password`, but not the least supertype `Password | +UserName`. If we want the least supertype, we have to give it +explicitly, as is done for the type of `either`. + +## Type inference + +When inferring the result type of a definition (`val`, `var`, or `def`) and the +type we are about to infer is a union type, then we replace it by its join. +Similarly, when instantiating a type argument, if the corresponding type +parameter is not upper-bounded by a union type and the type we are about to +instantiate is a union type, we replace it by its join. This mirrors the +treatment of singleton types which are also widened to their underlying type +unless explicitly specified. The motivation is the same: inferring types +which are "too precise" can lead to unintuitive typechecking issues later on. + +**Note:** Since this behavior limits the usability of union types, it might +be changed in the future. For example by not widening unions that have been +explicitly written down by the user and not inferred, or by not widening a type +argument when the corresponding type parameter is covariant. + +See [PR #2330](https://github.com/lampepfl/dotty/pull/2330) and +[Issue #4867](https://github.com/lampepfl/dotty/issues/4867) for further discussions. 
+ +### Example + +```scala +import scala.collection.mutable.ListBuffer +val x = ListBuffer(Right("foo"), Left(0)) +val y: ListBuffer[Either[Int, String]] = x +``` + +This code typechecks because the inferred type argument to `ListBuffer` in the +right-hand side of `x` was `Left[Int, Nothing] | Right[Nothing, String]` which +was widened to `Either[Int, String]`. If the compiler hadn't done this widening, +the last line wouldn't typecheck because `ListBuffer` is invariant in its +argument. diff --git a/docs/_spec/APPLIEDreference/other-new-features/kind-polymorphism.md b/docs/_spec/APPLIEDreference/other-new-features/kind-polymorphism.md new file mode 100644 index 000000000000..685630b86f73 --- /dev/null +++ b/docs/_spec/APPLIEDreference/other-new-features/kind-polymorphism.md @@ -0,0 +1,41 @@ +--- +layout: doc-page +title: "Kind Polymorphism" +nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/kind-polymorphism.html +--- + +Normally type parameters in Scala are partitioned into _kinds_. First-level types are types of values. Higher-kinded types are type constructors +such as `List` or `Map`. The kind of a type is indicated by the top type of which it is a subtype. Normal types are subtypes of `Any`, +covariant single argument type constructors such as `List` are subtypes of `[+X] =>> Any`, and the `Map` type constructor is +a subtype of `[X, +Y] =>> Any`. + +A type can be used only as prescribed by its kind. Subtypes of `Any` cannot be applied to type arguments whereas subtypes of `[X] =>> Any` +_must_ be applied to a type argument, unless they are passed to type parameters of the same kind. + +Sometimes we would like to have type parameters that can have more than one kind, for instance to define an implicit +value that works for parameters of any kind. This is now possible through a form of (_subtype_) kind polymorphism. 
+Kind polymorphism relies on the special type [`scala.AnyKind`](https://scala-lang.org/api/3.x/scala/AnyKind.html) that can be used as an upper bound of a type. + +```scala +def f[T <: AnyKind] = ... +``` + +The actual type arguments of `f` can then be types of arbitrary kinds. So the following would all be legal: + +```scala +f[Int] +f[List] +f[Map] +f[[X] =>> String] +``` + +We call type parameters and abstract types with an `AnyKind` upper bound _any-kinded types_. +Since the actual kind of an any-kinded type is unknown, its usage must be heavily restricted: An any-kinded type +can be neither the type of a value, nor can it be instantiated with type parameters. So about the only +thing one can do with an any-kinded type is to pass it to another any-kinded type argument. +Nevertheless, this is enough to achieve some interesting generalizations that work across kinds, typically +through advanced uses of implicits. + +(todo: insert good concise example) + +`AnyKind` plays a special role in Scala's subtype system: It is a supertype of all other types no matter what their kind is. It is also assumed to be kind-compatible with all other types. Furthermore, `AnyKind` is treated as a higher-kinded type (so it cannot be used as a type of values), but at the same time it has no type parameters (so it cannot be instantiated). diff --git a/docs/_spec/APPLIEDreference/other-new-features/trait-parameters.md b/docs/_spec/APPLIEDreference/other-new-features/trait-parameters.md new file mode 100644 index 000000000000..7924224ddc74 --- /dev/null +++ b/docs/_spec/APPLIEDreference/other-new-features/trait-parameters.md @@ -0,0 +1,34 @@ +--- +layout: doc-page +title: "Trait Parameters" +nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/trait-parameters.html +--- + +Scala 3 enables traits to have parameters, just like a class. 
+ +For example, here is a trait `Greeting`: +```scala +trait Greeting(val name: String): + def msg = s"How are you, $name" +``` + +A class, enum, or object can extend `Greeting` as follows: + +```scala +class Greet extends Greeting("Bob"): + println(msg) +``` + +However if another trait extends `Greeting` then it must not pass arguments: + +```scala +trait FormalGreeting extends Greeting: + override def msg = s"How do you do, $name" +``` + +If you want a class to greet Bob formally, then you should extend both `FormalGreeting` and `Greeting`: + +```scala +class GreetFormally extends FormalGreeting, Greeting("Bob"): + println(msg) +``` diff --git a/docs/_spec/Dockerfile b/docs/_spec/Dockerfile new file mode 100644 index 000000000000..1fc28081c59f --- /dev/null +++ b/docs/_spec/Dockerfile @@ -0,0 +1,26 @@ +FROM ruby:2.7 + +RUN apt-get install -y curl \ + && curl -sL https://deb.nodesource.com/setup_18.x | bash - \ + && apt-get install -y nodejs \ + && curl -L https://www.npmjs.com/install.sh | sh + +RUN gem update --system +RUN gem install sass-embedded -v 1.58.0 +RUN gem install bundler:1.17.2 jekyll + +WORKDIR /srv/jekyll + +COPY Gemfile . +COPY Gemfile.lock . 
+ + +RUN echo -n "bundle version: " && bundle --version +RUN bundle install +RUN mkdir /opt/npm-global +RUN npm config set prefix '/opt/npm-global' +RUN npm config set global true +RUN npm install bower +RUN echo -n "npm version: " && npm --version +RUN chmod u+s /bin/chown +RUN date diff --git a/docs/_spec/Gemfile b/docs/_spec/Gemfile new file mode 100644 index 000000000000..bc45dc84db8c --- /dev/null +++ b/docs/_spec/Gemfile @@ -0,0 +1,8 @@ +# To build the spec on Travis CI +source "https://rubygems.org" + +gem "jekyll", "3.6.3" +gem "webrick" +gem "rouge" +# gem 's3_website' +gem "redcarpet", "3.5.1" diff --git a/docs/_spec/Gemfile.lock b/docs/_spec/Gemfile.lock new file mode 100644 index 000000000000..48efd373725e --- /dev/null +++ b/docs/_spec/Gemfile.lock @@ -0,0 +1,57 @@ +GEM + remote: https://rubygems.org/ + specs: + addressable (2.8.1) + public_suffix (>= 2.0.2, < 6.0) + colorator (1.1.0) + ffi (1.15.5) + forwardable-extended (2.6.0) + jekyll (3.6.3) + addressable (~> 2.4) + colorator (~> 1.0) + jekyll-sass-converter (~> 1.0) + jekyll-watch (~> 1.1) + kramdown (~> 1.14) + liquid (~> 4.0) + mercenary (~> 0.3.3) + pathutil (~> 0.9) + rouge (>= 1.7, < 3) + safe_yaml (~> 1.0) + jekyll-sass-converter (1.5.2) + sass (~> 3.4) + jekyll-watch (1.5.1) + listen (~> 3.0) + kramdown (1.17.0) + liquid (4.0.3) + listen (3.7.1) + rb-fsevent (~> 0.10, >= 0.10.3) + rb-inotify (~> 0.9, >= 0.9.10) + mercenary (0.3.6) + pathutil (0.16.2) + forwardable-extended (~> 2.6) + public_suffix (5.0.0) + rb-fsevent (0.11.2) + rb-inotify (0.10.1) + ffi (~> 1.0) + redcarpet (3.5.1) + rouge (2.2.1) + safe_yaml (1.0.5) + sass (3.7.4) + sass-listen (~> 4.0.0) + sass-listen (4.0.0) + rb-fsevent (~> 0.9, >= 0.9.4) + rb-inotify (~> 0.9, >= 0.9.7) + webrick (1.7.0) + +PLATFORMS + ruby + x86_64-linux + +DEPENDENCIES + jekyll (= 3.6.3) + redcarpet (= 3.5.1) + rouge + webrick + +BUNDLED WITH + 2.3.5 diff --git a/docs/_spec/README.md b/docs/_spec/README.md new file mode 100644 index 
000000000000..b9eba413f8a2 --- /dev/null +++ b/docs/_spec/README.md @@ -0,0 +1,67 @@ +# WIP Scala 3 Language Specification + +**This is still a work in progress, and should *not* be regarded as a source of truth.** + +First of all, the language specification is meant to be correct, precise and clear. + +Second, editing, previewing and generating output for the markdown should be simple and easy. + +Third, we'd like to support different output formats. An html page per chapter with MathJax seems like a good start, as it satisfies the second requirement, and enables the first one. + +## Editing + +We are using Jekyll and [Redcarpet](https://github.com/vmg/redcarpet) to generate the html. + +Check `Gemfile` for the current versions. + +We aim to track the configuration GitHub Pages uses but differences may arise as GitHub Pages evolves. + +## Building + + +To preview locally, run the following commands in the docs/_spec subfolder: + +``` +env UID="$(id -u)" GID="$(id -g)" docker-compose up +``` + +and open http://0.0.0.0:4000/files/archive/spec/2.13/ to view the spec. Jekyll will rebuild as you edit the markdown, but make sure to restart it when you change `_config.yml`. + + +## General Advice for editors + +- All files must be saved as UTF-8: ensure your editors are configured appropriately. +- Use of the appropriate unicode characters instead of the latex modifiers for accents, etc. is necessary. For example, é instead of `\'e`. +- MathJAX errors will appear within the rendered DOM as span elements with class `mtext` and style attribute `color: red` applied. It is possible to search for this combination in the development tools of the browser of your choice. In chrome, CTRL+F / CMD+F within the inspect element panel allows you to do this. + +- This document follows the "one sentence <=> one line" convention, with the following exceptions below. 
+ - A multiline code block is part of the sentence + - An enumeration of links is long enough + +- Whenever doing an enumeration of the kind "a, ..., z", follow the following conventions: + - It should always be "separator whitespace period period period separator whitespace", for example `, ..., ` or `,\n...,\n` for multiline. + - If in a code block, only the elements (a and z above) should be in math mode (between forward ticks) + - If in a math expression, the whole thing should be in a single math mode + - Look at the [Tuple Types section](docs/_spec/03-types.html#tuple-types) for an example of the different cases above. + +- Try to use "Note" blocks to point out logical conclusions that are not obvious, for examples, look at the [Tuple Types section](docs/_spec/03-types.html#tuple-types). + +### Macro replacements: + +- While MathJAX just support LaTeX style command definition, it is recommended to not use this as it will likely cause issues with preparing the document for PDF or ebook distribution. +- `\SS` (which I could not find defined within the latex source) seems to be closest to `\mathscr{S}` +- `\TYPE` is equivalent to `\boldsymbol{type}' +- As MathJAX has no support for slanted font (latex command \sl), so in all instances this should be replaced with \mathit{} +- The macro \U{ABCD} used for unicode character references can be replaced with \\uABCD. +- The macro \URange{ABCD}{DCBA} used for unicode character ranges can be replaced with \\uABCD-\\uDBCA. +- The macro \commadots can be replaced with ` , … , ` (But should not, see above). +- There is no adequate replacement for `\textsc{...}` (small caps) in pandoc markdown. While unicode contains a number of small capital letters, it is notably missing Q and X as these glyphs are intended for phonetic spelling, therefore these cannot be reliably used. For now, the best option is to use underscore emphasis and capitalise the text manually, `_LIKE THIS_`. 
+ +### Unicode Character replacements + +- The unicode left and right single quotation marks (‘ and ’ (U+2018 and U+2019, respectively)) have been used in place of ` and ', where the quotation marks are intended to be paired. These can be typed on a mac using Option+] for a left quote and Option+Shift+] for the right quote. +- Similarly for left and right double quotation marks (“ and ” (U+201C and U+201D, respectively)) in place of ". These can be typed on a mac using Option+[ and Option+Shift+]. diff --git a/docs/_spec/TODOreference/changed-features/changed-features.md b/docs/_spec/TODOreference/changed-features/changed-features.md new file mode 100644 index 000000000000..cacdc2598a02 --- /dev/null +++ b/docs/_spec/TODOreference/changed-features/changed-features.md @@ -0,0 +1,7 @@ +--- +layout: index +title: "Other Changed Features" +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features.html +--- + +The following pages document the features that have changed in Scala 3, compared to Scala 2. diff --git a/docs/_spec/TODOreference/changed-features/compiler-plugins.md b/docs/_spec/TODOreference/changed-features/compiler-plugins.md new file mode 100644 index 000000000000..20bdb7f49836 --- /dev/null +++ b/docs/_spec/TODOreference/changed-features/compiler-plugins.md @@ -0,0 +1,128 @@ +--- +layout: doc-page +title: "Changes in Compiler Plugins" +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/compiler-plugins.html +--- + +Compiler plugins are supported by Dotty (and Scala 3) since 0.9. There are two notable changes +compared to `scalac`: + +- No support for analyzer plugins +- Added support for research plugins + +[Analyzer plugins][1] in `scalac` run during type checking and may influence +normal type checking. This is a very powerful feature but for production usages, +a predictable and consistent type checker is more important. + +For experimentation and research, Scala 3 introduces _research plugin_. 
Research plugins +are more powerful than `scalac` analyzer plugins as they let plugin authors customize +the whole compiler pipeline. One can easily replace the standard typer by a custom one or +create a parser for a domain-specific language. However, research plugins are only +enabled for nightly or snaphot releases of Scala 3. + +Common plugins that add new phases to the compiler pipeline are called +_standard plugins_ in Scala 3. In terms of features, they are similar to +`scalac` plugins, despite minor changes in the API. + +## Using Compiler Plugins + +Both standard and research plugins can be used with `scalac` by adding the `-Xplugin:` option: + +```shell +scalac -Xplugin:pluginA.jar -Xplugin:pluginB.jar Test.scala +``` + +The compiler will examine the jar provided, and look for a property file named +`plugin.properties` in the root directory of the jar. The property file specifies +the fully qualified plugin class name. The format of a property file is as follows: + +```properties +pluginClass=dividezero.DivideZero +``` + +This is different from `scalac` plugins that required a `scalac-plugin.xml` file. + +Starting from 1.1.5, `sbt` also supports Scala 3 compiler plugins. Please refer to the +[`sbt` documentation][2] for more information. + +## Writing a Standard Compiler Plugin + +Here is the source code for a simple compiler plugin that reports integer divisions by +zero as errors. 
+ +```scala +package dividezero + +import dotty.tools.dotc.ast.Trees.* +import dotty.tools.dotc.ast.tpd +import dotty.tools.dotc.core.Constants.Constant +import dotty.tools.dotc.core.Contexts.Context +import dotty.tools.dotc.core.Decorators.* +import dotty.tools.dotc.core.StdNames.* +import dotty.tools.dotc.core.Symbols.* +import dotty.tools.dotc.plugins.{PluginPhase, StandardPlugin} +import dotty.tools.dotc.transform.{Pickler, Staging} + +class DivideZero extends StandardPlugin: + val name: String = "divideZero" + override val description: String = "divide zero check" + + def init(options: List[String]): List[PluginPhase] = + (new DivideZeroPhase) :: Nil + +class DivideZeroPhase extends PluginPhase: + import tpd.* + + val phaseName = "divideZero" + + override val runsAfter = Set(Pickler.name) + override val runsBefore = Set(Staging.name) + + override def transformApply(tree: Apply)(implicit ctx: Context): Tree = + tree match + case Apply(Select(rcvr, nme.DIV), List(Literal(Constant(0)))) + if rcvr.tpe <:< defn.IntType => + report.error("dividing by zero", tree.pos) + case _ => + () + tree +end DivideZeroPhase +``` + +The plugin main class (`DivideZero`) must extend the trait `StandardPlugin` +and implement the method `init` that takes the plugin's options as argument +and returns a list of `PluginPhase`s to be inserted into the compilation pipeline. + +Our plugin adds one compiler phase to the pipeline. A compiler phase must extend +the `PluginPhase` trait. In order to specify when the phase is executed, we also +need to specify a `runsBefore` and `runsAfter` constraints that are list of phase +names. + +We can now transform trees by overriding methods like `transformXXX`. + +## Writing a Research Compiler Plugin + +Here is a template for research plugins. 
+ +```scala +import dotty.tools.dotc.core.Contexts.Context +import dotty.tools.dotc.core.Phases.Phase +import dotty.tools.dotc.plugins.ResearchPlugin + +class DummyResearchPlugin extends ResearchPlugin: + val name: String = "dummy" + override val description: String = "dummy research plugin" + + def init(options: List[String], phases: List[List[Phase]])(implicit ctx: Context): List[List[Phase]] = + phases +end DummyResearchPlugin +``` + +A research plugin must extend the trait `ResearchPlugin` and implement the +method `init` that takes the plugin's options as argument as well as the compiler +pipeline in the form of a list of compiler phases. The method can replace, remove +or add any phases to the pipeline and return the updated pipeline. + + +[1]: https://github.com/scala/scala/blob/2.13.x/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala +[2]: https://www.scala-sbt.org/1.x/docs/Compiler-Plugins.html diff --git a/docs/_spec/TODOreference/changed-features/eta-expansion-spec.md b/docs/_spec/TODOreference/changed-features/eta-expansion-spec.md new file mode 100644 index 000000000000..a62d45df9e11 --- /dev/null +++ b/docs/_spec/TODOreference/changed-features/eta-expansion-spec.md @@ -0,0 +1,77 @@ +--- +layout: doc-page +title: "Automatic Eta Expansion - More Details" +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/eta-expansion-spec.html +--- + +## Motivation + +Scala maintains a convenient distinction between _methods_ and _functions_. +Methods are part of the definition of a class that can be invoked in objects while functions are complete objects themselves, making them first-class entities. For example, they can be assigned to variables. +These two mechanisms are bridged in Scala by a mechanism called +[_eta-expansion_](https://www.scala-lang.org/files/archive/spec/2.13/06-expressions.html#eta-expansion-section) +(also called eta-abstraction), which converts a reference to a method into a function. 
Intuitively, a method `m` can be passed around by turning it into an object: the function `x => m(x)`. + +In this snippet which assigns a method to a `val`, the compiler will perform _automatic eta-expansion_, as shown in the comment: + +```scala +def m(x: Int, y: String) = ??? +val f = m // becomes: val f = (x: Int, y: String) => m(x, y) +``` + +In Scala 2, a method reference `m` is converted to a function value only if the expected type is a function type, which means the conversion in the example above would not have been triggered, because `val f` does not have a type ascription. To still get eta-expansion, a shortcut `m _` would force the conversion. + +For methods with one or more parameters like in the example above, this restriction has now been dropped. The syntax `m _` is no longer needed and will be deprecated in the future. + +## Automatic eta-expansion and partial application +In the following example `m` can be partially applied to the first two parameters. +Assigning `m` to `f1` will automatically eta-expand. + +```scala +def m(x: Boolean, y: String)(z: Int): List[Int] +val f1 = m +val f2 = m(true, "abc") +``` + +This creates two function values: + +```scala +f1: (Boolean, String) => Int => List[Int] +f2: Int => List[Int] +``` + +## Automatic eta-expansion and implicit parameter lists + +Methods with implicit parameter lists will always get applied to implicit arguments. + +```scala +def foo(x: Int)(implicit p: Double): Float = ??? +implicit val bla: Double = 1.0 + +val bar = foo // val bar: Int => Float = ... +``` + +## Automatic Eta-Expansion and query types + +A method with context parameters can be expanded to a value of a context type by writing the expected context type explicitly. + +```scala +def foo(x: Int)(using p: Double): Float = ??? +val bar: Double ?=> Float = foo(3) +``` + +## Rules + +- If `m` has an argument list with one or more parameters, we always eta-expand +- If `m` is has an empty argument list (i.e. has type `()R`): + 1. 
If the expected type is of the form `() => T`, we eta expand. + 2. If m is defined by Java, or overrides a Java defined method, we insert `()`. + 3. Otherwise we issue an error of the form: + +Thus, an unapplied method with an empty argument list is only converted to a function when a function type is expected. It is considered best practice to either explicitly apply the method to `()`, or convert it to a function with `() => m()`. + +The method value syntax `m _` is deprecated. + +## Reference + +For more information, see [PR #2701](https://github.com/lampepfl/dotty/pull/2701). diff --git a/docs/_spec/TODOreference/changed-features/eta-expansion.md b/docs/_spec/TODOreference/changed-features/eta-expansion.md new file mode 100644 index 000000000000..c05378135e54 --- /dev/null +++ b/docs/_spec/TODOreference/changed-features/eta-expansion.md @@ -0,0 +1,42 @@ +--- +layout: doc-page +title: "Automatic Eta Expansion" +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/eta-expansion.html +--- + +The conversion of _methods_ into _functions_ has been improved and happens automatically for methods with one or more parameters. + +```scala +def m(x: Boolean, y: String)(z: Int): List[Int] +val f1 = m +val f2 = m(true, "abc") +``` + +This creates two function values: +```scala +f1: (Boolean, String) => Int => List[Int] +f2: Int => List[Int] +``` + +The syntax `m _` is no longer needed and will be deprecated in the future. + +## Automatic eta-expansion and nullary methods + +Automatic eta expansion does not apply to "nullary" methods that take an empty parameter list. + +```scala +def next(): T +``` + +Given a simple reference to `next` does not auto-convert to a function. +One has to write explicitly `() => next()` to achieve that. +Once again since the `_` is going to be deprecated it's better to write it this way +rather than `next _`. 
+ +The reason for excluding nullary methods from automatic eta expansion +is that Scala implicitly inserts the `()` argument, which would +conflict with eta expansion. Automatic `()` insertion is +[limited](../dropped-features/auto-apply.md) in Scala 3, but the fundamental ambiguity +remains. + +[More details](eta-expansion-spec.md) diff --git a/docs/_spec/TODOreference/changed-features/implicit-conversions-spec.md b/docs/_spec/TODOreference/changed-features/implicit-conversions-spec.md new file mode 100644 index 000000000000..dc19e10c8b8f --- /dev/null +++ b/docs/_spec/TODOreference/changed-features/implicit-conversions-spec.md @@ -0,0 +1,117 @@ +--- +layout: doc-page +title: "Implicit Conversions - More Details" +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/implicit-conversions-spec.html +--- + +## Implementation + +An implicit conversion, or _view_, from type `S` to type `T` is +defined by either: + +- An `implicit def` which has type `S => T` or `(=> S) => T` +- An implicit value which has type `Conversion[S, T]` + +The standard library defines an abstract class [`Conversion`](https://scala-lang.org/api/3.x/scala/Conversion.html): + +```scala +package scala +@java.lang.FunctionalInterface +abstract class Conversion[-T, +U] extends Function1[T, U]: + def apply(x: T): U +``` + +Function literals are automatically converted to `Conversion` values. + +Views are applied in three situations: + +1. If an expression `e` is of type `T`, and `T` does not conform to + the expression's expected type `pt`. In this case, an implicit `v` + which is applicable to `e` and whose result type conforms to `pt` + is searched. The search proceeds as in the case of implicit + parameters, where the implicit scope is the one of `T => pt`. If + such a view is found, the expression `e` is converted to `v(e)`. +1. In a selection `e.m` with `e` of type `T`, if the selector `m` does + not denote an accessible member of `T`. 
In this case, a view `v` + which is applicable to `e` and whose result contains an accessible + member named `m` is searched. The search proceeds as in the case of + implicit parameters, where the implicit scope is the one of `T`. If + such a view is found, the selection `e.m` is converted to `v(e).m`. +1. In an application `e.m(args)` with `e` of type `T`, if the selector + `m` denotes some accessible member(s) of `T`, but none of these + members is applicable to the arguments `args`. In this case, a view + `v` which is applicable to `e` and whose result contains a method + `m` which is applicable to `args` is searched. The search proceeds + as in the case of implicit parameters, where the implicit scope is + the one of `T`. If such a view is found, the application + `e.m(args)` is converted to `v(e).m(args)`. + +# Differences with Scala 2 implicit conversions + +In Scala 2, views whose parameters are passed by-value take precedence +over views whose parameters are passed by-name. This is no longer the +case in Scala 3. A type error reporting the ambiguous conversions will +be emitted in cases where this rule would be applied in Scala 2: + +```scala +implicit def conv1(x: Int): String = x.toString +implicit def conv2(x: => Int): String = x.toString + +val x: String = 0 // Compiles in Scala2 (uses `conv1`), + // type error in Scala 3 because of ambiguity. +``` + +In Scala 2, implicit values of a function type would be considered as +potential views. 
In Scala 3, these implicit values need to have type
`Conversion`:
diff --git a/docs/_spec/TODOreference/changed-features/implicit-conversions.md b/docs/_spec/TODOreference/changed-features/implicit-conversions.md new file mode 100644 index 000000000000..eef236f39a07 --- /dev/null +++ b/docs/_spec/TODOreference/changed-features/implicit-conversions.md @@ -0,0 +1,65 @@ +--- +layout: doc-page +title: "Implicit Conversions" +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/implicit-conversions.html +--- + +An _implicit conversion_, also called _view_, is a conversion that +is applied by the compiler in several situations: + +1. When an expression `e` of type `T` is encountered, but the compiler + needs an expression of type `S`. +1. When an expression `e.m` where `e` has type `T` but `T` defines no + member `m` is encountered. + +In those cases, the compiler looks in the implicit scope for a +conversion that can convert an expression of type `T` to an expression +of type `S` (or to a type that defines a member `m` in the second +case). + +This conversion can be either: + +1. An `implicit def` of type `T => S` or `(=> T) => S` +1. An implicit value of type `scala.Conversion[T, S]` + +Defining an implicit conversion will emit a warning unless the import +`scala.language.implicitConversions` is in scope, or the flag +`-language:implicitConversions` is given to the compiler. + +## Examples + +The first example is taken from [`scala.Predef`](https://scala-lang.org/api/3.x/scala/Predef$.html). 
+Thanks to this implicit conversion, it is possible to pass a +[`scala.Int`](https://scala-lang.org/api/3.x/scala/Int.html) +to a Java method that expects a `java.lang.Integer` + +```scala +import scala.language.implicitConversions +implicit def int2Integer(x: Int): java.lang.Integer = + x.asInstanceOf[java.lang.Integer] +``` + +The second example shows how to use `Conversion` to define an +`Ordering` for an arbitrary type, given existing `Ordering`s for other +types: + +```scala +import scala.language.implicitConversions +implicit def ordT[T, S]( + implicit conv: Conversion[T, S], + ordS: Ordering[S] + ): Ordering[T] = + // `ordS` compares values of type `S`, but we can convert from `T` to `S` + (x: T, y: T) => ordS.compare(x, y) + +class A(val x: Int) // The type for which we want an `Ordering` + +// Convert `A` to a type for which an `Ordering` is available: +implicit val AToInt: Conversion[A, Int] = _.x + +implicitly[Ordering[Int]] // Ok, exists in the standard library +implicitly[Ordering[A]] // Ok, will use the implicit conversion from + // `A` to `Int` and the `Ordering` for `Int`. +``` + +[More details](implicit-conversions-spec.md) diff --git a/docs/_spec/TODOreference/changed-features/implicit-resolution.md b/docs/_spec/TODOreference/changed-features/implicit-resolution.md new file mode 100644 index 000000000000..bf15baa3299c --- /dev/null +++ b/docs/_spec/TODOreference/changed-features/implicit-resolution.md @@ -0,0 +1,169 @@ +--- +layout: doc-page +title: "Changes in Implicit Resolution" +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/implicit-resolution.html +--- + +This section describes changes to the implicit resolution that apply both to the new `given`s and to the old-style `implicit`s in Scala 3. +Implicit resolution uses a new algorithm which caches implicit results +more aggressively for performance. There are also some changes that +affect implicits on the language level. 
+ +**1.** Types of implicit values and result types of implicit methods +must be explicitly declared. Excepted are only values in local blocks +where the type may still be inferred: +```scala + class C { + + val ctx: Context = ... // ok + + /*!*/ implicit val x = ... // error: type must be given explicitly + + /*!*/ implicit def y = ... // error: type must be given explicitly + } + val y = { + implicit val ctx = this.ctx // ok + ... + } +``` +**2.** Nesting is now taken into account for selecting an implicit. Consider for instance the following scenario: +```scala + def f(implicit i: C) = { + def g(implicit j: C) = { + implicitly[C] + } + } +``` +This will now resolve the `implicitly` call to `j`, because `j` is nested +more deeply than `i`. Previously, this would have resulted in an +ambiguity error. The previous possibility of an implicit search failure +due to _shadowing_ (where an implicit is hidden by a nested definition) +no longer applies. + +**3.** Package prefixes no longer contribute to the implicit search scope of a type. Example: +```scala + package p + + given a: A = A() + + object o: + given b: B = B() + type C +``` +Both `a` and `b` are visible as implicits at the point of the definition +of `type C`. However, a reference to `p.o.C` outside of package `p` will +have only `b` in its implicit search scope but not `a`. + +In more detail, here are the rules for what constitutes the implicit scope of +a type: + +**Definition:** A reference is an _anchor_ if it refers to an object, a class, a trait, an abstract type, an opaque type alias, or a match type alias. References to packages and package objects are anchors only under `-source:3.0-migration`. +Opaque type aliases count as anchors only outside the scope where their alias is visible. + +**Definition:** The _anchors_ of a type _T_ is a set of references defined as follows: + + 1. If _T_ is a reference to an anchor, _T_ itself plus, if _T_ is of the form _P#A_, the anchors of _P_. + 1. 
If _T_ is an alias of _U_, the anchors of _U_. + 1. If _T_ is a reference to a type parameter, the union of the anchors of both of its bounds. + 1. If _T_ is a singleton reference, the anchors of its underlying type, plus, + if _T_ is of the form _(P#x).type_, the anchors of _P_. + 1. If _T_ is the this-type _o.this_ of a static object _o_, the anchors of a term reference _o.type_ to that object. + 1. If _T_ is some other type, the union of the anchors of each constituent type of _T_. + + **Definition:** The _implicit scope_ of a type _T_ is the smallest set _S_ of term references such that + + 1. If _T_ is a reference to a class, _S_ includes a reference to the companion object + of the class, if it exists, as well as the implicit scopes of all of _T_'s parent classes. + 1. If _T_ is a reference to an object, _S_ includes _T_ itself as well as + the implicit scopes of all of _T_'s parent classes. + 1. If _T_ is a reference to an opaque type alias named _A_, _S_ includes + a reference to an object _A_ defined in the same scope as the type, if it exists, + as well as the implicit scope of _T_'s underlying type or bounds. + 1. If _T_ is a reference to an abstract type or match type alias + named _A_, _S_ includes a reference to an object _A_ defined in the same scope as the type, if it exists, as well as the implicit scopes of _T_'s given bounds. + 1. If _T_ is a reference to an anchor of the form _p.A_ then _S_ also includes + all term references on the path _p_. + 1. If _T_ is some other type, _S_ includes the implicit scopes of all anchors of _T_. + + +**4.** The treatment of ambiguity errors has changed. If an ambiguity is encountered in some recursive step of an implicit search, the ambiguity is propagated to the caller. + +Example: Say you have the following definitions: +```scala + class A + class B extends C + class C + implicit def a1: A + implicit def a2: A + implicit def b(implicit a: A): B + implicit def c: C +``` +and the query `implicitly[C]`. 
+ +This query would now be classified as ambiguous. This makes sense, after all +there are two possible solutions, `b(a1)` and `b(a2)`, neither of which is better +than the other and both of which are better than the third solution, `c`. +By contrast, Scala 2 would have rejected the search for `A` as +ambiguous, and subsequently have classified the query `b(implicitly[A])` as a normal fail, +which means that the alternative `c` would be chosen as solution! + +Scala 2's somewhat puzzling behavior with respect to ambiguity has been exploited to implement +the analogue of a "negated" search in implicit resolution, where a query `Q1` fails if some +other query `Q2` succeeds and `Q1` succeeds if `Q2` fails. With the new cleaned up behavior +these techniques no longer work. But there is now a new special type [`scala.util.NotGiven`](https://scala-lang.org/api/3.x/scala/util/NotGiven.html) +which implements negation directly. For any query type `Q`, `NotGiven[Q]` succeeds if and only if +the implicit search for `Q` fails. + +**5.** The treatment of divergence errors has also changed. A divergent implicit is treated as a normal failure, after which alternatives are still tried. This also makes sense: Encountering a divergent implicit means that we assume that no finite solution can be found on the corresponding path, but another path can still be tried. By contrast, +most (but not all) divergence errors in Scala 2 would terminate the implicit search as a whole. + +**6.** Scala 2 gives a lower level of priority to implicit conversions with call-by-name parameters relative to implicit conversions with call-by-value parameters. Scala 3 drops this distinction. So the following code snippet would be ambiguous in Scala 3: + +```scala + implicit def conv1(x: Int): A = new A(x) + implicit def conv2(x: => Int): A = new A(x) + def buzz(y: A) = ??? 
+ buzz(1) // error: ambiguous +``` +**7.** The rule for picking a _most specific_ alternative among a set of overloaded or implicit alternatives is refined to take context parameters into account. All else being equal, an alternative that takes some context parameters is taken to be less specific than an alternative that takes none. If both alternatives take context parameters, we try to choose between them as if they were methods with regular parameters. The following paragraph in the [SLS §6.26.3](https://scala-lang.org/files/archive/spec/2.13/06-expressions.html#overloading-resolution) is affected by this change: + +_Original version:_ + +> An alternative A is _more specific_ than an alternative B if the relative weight of A over B is greater than the relative weight of B over A. + +_Modified version:_ + +An alternative A is _more specific_ than an alternative B if + + - the relative weight of A over B is greater than the relative weight of B over A, or + - the relative weights are the same, and A takes no implicit parameters but B does, or + - the relative weights are the same, both A and B take implicit parameters, and A is more specific than B if all implicit parameters in either alternative are replaced by regular parameters. + +**8.** The previous disambiguation of implicits based on inheritance depth is refined to make it transitive. Transitivity is important to guarantee that search outcomes are compilation-order independent. Here's a scenario where the previous rules violated transitivity: +```scala + class A extends B + object A { given a ... } + class B + object B extends C { given b ... } + class C { given c } +``` + Here `a` is more specific than `b` since the companion class `A` is a subclass of the companion class `B`. Also, `b` is more specific than `c` + since `object B` extends class `C`. But `a` is not more specific than `c`. This means if `a, b, c` are all applicable implicits, it makes + a difference in what order they are compared. 
If we compare `b` and `c` + first, we keep `b` and drop `c`. Then, comparing `a` with `b` we keep `a`. But if we compare `a` with `c` first, we fail with an ambiguity error. + +The new rules are as follows: An implicit `a` defined in `A` is more specific than an implicit `b` defined in `B` if + + - `A` extends `B`, or + - `A` is an object and the companion class of `A` extends `B`, or + - `A` and `B` are objects, + `B` does not inherit any implicit members from base classes (*), + and the companion class of `A` extends the companion class of `B`. + +Condition (*) is new. It is necessary to ensure that the defined relation is transitive. + + + + + +[//]: # todo: expand with precise rules diff --git a/docs/_spec/TODOreference/changed-features/imports.md b/docs/_spec/TODOreference/changed-features/imports.md new file mode 100644 index 000000000000..2058ef08b7db --- /dev/null +++ b/docs/_spec/TODOreference/changed-features/imports.md @@ -0,0 +1,60 @@ +--- +layout: doc-page +title: "Imports" +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/imports.html +--- + +The syntax of wildcard and renaming imports (and exports) has changed. + +## Wildcard Imports + +Wildcard imports are now expressed with `*` instead of underscore. Example: +```scala +import scala.annotation.* // imports everything in the annotation package +``` + +If you want to import a member named `*` specifically, you can use backticks around it. + +```scala +object A: + def * = ... + def min = ... + +object B: + import A.`*` // imports just `*` + +object C: + import A.* // imports everything in A +``` + +## Renaming Imports + +To rename or exclude an import, we now use `as` instead of `=>`. A single renaming import no longer needs to be enclosed in braces. 
Examples: + +```scala +import A.{min as minimum, `*` as multiply} +import Predef.{augmentString as _, *} // imports everything except augmentString +import scala.annotation as ann +import java as j +``` + +## Migration + +To support cross-building, Scala 3.0 supports the old import syntax with `_` for wildcards and `=>` for renamings in addition to the new one. The old syntax +will be dropped in a future version. Automatic rewritings from old to new syntax +are offered under settings `-source 3.1-migration -rewrite`. + +## Syntax + +``` +Import ::= ‘import’ ImportExpr {‘,’ ImportExpr} +ImportExpr ::= SimpleRef {‘.’ id} ‘.’ ImportSpec + | SimpleRef `as` id +ImportSpec ::= NamedSelector + | WildCardSelector + | ‘{’ ImportSelectors ‘}’ +NamedSelector ::= id [‘as’ (id | ‘_’)] +WildCardSelector ::= ‘*’ | ‘given’ [InfixType] +ImportSelectors ::= NamedSelector [‘,’ ImportSelectors] + | WildCardSelector {‘,’ WildCardSelector} +``` diff --git a/docs/_spec/TODOreference/changed-features/interpolation-escapes.md b/docs/_spec/TODOreference/changed-features/interpolation-escapes.md new file mode 100644 index 000000000000..594e7671c5ab --- /dev/null +++ b/docs/_spec/TODOreference/changed-features/interpolation-escapes.md @@ -0,0 +1,14 @@ +--- +layout: doc-page +title: "Escapes in interpolations" +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/interpolation-escapes.html +--- + +In Scala 2 there is no straightforward way to represent a double quote character `"` in a single quoted interpolation. A `\` character can't be used for that because interpolators themselves decide how to handle escaping, so the parser doesn't know whether the `"` character should be escaped or used as a terminator. + +In Scala 3, we can use the `$` meta character of interpolations to escape a `"` character. 
Example: + +```scala + val inventor = "Thomas Edison" + val interpolation = s"as $inventor said: $"The three great essentials to achieve anything worth while are: Hard work, Stick-to-itiveness, and Common sense.$"" +``` diff --git a/docs/_spec/TODOreference/changed-features/lazy-vals-init.md b/docs/_spec/TODOreference/changed-features/lazy-vals-init.md new file mode 100644 index 000000000000..131ac6ad7bb2 --- /dev/null +++ b/docs/_spec/TODOreference/changed-features/lazy-vals-init.md @@ -0,0 +1,80 @@ +--- +layout: doc-page +title: Lazy Vals Initialization +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/lazy-vals-init.html +--- + +Scala 3 implements [Version 6](https://docs.scala-lang.org/sips/improved-lazy-val-initialization.html#version-6---no-synchronization-on-this-and-concurrent-initialization-of-fields) +of the [SIP-20] improved lazy vals initialization proposal. + +## Motivation + +The newly proposed lazy val initialization mechanism aims to eliminate the acquisition of resources +during the execution of the lazy val initializer block, thus reducing the possibility of a deadlock. +The concrete deadlock scenarios that the new lazy val initialization scheme eliminates are +summarized in the [SIP-20] document. 
+ +## Implementation + +Given a lazy field of the form: + +```scala +class Foo { + lazy val bar = +} +``` + +The Scala 3 compiler will generate code equivalent to: + +```scala +class Foo { + import scala.runtime.LazyVals + var value_0: Int = _ + var bitmap: Long = 0L + val bitmap_offset: Long = LazyVals.getOffset(classOf[LazyCell], "bitmap") + + def bar(): Int = { + while (true) { + val flag = LazyVals.get(this, bitmap_offset) + val state = LazyVals.STATE(flag, ) + + if (state == ) { + return value_0 + } else if (state == ) { + if (LazyVals.CAS(this, bitmap_offset, flag, , )) { + try { + val result = + value_0 = result + LazyVals.setFlag(this, bitmap_offset, , ) + return result + } + catch { + case ex => + LazyVals.setFlag(this, bitmap_offset, , ) + throw ex + } + } + } else /* if (state == || state == ) */ { + LazyVals.wait4Notification(this, bitmap_offset, flag, ) + } + } + } +} +``` + +The state of the lazy val `` is represented with 4 values: 0, 1, 2 and 3. The state 0 +represents a non-initialized lazy val. The state 1 represents a lazy val that is currently being +initialized by some thread. The state 2 denotes that there are concurrent readers of the lazy val. +The state 3 represents a lazy val that has been initialized. `` is the id of the lazy +val. This id grows with the number of volatile lazy vals defined in the class. + +## Note on recursive lazy vals + +Ideally recursive lazy vals should be flagged as an error. The current behavior for +recursive lazy vals is undefined (initialization may result in a deadlock). 
+ +## Reference + +* [SIP-20] + +[SIP-20]: https://docs.scala-lang.org/sips/improved-lazy-val-initialization.html diff --git a/docs/_spec/TODOreference/changed-features/main-functions.md b/docs/_spec/TODOreference/changed-features/main-functions.md new file mode 100644 index 000000000000..4460300d003e --- /dev/null +++ b/docs/_spec/TODOreference/changed-features/main-functions.md @@ -0,0 +1,87 @@ +--- +layout: doc-page +title: "Main Methods" +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/main-functions.html +--- + +Scala 3 offers a new way to define programs that can be invoked from the command line: +A [`@main`](https://scala-lang.org/api/3.x/scala/main.html) annotation on a method turns this method into an executable program. +Example: + +```scala +@main def happyBirthday(age: Int, name: String, others: String*) = + val suffix = + age % 100 match + case 11 | 12 | 13 => "th" + case _ => + age % 10 match + case 1 => "st" + case 2 => "nd" + case 3 => "rd" + case _ => "th" + val bldr = new StringBuilder(s"Happy $age$suffix birthday, $name") + for other <- others do bldr.append(" and ").append(other) + bldr.toString +``` + +This would generate a main program `happyBirthday` that could be called like this + +``` +> scala happyBirthday 23 Lisa Peter +Happy 23rd birthday, Lisa and Peter +``` + +A [`@main`](https://scala-lang.org/api/3.x/scala/main.html) annotated method can be written either at the top-level or in a statically accessible object. The name of the program is in each case the name of the method, without any object prefixes. The [`@main`](https://scala-lang.org/api/3.x/scala/main.html) method can have an arbitrary number of parameters. +For each parameter type there must be an instance of the [`scala.util.CommandLineParser.FromString[T]`](https://scala-lang.org/api/3.x/scala/util/CommandLineParser$$FromString.html) type class that is used to convert an argument string to the required parameter type `T`. 
+The parameter list of a main method can end in a repeated parameter that then takes all remaining arguments given on the command line. + +The program implemented from a [`@main`](https://scala-lang.org/api/3.x/scala/main.html) method checks that there are enough arguments on +the command line to fill in all parameters, and that argument strings are convertible to +the required types. If a check fails, the program is terminated with an error message. + +Examples: + +``` +> scala happyBirthday 22 +Illegal command line after first argument: more arguments expected + +> scala happyBirthday sixty Fred +Illegal command line: java.lang.NumberFormatException: For input string: "sixty" +``` + +The Scala compiler generates a program from a [`@main`](https://scala-lang.org/api/3.x/scala/main.html) method `f` as follows: + + - It creates a class named `f` in the package where the [`@main`](https://scala-lang.org/api/3.x/scala/main.html) method was found + - The class has a static method `main` with the usual signature. It takes an `Array[String]` + as argument and returns [`Unit`](https://scala-lang.org/api/3.x/scala/Unit.html). + - The generated `main` method calls method `f` with arguments converted using + methods in the [`scala.util.CommandLineParser`](https://scala-lang.org/api/3.x/scala/util/CommandLineParser$.html) object. + +For instance, the `happyBirthday` method above would generate additional code equivalent to the following class: + +```scala +final class happyBirthday: + import scala.util.CommandLineParser as CLP + <static> def main(args: Array[String]): Unit = + try + happyBirthday( + CLP.parseArgument[Int](args, 0), + CLP.parseArgument[String](args, 1), + CLP.parseRemainingArguments[String](args, 2)) + catch + case error: CLP.ParseError => CLP.showError(error) +``` + +**Note**: The `<static>` modifier above expresses that the `main` method is generated +as a static method of class `happyBirthday`. It is not available for user programs in Scala. 
Regular "static" members are generated in Scala using objects instead. + +[`@main`](https://scala-lang.org/api/3.x/scala/main.html) methods are the recommended scheme to generate programs that can be invoked from the command line in Scala 3. They replace the previous scheme to write program as objects with a special `App` parent class. In Scala 2, `happyBirthday` could be written also like this: + +```scala +object happyBirthday extends App: + // needs by-hand parsing of arguments vector + ... +``` + +The previous functionality of [`App`](https://www.scala-lang.org/api/3.x/scala/App.html), which relied on the "magic" [`DelayedInit`](../dropped-features/delayed-init.md) trait, is no longer available. [`App`](https://scala-lang.org/api/3.x/scala/App.html) still exists in limited form for now, but it does not support command line arguments and will be deprecated in the future. If programs need to cross-build +between Scala 2 and Scala 3, it is recommended to use an explicit `main` method with an `Array[String]` argument instead. diff --git a/docs/_spec/TODOreference/changed-features/match-syntax.md b/docs/_spec/TODOreference/changed-features/match-syntax.md new file mode 100644 index 000000000000..dba50e9beb6a --- /dev/null +++ b/docs/_spec/TODOreference/changed-features/match-syntax.md @@ -0,0 +1,56 @@ +--- +layout: doc-page +title: "Match Expressions" +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/match-syntax.html +--- + +The syntactical precedence of match expressions has been changed. +`match` is still a keyword, but it is used like an alphabetical operator. This has several consequences: + + 1. `match` expressions can be chained: + + ```scala + xs match { + case Nil => "empty" + case _ => "nonempty" + } match { + case "empty" => 0 + case "nonempty" => 1 + } + ``` + + (or, dropping the optional braces) + + ```scala + xs match + case Nil => "empty" + case _ => "nonempty" + match + case "empty" => 0 + case "nonempty" => 1 + ``` + + 2. 
`match` may follow a period: + + ```scala + if xs.match + case Nil => false + case _ => true + then "nonempty" + else "empty" + ``` + + 3. The scrutinee of a match expression must be an `InfixExpr`. Previously the scrutinee could be followed by a type ascription `: T`, but this is no longer supported. So `x : T match { ... }` now has to be + written `(x: T) match { ... }`. + +## Syntax + +The new syntax of match expressions is as follows. + +``` +InfixExpr ::= ... + | InfixExpr MatchClause +SimpleExpr ::= ... + | SimpleExpr ‘.’ MatchClause +MatchClause ::= ‘match’ ‘{’ CaseClauses ‘}’ +``` diff --git a/docs/_spec/TODOreference/changed-features/numeric-literals.md b/docs/_spec/TODOreference/changed-features/numeric-literals.md new file mode 100644 index 000000000000..bba837dbf67d --- /dev/null +++ b/docs/_spec/TODOreference/changed-features/numeric-literals.md @@ -0,0 +1,7 @@ +--- +layout: doc-page +title: "Numeric Literals" +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/numeric-literals.html +--- + +[Document was moved](../experimental/numeric-literals.md) diff --git a/docs/_spec/TODOreference/changed-features/operators.md b/docs/_spec/TODOreference/changed-features/operators.md new file mode 100644 index 000000000000..0cf25d77bc11 --- /dev/null +++ b/docs/_spec/TODOreference/changed-features/operators.md @@ -0,0 +1,173 @@ +--- +layout: doc-page +title: "Rules for Operators" +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/operators.html +--- + +The rules for infix operators have changed in some parts: + +First, an alphanumeric method can be used as an infix operator only if its definition carries an `infix` modifier. + +Second, it is recommended (but not enforced) to augment definitions of symbolic operators +with [`@targetName` annotations](../other-new-features/targetName.md). + +Finally, a syntax change allows infix operators to be written on the left in a multi-line expression. 
+ +## The `infix` Modifier + +An `infix` modifier on a method definition allows using the method as an infix operation. Example: + +```scala +import scala.annotation.targetName + +trait MultiSet[T]: + + infix def union(other: MultiSet[T]): MultiSet[T] + + def difference(other: MultiSet[T]): MultiSet[T] + + @targetName("intersection") + def *(other: MultiSet[T]): MultiSet[T] + +end MultiSet + +val s1, s2: MultiSet[Int] + +s1 union s2 // OK +s1 `union` s2 // also OK but unusual +s1.union(s2) // also OK + +s1.difference(s2) // OK +s1 `difference` s2 // OK +s1 difference s2 // gives a deprecation warning + +s1 * s2 // OK +s1 `*` s2 // also OK, but unusual +s1.*(s2) // also OK, but unusual +``` + +Infix operations involving alphanumeric operators are deprecated, unless +one of the following conditions holds: + + - the operator definition carries an `infix` modifier, or + - the operator was compiled with Scala 2, or + - the operator is followed by an opening brace. + +An alphanumeric operator is an operator consisting entirely of letters, digits, the `$` and `_` characters, or +any Unicode character `c` for which `java.lang.Character.isIdentifierPart(c)` returns `true`. + +Infix operations involving symbolic operators are always allowed, so `infix` is redundant for methods with symbolic names. + +The `infix` modifier can also be given to a type: + +```scala +infix type or[X, Y] +val x: String or Int = ... +``` + +### Motivation + +The purpose of the `infix` modifier is to achieve consistency across a code base in how a method or type is applied. The idea is that the author of a method decides whether that method should be applied as an infix operator or in a regular application. Use sites then implement that decision consistently. + +### Details + + 1. `infix` is a soft modifier. It is treated as a normal identifier except when in modifier position. + + 2. If a method overrides another, their infix annotations must agree. 
Either both are annotated with `infix`, or none of them are. + + 3. `infix` modifiers can be given to method definitions. The first non-receiver parameter list of an `infix` method must define exactly one parameter. Examples: + + ```scala + infix def op1(x: S): R // ok + infix def op2[T](x: T)(y: S): R // ok + infix def op3[T](x: T, y: S): R // error: two parameters + + extension (x: A) + infix def op4(y: B): R // ok + infix def op5(y1: B, y2: B): R // error: two parameters + ``` + + 4. `infix` modifiers can also be given to type, trait or class definitions that have exactly two type parameters. An infix type like + + ```scala + infix type op[X, Y] + ``` + + can be applied using infix syntax, i.e. `A op B`. + + 5. To smooth migration to Scala 3.0, alphanumeric operators will only be deprecated from Scala 3.1 onwards, +or if the `-source future` option is given in Dotty/Scala 3. + +## The `@targetName` Annotation + +It is recommended that definitions of symbolic operators carry a [`@targetName` annotation](../other-new-features/targetName.md) that provides an encoding of the operator with an alphanumeric name. This has several benefits: + + - It helps interoperability between Scala and other languages. One can call + a Scala-defined symbolic operator from another language using its target name, + which avoids having to remember the low-level encoding of the symbolic name. + - It helps legibility of stacktraces and other runtime diagnostics, where the + user-defined alphanumeric name will be shown instead of the low-level encoding. + - It serves as a documentation tool by providing an alternative regular name + as an alias of a symbolic operator. This makes the definition also easier + to find in a search. + +## Syntax Change + +Infix operators can now appear at the start of lines in a multi-line expression. Examples: + +```scala +val str = "hello" + ++ " world" + ++ "!" 
+ +def condition = + x > 0 + || + xs.exists(_ > 0) + || xs.isEmpty +``` + +Previously, those expressions would have been rejected, since the compiler's semicolon inference +would have treated the continuations `++ " world"` or `|| xs.isEmpty` as separate statements. + +To make this syntax work, the rules are modified to not infer semicolons in front of leading infix operators. +A _leading infix operator_ is + - a symbolic identifier such as `+`, or `approx_==`, or an identifier in backticks that + - starts a new line, and + - is not following a blank line, and + - is followed by at least one whitespace character and a token that can start an expression. + - Furthermore, if the operator appears on its own line, the next line must have at least + the same indentation width as the operator. + +Example: + +```scala + freezing + | boiling +``` + +This is recognized as a single infix operation. Compare with: + +```scala + freezing + !boiling +``` + +This is seen as two statements, `freezing` and `!boiling`. The difference is that only the operator in the first example +is followed by a space. + +Another example: + +```scala + println("hello") + ??? + ??? match { case 0 => 1 } +``` + +This code is recognized as three different statements. `???` is syntactically a symbolic identifier, but +neither of its occurrences is followed by a space and a token that can start an expression. + +## Unary operators + +A unary operator must not have explicit parameter lists even if they are empty. +A unary operator is a method named "unary_`op`" where `op` is one of `+`, `-`, `!`, or `~`. 
diff --git a/docs/_spec/TODOreference/changed-features/overload-resolution.md b/docs/_spec/TODOreference/changed-features/overload-resolution.md new file mode 100644 index 000000000000..621515c2a7f8 --- /dev/null +++ b/docs/_spec/TODOreference/changed-features/overload-resolution.md @@ -0,0 +1,102 @@ +--- +layout: doc-page +title: "Changes in Overload Resolution" +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/overload-resolution.html +--- + +Overload resolution in Scala 3 improves on Scala 2 in three ways. +First, it takes all argument lists into account instead of +just the first argument list. +Second, it can infer parameter types of function values even if they +are in the first argument list. +Third, default arguments are no longer relevant for prioritization. + +## Looking Beyond the First Argument List + +Overloading resolution can now take argument lists into account when +choosing among a set of overloaded alternatives. +For example, the following code compiles in Scala 3, while it results in an +ambiguous overload error in Scala 2: + +```scala +def f(x: Int)(y: String): Int = 0 +def f(x: Int)(y: Int): Int = 0 + +f(3)("") // ok +``` + +The following code compiles as well: + +```scala +def g(x: Int)(y: Int)(z: Int): Int = 0 +def g(x: Int)(y: Int)(z: String): Int = 0 + +g(2)(3)(4) // ok +g(2)(3)("") // ok +``` + +To make this work, the rules for overloading resolution in [SLS §6.26.3](https://www.scala-lang.org/files/archive/spec/2.13/06-expressions.html#overloading-resolution) are augmented +as follows: + +> In a situation where a function is applied to more than one argument list, if overloading +resolution yields several competing alternatives when `n >= 1` parameter lists are taken +into account, then resolution is re-tried using `n + 1` argument lists. 
+ +This change is motivated by the new language feature +[extension methods](../contextual/extension-methods.md), where the need emerges to do +overload resolution based on additional argument blocks. + +## Parameter Types of Function Values + +The handling of function values with missing parameter types has been improved. We can now +pass such values in the first argument list of an overloaded application, provided +that the remaining parameters suffice for picking a variant of the overloaded function. +For example, the following code compiles in Scala 3, while it results in a +missing parameter type error in Scala 2: + +```scala +def f(x: Int, f2: Int => Int) = f2(x) +def f(x: String, f2: String => String) = f2(x) +f("a", _.toUpperCase) +f(2, _ * 2) +``` + +To make this work, the rules for overloading resolution in [SLS §6.26.3](https://www.scala-lang.org/files/archive/spec/2.13/06-expressions.html#overloading-resolution) are modified +as follows: + +Replace the sentence + +> Otherwise, let `S1,...,Sm` be the vector of types obtained by typing each argument with an undefined expected type. + +with the following paragraph: + +> Otherwise, let `S1,...,Sm` be the vector of known types of all arguments, where the _known type_ of an argument `E` +is determined as follows: + + - If `E` is a function value `(p_1, ..., p_n) => B` that misses some parameter types, the known type + of `E` is `(S_1, ..., S_n) => ?`, where each `S_i` is the type of parameter `p_i` if it is given, or `?` + otherwise. Here `?` stands for a _wildcard type_ that is compatible with every other type. + - Otherwise the known type of `E` is the result of typing `E` with an undefined expected type. + +A pattern matching closure + +```scala +{ case P1 => B1 ... case P_n => B_n } +``` + +is treated as if it were expanded to the function value + +```scala +x => x match { case P1 => B1 ... case P_n => B_n } +``` + +and is therefore also approximated with a `? => ?` type. 
+ +## Default Arguments Are No Longer Relevant for Prioritization + +In Scala 2 if among several applicable alternatives one alternative had default arguments, that alternative was dropped from consideration. This has the unfortunate +side effect that adding a default to a parameter of a method can render this method +invisible in overloaded calls. + +Scala 3 drops this distinction. Methods with default parameters are not treated +as having lower priority than other methods. diff --git a/docs/_spec/TODOreference/changed-features/pattern-bindings.md b/docs/_spec/TODOreference/changed-features/pattern-bindings.md new file mode 100644 index 000000000000..2de338fc1dde --- /dev/null +++ b/docs/_spec/TODOreference/changed-features/pattern-bindings.md @@ -0,0 +1,59 @@ +--- +layout: doc-page +title: "Pattern Bindings" +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/pattern-bindings.html +--- + +In Scala 2, pattern bindings in `val` definitions and `for` expressions are +loosely typed. Potentially failing matches are still accepted at compile-time, +but may influence the program's runtime behavior. +From Scala 3.2 on, type checking rules will be tightened so that warnings are reported at compile-time instead. + +## Bindings in Pattern Definitions + +```scala +val xs: List[Any] = List(1, 2, 3) +val (x: String) :: _ = xs // error: pattern's type String is more specialized + // than the right-hand side expression's type Any +``` +This code gives a compile-time warning in Scala 3.2 (and also earlier Scala 3.x under the `-source future` setting) whereas it will fail at runtime with a `ClassCastException` in Scala 2. In Scala 3.2, a pattern binding is only allowed if the pattern is _irrefutable_, that is, if the right-hand side's type conforms to the pattern's type. For instance, the following is OK: +```scala +val pair = (1, true) +val (x, y) = pair +``` +Sometimes one wants to decompose data anyway, even though the pattern is refutable.
For instance, if at some point one knows that a list `elems` is non-empty one might want to decompose it like this: +```scala +val first :: rest = elems // error +``` +This works in Scala 2. In fact it is a typical use case for Scala 2's rules. But in Scala 3.2 it will give a warning. One can avoid the warning by marking the right-hand side with an [`@unchecked`](https://scala-lang.org/api/3.x/scala/unchecked.html) annotation: +```scala +val first :: rest = elems: @unchecked // OK +``` +This will make the compiler accept the pattern binding. It might give an error at runtime instead, if the underlying assumption that `elems` can never be empty is wrong. + +## Pattern Bindings in `for` Expressions + +Analogous changes apply to patterns in `for` expressions. For instance: + +```scala +val elems: List[Any] = List((1, 2), "hello", (3, 4)) +for (x, y) <- elems yield (y, x) // error: pattern's type (Any, Any) is more specialized + // than the right-hand side expression's type Any +``` +This code gives a compile-time warning in Scala 3.2 whereas in Scala 2 the list `elems` +is filtered to retain only the elements of tuple type that match the pattern `(x, y)`. +The filtering functionality can be obtained in Scala 3 by prefixing the pattern with `case`: +```scala +for case (x, y) <- elems yield (y, x) // returns List((2, 1), (4, 3)) +``` + +## Syntax Changes + +Generators in for expressions may be prefixed with `case`. +``` +Generator ::= [‘case’] Pattern1 ‘<-’ Expr +``` + +## Migration + +The new syntax is supported in Scala 3.0. However, to enable smooth cross compilation between Scala 2 and Scala 3, the changed behavior and additional type checks are only enabled under the `-source future` setting. They will be enabled by default in version 3.2 of the language. 
diff --git a/docs/_spec/TODOreference/changed-features/pattern-matching.md b/docs/_spec/TODOreference/changed-features/pattern-matching.md new file mode 100644 index 000000000000..30ae5d9dc104 --- /dev/null +++ b/docs/_spec/TODOreference/changed-features/pattern-matching.md @@ -0,0 +1,243 @@ +--- +layout: doc-page +title: "Option-less pattern matching" +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/pattern-matching.html +--- + +The implementation of pattern matching in Scala 3 was greatly simplified compared to Scala 2. From a user perspective, this means that Scala 3 generated patterns are a _lot_ easier to debug, as variables all show up in debug modes and positions are correctly preserved. + +Scala 3 supports a superset of Scala 2 [extractors](https://www.scala-lang.org/files/archive/spec/2.13/08-pattern-matching.html#extractor-patterns). + +## Extractors + +Extractors are objects that expose a method `unapply` or `unapplySeq`: + +```scala +def unapply[A](x: T)(implicit x: B): U +def unapplySeq[A](x: T)(implicit x: B): U +``` + +Extractors that expose the method `unapply` are called fixed-arity extractors, which +work with patterns of fixed arity. Extractors that expose the method `unapplySeq` are +called variadic extractors, which enables variadic patterns. + +### Fixed-Arity Extractors + +Fixed-arity extractors expose the following signature: + +```scala +def unapply[A](x: T)(implicit x: B): U +``` + +The type `U` conforms to one of the following matches: + +- Boolean match +- Product match + +Or `U` conforms to the type `R`: + +```scala +type R = { + def isEmpty: Boolean + def get: S +} +``` + +and `S` conforms to one of the following matches: + +- single match +- name-based match + +The former form of `unapply` has higher precedence, and _single match_ has higher +precedence over _name-based match_. 
+ +A usage of a fixed-arity extractor is irrefutable if one of the following condition holds: + +- `U = true` +- the extractor is used as a product match +- `U = Some[T]` (for Scala 2 compatibility) +- `U <: R` and `U <: { def isEmpty: false }` + +### Variadic Extractors + +Variadic extractors expose the following signature: + +```scala +def unapplySeq[A](x: T)(implicit x: B): U +``` + +The type `U` conforms to one of the following matches: + +- sequence match +- product-sequence match + +Or `U` conforms to the type `R`: + +```scala +type R = { + def isEmpty: Boolean + def get: S +} +``` + +and `S` conforms to one of the two matches above. + +The former form of `unapplySeq` has higher priority, and _sequence match_ has higher +precedence over _product-sequence match_. + +A usage of a variadic extractor is irrefutable if one of the following conditions holds: + +- the extractor is used directly as a sequence match or product-sequence match +- `U = Some[T]` (for Scala 2 compatibility) +- `U <: R` and `U <: { def isEmpty: false }` + +## Boolean Match + +- `U =:= Boolean` +- Pattern-matching on exactly `0` patterns + +For example: + + + +```scala +object Even: + def unapply(s: String): Boolean = s.size % 2 == 0 + +"even" match + case s @ Even() => println(s"$s has an even number of characters") + case s => println(s"$s has an odd number of characters") + +// even has an even number of characters +``` + +## Product Match + +- `U <: Product` +- `N > 0` is the maximum number of consecutive (parameterless `def` or `val`) `_1: P1` ... `_N: PN` members in `U` +- Pattern-matching on exactly `N` patterns with types `P1, P2, ..., PN` + +For example: + + + +```scala +class FirstChars(s: String) extends Product: + def _1 = s.charAt(0) + def _2 = s.charAt(1) + + // Not used by pattern matching: Product is only used as a marker trait. + def canEqual(that: Any): Boolean = ??? + def productArity: Int = ??? + def productElement(n: Int): Any = ??? 
+ +object FirstChars: + def unapply(s: String): FirstChars = new FirstChars(s) + +"Hi!" match + case FirstChars(char1, char2) => + println(s"First: $char1; Second: $char2") + +// First: H; Second: i +``` + +## Single Match + +- If there is exactly `1` pattern, pattern-matching on `1` pattern with type `U` + + + +```scala +class Nat(val x: Int): + def get: Int = x + def isEmpty = x < 0 + +object Nat: + def unapply(x: Int): Nat = new Nat(x) + +5 match + case Nat(n) => println(s"$n is a natural number") + case _ => () + +// 5 is a natural number +``` + +## Name-based Match + +- `N > 1` is the maximum number of consecutive (parameterless `def` or `val`) `_1: P1 ... _N: PN` members in `U` +- Pattern-matching on exactly `N` patterns with types `P1, P2, ..., PN` + +```scala +object ProdEmpty: + def _1: Int = ??? + def _2: String = ??? + def isEmpty = true + def unapply(s: String): this.type = this + def get = this + +"" match + case ProdEmpty(_, _) => ??? + case _ => () +``` + +## Sequence Match + +- `U <: X`, `T2` and `T3` conform to `T1` + +```scala +type X = { + def lengthCompare(len: Int): Int // or, `def length: Int` + def apply(i: Int): T1 + def drop(n: Int): scala.Seq[T2] + def toSeq: scala.Seq[T3] +} +``` + +- Pattern-matching on _exactly_ `N` simple patterns with types `T1, T1, ..., T1`, where `N` is the runtime size of the sequence, or +- Pattern-matching on `>= N` simple patterns and _a vararg pattern_ (e.g., `xs: _*`) with types `T1, T1, ..., T1, Seq[T1]`, where `N` is the minimum size of the sequence. + + + +```scala +object CharList: + def unapplySeq(s: String): Option[Seq[Char]] = Some(s.toList) + +"example" match + case CharList(c1, c2, c3, c4, _, _, _) => + println(s"$c1,$c2,$c3,$c4") + case _ => + println("Expected *exactly* 7 characters!") + +// e,x,a,m +``` + +## Product-Sequence Match + +- `U <: Product` +- `N > 0` is the maximum number of consecutive (parameterless `def` or `val`) `_1: P1` ... 
`_N: PN` members in `U` +- `PN` conforms to the signature `X` defined in Seq Pattern +- Pattern-matching on exactly `>= N` patterns, the first `N - 1` patterns have types `P1, P2, ... P(N-1)`, + the types of the remaining patterns are determined as in Seq Pattern. + +```scala +class Foo(val name: String, val children: Int*) +object Foo: + def unapplySeq(f: Foo): Option[(String, Seq[Int])] = + Some((f.name, f.children)) + +def foo(f: Foo) = f match + case Foo(name, x, y, ns*) => ">= two children." + case Foo(name, ns*) => "< two children." +``` + +There are plans for further simplification, in particular to factor out _product match_ +and _name-based match_ into a single type of extractor. + +## Type testing + +Abstract type testing with `ClassTag` is replaced with `TypeTest` or the alias `Typeable`. + +- pattern `_: X` for an abstract type requires a `TypeTest` in scope +- pattern `x @ X()` for an unapply that takes an abstract type requires a `TypeTest` in scope + +[More details on `TypeTest`](../other-new-features/type-test.md) diff --git a/docs/_spec/TODOreference/changed-features/structural-types-spec.md b/docs/_spec/TODOreference/changed-features/structural-types-spec.md new file mode 100644 index 000000000000..d456932649fb --- /dev/null +++ b/docs/_spec/TODOreference/changed-features/structural-types-spec.md @@ -0,0 +1,153 @@ +--- +layout: doc-page +title: "Programmatic Structural Types - More Details" +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/structural-types-spec.html +--- + +## Syntax + +``` +SimpleType ::= ...
| Refinement +Refinement ::= ‘{’ RefineStatSeq ‘}’ +RefineStatSeq ::= RefineStat {semi RefineStat} +RefineStat ::= ‘val’ VarDcl | ‘def’ DefDcl | ‘type’ {nl} TypeDcl +``` + +## Implementation of Structural Types + +The standard library defines a universal marker trait +[`scala.Selectable`](https://github.com/lampepfl/dotty/blob/main/library/src/scala/Selectable.scala): + +```scala +trait Selectable extends Any +``` + +An implementation of `Selectable` that relies on [Java reflection](https://www.oracle.com/technical-resources/articles/java/javareflection.html) is +available in the standard library: `scala.reflect.Selectable`. Other +implementations can be envisioned for platforms where Java reflection +is not available. + +Implementations of `Selectable` have to make available one or both of +the methods `selectDynamic` and `applyDynamic`. The methods could be members of the `Selectable` implementation or they could be extension methods. + +The `selectDynamic` method takes a field name and returns the value associated with that name in the `Selectable`. +It should have a signature of the form: + +```scala +def selectDynamic(name: String): T +``` + +Often, the return type `T` is `Any`. + +Unlike `scala.Dynamic`, there is no special meaning for an `updateDynamic` method. +However, we reserve the right to give it meaning in the future. +Consequently, it is recommended not to define any member called `updateDynamic` in `Selectable`s. + +The `applyDynamic` method is used for selections that are applied to arguments. It takes a method name and possibly `Class`es representing its parameters types as well as the arguments to pass to the function. +Its signature should be of one of the two following forms: + +```scala +def applyDynamic(name: String)(args: Any*): T +def applyDynamic(name: String, ctags: Class[?]*)(args: Any*): T +``` + +Both versions are passed the actual arguments in the `args` parameter. 
The second version takes in addition a vararg argument of `java.lang.Class`es that identify the method's parameter classes. Such an argument is needed +if `applyDynamic` is implemented using Java reflection, but it could be +useful in other cases as well. `selectDynamic` and `applyDynamic` can also take additional context parameters in using clauses. These are resolved in the normal way at the callsite. + +Given a value `v` of type `C { Rs }`, where `C` is a class reference +and `Rs` are structural refinement declarations, and given `v.a` of type `U`, we consider three distinct cases: + +- If `U` is a value type, we map `v.a` to: + ```scala + v.selectDynamic("a").asInstanceOf[U] + ``` + +- If `U` is a method type `(T11, ..., T1n)...(TN1, ..., TNn): R` and it is not a dependent method type, we map `v.a(a11, ..., a1n)...(aN1, ..., aNn)` to: + ```scala + v.applyDynamic("a")(a11, ..., a1n, ..., aN1, ..., aNn) + .asInstanceOf[R] + ``` + If this call resolves to an `applyDynamic` method of the second form that takes a `Class[?]*` argument, we further rewrite this call to + ```scala + v.applyDynamic("a", c11, ..., c1n, ..., cN1, ... cNn)( + a11, ..., a1n, ..., aN1, ..., aNn) + .asInstanceOf[R] + ``` + where each `c_ij` is the literal `java.lang.Class[?]` of the type of the formal parameter `Tij`, i.e., `classOf[Tij]`. + +- If `U` is neither a value nor a method type, or a dependent method + type, an error is emitted. + +Note that `v`'s static type does not necessarily have to conform to `Selectable`, nor does it need to have `selectDynamic` and `applyDynamic` as members. It suffices that there is an implicit +conversion that can turn `v` into a `Selectable`, and the selection methods could also be available as +[extension methods](../contextual/extension-methods.md). + +## Limitations of Structural Types + +- Dependent methods cannot be called via structural call. 
+ +- Refinements may not introduce overloads: If a refinement specifies the signature + of a method `m`, and `m` is also defined in the parent type of the refinement, then + the new signature must properly override the existing one. + +- Subtyping of structural refinements must preserve erased parameter types: Assume + we want to prove `S <: T { def m(x: A): B }`. Then, as usual, `S` must have a member method `m` that can take an argument of type `A`. Furthermore, if `m` is not a member of `T` (i.e. the refinement is structural), an additional condition applies. In this case, the member _definition_ `m` of `S` will have a parameter + with type `A'` say. The additional condition is that the erasure of `A'` and `A` is the same. Here is an example: + + ```scala + class Sink[A] { def put(x: A): Unit = {} } + val a = Sink[String]() + val b: { def put(x: String): Unit } = a // error + b.put("abc") // looks for a method with a `String` parameter + ``` + The second to last line is not well-typed, + since the erasure of the parameter type of `put` in class `Sink` is `Object`, + but the erasure of `put`'s parameter in the type of `b` is `String`. + This additional condition is necessary, since we will have to resort + to some (as yet unknown) form of reflection to call a structural member + like `put` in the type of `b` above. The condition ensures that the statically + known parameter types of the refinement correspond up to erasure to the + parameter types of the selected call target at runtime. + + Most reflection dispatch algorithms need to know exact erased parameter types. For instance, if the example above would typecheck, the call + `b.put("abc")` on the last line would look for a method `put` in the runtime type of `b` that takes a `String` parameter. But the `put` method is the one from class `Sink`, which takes an `Object` parameter. Hence the call would fail at runtime with a `NoSuchMethodException`. 
+ + One might hope for a "more intelligent" reflective dispatch algorithm that does not require exact parameter type matching. Unfortunately, this can always run into ambiguities, as long as overloading is a possibility. For instance, continuing the example above, we might introduce a new subclass `Sink1` of `Sink` and change the definition of `a` as follows: + + ```scala + class Sink1[A] extends Sink[A] { def put(x: "123") = ??? } + val a: Sink[String] = Sink1[String]() + ``` + + Now there are two `put` methods in the runtime type of `b` with erased parameter + types `Object` and `String`, respectively. Yet dynamic dispatch still needs to go + to the first `put` method, even though the second looks like a better match. + + For the cases where we can in fact implement reflection without knowing precise parameter types (for instance if static overloading is replaced by dynamically dispatched multi-methods), there is an escape hatch. For types that extend `scala.Selectable.WithoutPreciseParameterTypes` the signature check is omitted. Example: + + ```scala + trait MultiMethodSelectable extends Selectable.WithoutPreciseParameterTypes: + // Assume this version of `applyDynamic` can be implemented without knowing + // precise parameter types `paramTypes`: + def applyDynamic(name: String, paramTypes: Class[_]*)(args: Any*): Any = ??? + + class Sink[A] extends MultiMethodSelectable: + def put(x: A): Unit = {} + + val a = new Sink[String] + val b: MultiMethodSelectable { def put(x: String): Unit } = a // OK + ``` + +## Differences with Scala 2 Structural Types + +- Scala 2 supports structural types by means of Java reflection. Unlike + Scala 3, structural calls do not rely on a mechanism such as + `Selectable`, and reflection cannot be avoided. +- In Scala 2, refinements can introduce overloads. +- In Scala 2, mutable `var`s are allowed in refinements. In Scala 3, + they are no longer allowed.
+- Scala 2 does not impose the "same-erasure" restriction on subtyping of structural types. It allows some calls to fail at runtime instead. + +## Context + +For more information, see [Rethink Structural Types](https://github.com/lampepfl/dotty/issues/1886). diff --git a/docs/_spec/TODOreference/changed-features/structural-types.md b/docs/_spec/TODOreference/changed-features/structural-types.md new file mode 100644 index 000000000000..37e583332cf1 --- /dev/null +++ b/docs/_spec/TODOreference/changed-features/structural-types.md @@ -0,0 +1,191 @@ +--- +layout: doc-page +title: "Programmatic Structural Types" +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/structural-types.html +--- + +## Motivation + +Some usecases, such as modelling database access, are more awkward in +statically typed languages than in dynamically typed languages: With +dynamically typed languages, it's quite natural to model a row as a +record or object, and to select entries with simple dot notation (e.g. +`row.columnName`). + +Achieving the same experience in statically typed +language requires defining a class for every possible row arising from +database manipulation (including rows arising from joins and +projections) and setting up a scheme to map between a row and the +class representing it. + +This requires a large amount of boilerplate, which leads developers to +trade the advantages of static typing for simpler schemes where colum +names are represented as strings and passed to other operators (e.g. +`row.select("columnName")`). This approach forgoes the advantages of +static typing, and is still not as natural as the dynamically typed +version. + +Structural types help in situations where we would like to support +simple dot notation in dynamic contexts without losing the advantages +of static typing. They allow developers to use dot notation and +configure how fields and methods should be resolved. 
+ +## Example + +Here's an example of a structural type `Person`: + +```scala + class Record(elems: (String, Any)*) extends Selectable: + private val fields = elems.toMap + def selectDynamic(name: String): Any = fields(name) + + type Person = Record { val name: String; val age: Int } + ``` + +The type `Person` adds a _refinement_ to its parent type `Record` that defines the two fields `name` and `age`. We say the refinement is _structural_ since `name` and `age` are not defined in the parent type. But they exist nevertheless as members of class `Person`. For instance, the following +program would print "Emma is 42 years old.": + +```scala + val person = Record("name" -> "Emma", "age" -> 42).asInstanceOf[Person] + println(s"${person.name} is ${person.age} years old.") +``` + +The parent type `Record` in this example is a generic class that can represent arbitrary records in its `elems` argument. This argument is a +sequence of pairs of labels of type `String` and values of type `Any`. +When we create a `Person` as a `Record` we have to assert with a typecast +that the record defines the right fields of the right types. `Record` +itself is too weakly typed so the compiler cannot know this without +help from the user. In practice, the connection between a structural type +and its underlying generic representation would most likely be done by +a database layer, and therefore would not be a concern of the end user. + +`Record` extends the marker trait [`scala.Selectable`](https://scala-lang.org/api/3.x/scala/Selectable.html) and defines +a method `selectDynamic`, which maps a field name to its value. +Selecting a structural type member is done by calling this method. +The `person.name` and `person.age` selections are translated by +the Scala compiler to: + +```scala + person.selectDynamic("name").asInstanceOf[String] + person.selectDynamic("age").asInstanceOf[Int] +``` + +Besides `selectDynamic`, a `Selectable` class sometimes also defines a method `applyDynamic`. 
This can then be used to translate function calls of structural members. So, if `a` is an instance of `Selectable`, a structural call like `a.f(b, c)` would translate to + +```scala + a.applyDynamic("f")(b, c) +``` + +## Using Java Reflection + +Structural types can also be accessed using [Java reflection](https://www.oracle.com/technical-resources/articles/java/javareflection.html). Example: + +```scala + type Closeable = { def close(): Unit } + + class FileInputStream: + def close(): Unit + + class Channel: + def close(): Unit +``` + +Here, we define a structural type `Closeable` that defines a `close` method. There are various classes that have `close` methods, we just list [`FileInputStream`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/io/FileInputStream.html#%3Cinit%3E(java.io.File)) and [`Channel`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/nio/channels/Channel.html) as two examples. It would be easiest if the two classes shared a common interface that factors out the `close` method. But such factorings are often not possible if different libraries are combined in one application. Yet, we can still have methods that work on +all classes with a `close` method by using the `Closeable` type. For instance, + +```scala + import scala.reflect.Selectable.reflectiveSelectable + + def autoClose(f: Closeable)(op: Closeable => Unit): Unit = + try op(f) finally f.close() +``` + +The call `f.close()` has to use Java reflection to identify and call the `close` method in the receiver `f`. This needs to be enabled by an import +of `reflectiveSelectable` shown above. What happens "under the hood" is then the following: + + - The import makes available an implicit conversion that turns any type into a + `Selectable`. `f` is wrapped in this conversion. + + - The compiler then transforms the `close` call on the wrapped `f` + to an `applyDynamic` call. 
The end result is: + + ```scala + reflectiveSelectable(f).applyDynamic("close")() + ``` + - The implementation of `applyDynamic` in `reflectiveSelectable`'s result +uses Java reflection to find and call a method `close` with zero parameters in the value referenced by `f` at runtime. + +Structural calls like this tend to be much slower than normal method calls. The mandatory import of `reflectiveSelectable` serves as a signpost that something inefficient is going on. + +**Note:** In Scala 2, Java reflection is the only mechanism available for structural types and it is automatically enabled without needing the +`reflectiveSelectable` conversion. However, to warn against inefficient +dispatch, Scala 2 requires a language import `import scala.language.reflectiveCalls`. + +Before resorting to structural calls with Java reflection one should consider alternatives. For instance, sometimes a more modular _and_ efficient architecture can be obtained using type classes. + +## Extensibility + +New instances of `Selectable` can be defined to support means of +access other than Java reflection, which would enable usages such as +the database access example given at the beginning of this document. + +## Local Selectable Instances + +Local and anonymous classes that extend `Selectable` get more refined types +than other classes. Here is an example: + +```scala +trait Vehicle extends reflect.Selectable: + val wheels: Int + +val i3 = new Vehicle: // i3: Vehicle { val range: Int } + val wheels = 4 + val range = 240 + +i3.range +``` + +The type of `i3` in this example is `Vehicle { val range: Int }`. Hence, +`i3.range` is well-formed. Since the base class `Vehicle` does not define a `range` field or method, we need structural dispatch to access the `range` field of the anonymous class that initializes `i3`.
Structural dispatch +is implemented by the base trait [`reflect.Selectable`](https://scala-lang.org/api/3.x/scala/reflect/Selectable.html) of `Vehicle`, which defines the necessary `selectDynamic` member. + +`Vehicle` could also extend some other subclass of [`scala.Selectable`](https://scala-lang.org/api/3.x/scala/Selectable.html) that implements `selectDynamic` and `applyDynamic` differently. But if it does not extend a `Selectable` at all, the code would no longer typecheck: + +```scala +trait Vehicle: + val wheels: Int + +val i3 = new Vehicle: // i3: Vehicle + val wheels = 4 + val range = 240 + +i3.range // error: range is not a member of `Vehicle` +``` + +The difference is that the type of an anonymous class that does not extend `Selectable` is just formed from the parent type(s) of the class, without +adding any refinements. Hence, `i3` now has just type `Vehicle` and the selection `i3.range` gives a "member not found" error. + +Note that in Scala 2 all local and anonymous classes could produce values with refined types. But +members defined by such refinements could be selected only with the language import +[`reflectiveCalls`](https://scala-lang.org/api/3.x/scala/languageFeature$$reflectiveCalls$.html). + +## Relation with `scala.Dynamic` + +There are clearly some connections with [`scala.Dynamic`](https://scala-lang.org/api/3.x/scala/Dynamic.html) here, since +both select members programmatically. But there are also some +differences. + +- Fully dynamic selection is not typesafe, but structural selection + is, as long as the correspondence of the structural type with the + underlying value is as stated. + +- [`Dynamic`](https://scala-lang.org/api/3.x/scala/Dynamic.html) is just a marker trait, which gives more leeway where and + how to define reflective access operations. By contrast + `Selectable` is a trait which declares the access operations. + +- Two access operations, `selectDynamic` and `applyDynamic` are shared + between both approaches. 
In `Selectable`, `applyDynamic` may also take + [`java.lang.Class`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/lang/Class.html) arguments indicating the method's formal parameter types. + [`Dynamic`](https://scala-lang.org/api/3.x/scala/Dynamic.html) comes with `updateDynamic`. + +[More details](structural-types-spec.md) diff --git a/docs/_spec/TODOreference/changed-features/type-checking.md b/docs/_spec/TODOreference/changed-features/type-checking.md new file mode 100644 index 000000000000..6f59b1a1c1c6 --- /dev/null +++ b/docs/_spec/TODOreference/changed-features/type-checking.md @@ -0,0 +1,7 @@ +--- +layout: doc-page +title: "Changes in Type Checking" +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/type-checking.html +--- + +*** **TO BE FILLED IN** *** diff --git a/docs/_spec/TODOreference/changed-features/type-inference.md b/docs/_spec/TODOreference/changed-features/type-inference.md new file mode 100644 index 000000000000..00d0e959f5ed --- /dev/null +++ b/docs/_spec/TODOreference/changed-features/type-inference.md @@ -0,0 +1,10 @@ +--- +layout: doc-page +title: "Changes in Type Inference" +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/type-inference.html +--- + +For more information, see the two presentations + +* [Scala 3, Type inference and You!](https://www.youtube.com/watch?v=lMvOykNQ4zs) by Guillaume Martres (September 2019) +* [GADTs in Dotty](https://www.youtube.com/watch?v=VV9lPg3fNl8) by Aleksander Boruch-Gruszecki (July 2019).
diff --git a/docs/_spec/TODOreference/changed-features/vararg-splices.md b/docs/_spec/TODOreference/changed-features/vararg-splices.md new file mode 100644 index 000000000000..43c4acc5f880 --- /dev/null +++ b/docs/_spec/TODOreference/changed-features/vararg-splices.md @@ -0,0 +1,40 @@ +--- +layout: doc-page +title: "Vararg Splices" +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/vararg-splices.html +--- + +The syntax of vararg splices in patterns and function arguments has changed. The new syntax uses a postfix `*`, analogously to how a vararg parameter is declared. + +```scala +val arr = Array(0, 1, 2, 3) +val lst = List(arr*) // vararg splice argument +lst match + case List(0, 1, xs*) => println(xs) // binds xs to Seq(2, 3) + case List(1, _*) => // wildcard pattern +``` + +The old syntax for splice arguments will be phased out. + +```scala +/*!*/ val lst = List(arr: _*) // syntax error + lst match + case List(0, 1, xs @ _*) // ok, equivalent to `xs*` +``` + +## Syntax + +``` +ArgumentPatterns ::= ‘(’ [Patterns] ‘)’ + | ‘(’ [Patterns ‘,’] Pattern2 ‘*’ ‘)’ + +ParArgumentExprs ::= ‘(’ [‘using’] ExprsInParens ‘)’ + | ‘(’ [ExprsInParens ‘,’] PostfixExpr ‘*’ ‘)’ +``` + +## Compatibility considerations + +To enable cross compilation between Scala 2 and Scala 3, the compiler will +accept both the old and the new syntax. Under the `-source future` setting, an error +will be emitted when the old syntax is encountered. An automatic rewrite from old +to new syntax is offered under `-source future-migration`. 
diff --git a/docs/_spec/TODOreference/changed-features/wildcards.md b/docs/_spec/TODOreference/changed-features/wildcards.md new file mode 100644 index 000000000000..0d3e13c3d7e0 --- /dev/null +++ b/docs/_spec/TODOreference/changed-features/wildcards.md @@ -0,0 +1,50 @@ +--- +layout: doc-page +title: Wildcard Arguments in Types +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/wildcards.html +--- + +The syntax of wildcard arguments in types has changed from `_` to `?`. Example: +```scala +List[?] +Map[? <: AnyRef, ? >: Null] +``` + +## Motivation + +We would like to use the underscore syntax `_` to stand for an anonymous type parameter, aligning it with its meaning in +value parameter lists. So, just as `f(_)` is a shorthand for the lambda `x => f(x)`, in the future `C[_]` will be a shorthand +for the type lambda `[X] =>> C[X]`. This makes higher-kinded types easier to use. It also removes the wart that, used as a type +parameter, `F[_]` means `F` is a type constructor whereas used as a type, `F[_]` means it is a wildcard (i.e. existential) type. +In the future, `F[_]` will mean the same thing, no matter where it is used. + +We pick `?` as a replacement syntax for wildcard types, since it aligns with +[Java's syntax](https://docs.oracle.com/javase/tutorial/java/generics/wildcardGuidelines.html). + +## Migration Strategy + +The migration to the new scheme is complicated, in particular since the [kind projector](https://github.com/typelevel/kind-projector) +compiler plugin still uses the reverse convention, with `?` meaning parameter placeholder instead of wildcard. Fortunately, kind projector has added `*` as an alternative syntax for `?`. + +A step-by-step migration is made possible with the following measures: + + 1. In Scala 3.0, both `_` and `?` are legal names for wildcards. + 2. In Scala 3.1, `_` is deprecated in favor of `?` as a name for a wildcard. A `-rewrite` option is + available to rewrite one to the other. + 3. 
In Scala 3.2, the meaning of `_` changes from wildcard to placeholder for type parameter. + 4. The Scala 3.1 behavior is already available today under the `-source future` setting. + +To smooth the transition for codebases that use kind-projector, we adopt the following measures under the command line +option `-Ykind-projector`: + + 1. In Scala 3.0, `*` is available as a type parameter placeholder. + 2. In Scala 3.2, `*` is deprecated in favor of `_`. A `-rewrite` option is + available to rewrite one to the other. + 3. In Scala 3.3, `*` is removed again, and all type parameter placeholders will be expressed with `_`. + +These rules make it possible to cross build between Scala 2 using the kind projector plugin and Scala 3.0 - 3.2 using the compiler option `-Ykind-projector`. + +There is also a migration path for users that want a one-time transition to syntax with `_` as a type parameter placeholder. +With option `-Ykind-projector:underscores` Scala 3 will regard `_` as a type parameter placeholder, leaving `?` as the only syntax for wildcards. + +To cross-compile with old Scala 2 sources, while using `_` as a placeholder, you must use options `-Xsource:3 -P:kind-projector:underscore-placeholders` together with a recent version of kind-projector (`0.13` and higher) and most recent versions of Scala 2 (`2.13.5` and higher and `2.12.14` and higher). diff --git a/docs/_spec/TODOreference/contextual/by-name-context-parameters.md b/docs/_spec/TODOreference/contextual/by-name-context-parameters.md new file mode 100644 index 000000000000..3004bfb2c4c2 --- /dev/null +++ b/docs/_spec/TODOreference/contextual/by-name-context-parameters.md @@ -0,0 +1,65 @@ +--- +layout: doc-page +title: "By-Name Context Parameters" +nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/by-name-context-parameters.html +--- + +Context parameters can be declared by-name to avoid a divergent inferred expansion.
Example: + +```scala +trait Codec[T]: + def write(x: T): Unit + +given intCodec: Codec[Int] = ??? + +given optionCodec[T](using ev: => Codec[T]): Codec[Option[T]] with + def write(xo: Option[T]) = xo match + case Some(x) => ev.write(x) + case None => + +val s = summon[Codec[Option[Int]]] + +s.write(Some(33)) +s.write(None) +``` +As is the case for a normal by-name parameter, the argument for the context parameter `ev` +is evaluated on demand. In the example above, if the option value `x` is `None`, it is +not evaluated at all. + +The synthesized argument for a context parameter is backed by a local val +if this is necessary to prevent an otherwise diverging expansion. + +The precise steps for synthesizing an argument for a by-name context parameter of type `=> T` are as follows. + + 1. Create a new given of type `T`: + + ```scala + given lv: T = ??? + ``` + + where `lv` is an arbitrary fresh name. + + 1. This given is not immediately available as candidate for argument inference (making it immediately available could result in a loop in the synthesized computation). But it becomes available in all nested contexts that look again for an argument to a by-name context parameter. + + 1. If this search succeeds with expression `E`, and `E` contains references to `lv`, replace `E` by + + ```scala + { given lv: T = E; lv } + ``` + + Otherwise, return `E` unchanged. + +In the example above, the definition of `s` would be expanded as follows. + +```scala +val s = summon[Test.Codec[Option[Int]]]( + optionCodec[Int](using intCodec) +) +``` + +No local given instance was generated because the synthesized argument is not recursive. + +## Reference + +For more information, see [Issue #1998](https://github.com/lampepfl/dotty/issues/1998) +and the associated [Scala SIP](https://docs.scala-lang.org/sips/byname-implicits.html). 
diff --git a/docs/_spec/TODOreference/contextual/context-bounds.md b/docs/_spec/TODOreference/contextual/context-bounds.md new file mode 100644 index 000000000000..42479d6802b3 --- /dev/null +++ b/docs/_spec/TODOreference/contextual/context-bounds.md @@ -0,0 +1,53 @@ +--- +layout: doc-page +title: "Context Bounds" +nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/context-bounds.html +--- + +A context bound is a shorthand for expressing the common pattern of a context parameter that depends on a type parameter. Using a context bound, the `maximum` function of the last section can be written like this: + +```scala +def maximum[T: Ord](xs: List[T]): T = xs.reduceLeft(max) +``` + +A bound like `: Ord` on a type parameter `T` of a method or class indicates a context parameter `using Ord[T]`. The context parameter(s) generated from context bounds +are added as follows: + + - If the method parameters end in an implicit parameter list or using clause, + context parameters are added in front of that list. + - Otherwise they are added as a separate parameter clause at the end. + +Example: + +```scala +def f[T: C1 : C2, U: C3](x: T)(using y: U, z: V): R +``` + +would expand to + +```scala +def f[T, U](x: T)(using _: C1[T], _: C2[T], _: C3[U], y: U, z: V): R +``` + +Context bounds can be combined with subtype bounds. If both are present, subtype bounds come first, e.g. + +```scala +def g[T <: B : C](x: T): R = ... +``` + +## Migration + +To ease migration, context bounds in Dotty map in Scala 3.0 to old-style implicit parameters +for which arguments can be passed either with a `(using ...)` clause or with a normal application. From Scala 3.1 on, they will map to context parameters instead, as is described above. + +If the source version is `future-migration`, any pairing of an evidence +context parameter stemming from a context bound with a normal argument will give a migration +warning. The warning indicates that a `(using ...)` clause is needed instead. 
The rewrite can be +done automatically under `-rewrite`. + +## Syntax + +``` +TypeParamBounds ::= [SubtypeBounds] {ContextBound} +ContextBound ::= ‘:’ Type +``` diff --git a/docs/_spec/TODOreference/contextual/context-functions-spec.md b/docs/_spec/TODOreference/contextual/context-functions-spec.md new file mode 100644 index 000000000000..109513e9da86 --- /dev/null +++ b/docs/_spec/TODOreference/contextual/context-functions-spec.md @@ -0,0 +1,79 @@ +--- +layout: doc-page +title: "Context Functions - More Details" +nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/context-functions-spec.html +--- + +## Syntax + +``` +Type ::= ... + | FunArgTypes ‘?=>’ Type +Expr ::= ... + | FunParams ‘?=>’ Expr +``` + +Context function types associate to the right, e.g. +`S ?=> T ?=> U` is the same as `S ?=> (T ?=> U)`. + +## Implementation + +Context function types are shorthands for class types that define `apply` +methods with context parameters. Specifically, the `N`-ary function type + +`T1, ..., TN ?=> R` is a shorthand for the class type +`ContextFunctionN[T1, ..., TN, R]`. Such class types are assumed to have the following definitions, for any value of `N >= 1`: + +```scala +package scala +trait ContextFunctionN[-T1, ..., -TN, +R]: + def apply(using x1: T1, ..., xN: TN): R +``` + +Context function types erase to normal function types, so these classes are +generated on the fly for typechecking, but not realized in actual code. + +Context function literals `(x1: T1, ..., xn: Tn) ?=> e` map +context parameters `xi` of types `Ti` to the result of evaluating the expression `e`. +The scope of each context parameter `xi` is `e`. The parameters must have pairwise distinct names. + +If the expected type of the context function literal is of the form +`scala.ContextFunctionN[S1, ..., Sn, R]`, the expected type of `e` is `R` and +the type `Ti` of any of the parameters `xi` can be omitted, in which case `Ti += Si` is assumed. 
If the expected type of the context function literal is +some other type, all context parameter types must be explicitly given, and the expected type of `e` is undefined. +The type of the context function literal is `scala.ContextFunctionN[S1, ...,Sn, T]`, where `T` is the widened +type of `e`. `T` must be equivalent to a type which does not refer to any of +the context parameters `xi`. + +The context function literal is evaluated as the instance creation expression + +```scala +new scala.ContextFunctionN[T1, ..., Tn, T]: + def apply(using x1: T1, ..., xn: Tn): T = e +``` + +A context parameter may also be a wildcard represented by an underscore `_`. In that case, a fresh name for the parameter is chosen arbitrarily. + +**Note:** The closing paragraph of the +[Anonymous Functions section](https://www.scala-lang.org/files/archive/spec/2.13/06-expressions.html#anonymous-functions) +of Scala 2.13 is subsumed by context function types and should be removed. + +Context function literals `(x1: T1, ..., xn: Tn) ?=> e` are +automatically created for any expression `e` whose expected type is +`scala.ContextFunctionN[T1, ..., Tn, R]`, unless `e` is +itself a context function literal. This is analogous to the automatic +insertion of [`scala.Function0`](https://scala-lang.org/api/3.x/scala/Function0.html) around expressions in by-name argument position. + +Context function types generalize to `N > 22` in the same way that function types do, see [the corresponding +documentation](../dropped-features/limit22.md). + +## Examples + +See the section on Expressiveness from [Simplicitly: foundations and +applications of implicit function +types](https://dl.acm.org/citation.cfm?id=3158130). + +## Type Checking + +After desugaring no additional typing rules are required for context function types. 
diff --git a/docs/_spec/TODOreference/contextual/context-functions.md b/docs/_spec/TODOreference/contextual/context-functions.md new file mode 100644 index 000000000000..0ad3c8757782 --- /dev/null +++ b/docs/_spec/TODOreference/contextual/context-functions.md @@ -0,0 +1,154 @@ +--- +layout: doc-page +title: "Context Functions" +nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/context-functions.html +--- + +_Context functions_ are functions with (only) context parameters. +Their types are _context function types_. Here is an example of a context function type: + +```scala +type Executable[T] = ExecutionContext ?=> T +``` +Context functions are written using `?=>` as the "arrow" sign. +They are applied to synthesized arguments, in +the same way methods with context parameters are applied. For instance: +```scala + given ec: ExecutionContext = ... + + def f(x: Int): ExecutionContext ?=> Int = ... + + // could be written as follows with the type alias from above + // def f(x: Int): Executable[Int] = ... + + f(2)(using ec) // explicit argument + f(2) // argument is inferred +``` +Conversely, if the expected type of an expression `E` is a context function type +`(T_1, ..., T_n) ?=> U` and `E` is not already an +context function literal, `E` is converted to a context function literal by rewriting it to +```scala + (x_1: T1, ..., x_n: Tn) ?=> E +``` +where the names `x_1`, ..., `x_n` are arbitrary. This expansion is performed +before the expression `E` is typechecked, which means that `x_1`, ..., `x_n` +are available as givens in `E`. + +Like their types, context function literals are written using `?=>` as the arrow between parameters and results. They differ from normal function literals in that their types are context function types. + +For example, continuing with the previous definitions, +```scala + def g(arg: Executable[Int]) = ... 
+ + g(22) // is expanded to g((ev: ExecutionContext) ?=> 22) + + g(f(2)) // is expanded to g((ev: ExecutionContext) ?=> f(2)(using ev)) + + g((ctx: ExecutionContext) ?=> f(3)) // is expanded to g((ctx: ExecutionContext) ?=> f(3)(using ctx)) + g((ctx: ExecutionContext) ?=> f(3)(using ctx)) // is left as it is +``` + +## Example: Builder Pattern + +Context function types have considerable expressive power. For +instance, here is how they can support the "builder pattern", where +the aim is to construct tables like this: +```scala + table { + row { + cell("top left") + cell("top right") + } + row { + cell("bottom left") + cell("bottom right") + } + } +``` +The idea is to define classes for `Table` and `Row` that allow the +addition of elements via `add`: +```scala + class Table: + val rows = new ArrayBuffer[Row] + def add(r: Row): Unit = rows += r + override def toString = rows.mkString("Table(", ", ", ")") + + class Row: + val cells = new ArrayBuffer[Cell] + def add(c: Cell): Unit = cells += c + override def toString = cells.mkString("Row(", ", ", ")") + + case class Cell(elem: String) +``` +Then, the `table`, `row` and `cell` constructor methods can be defined +with context function types as parameters to avoid the plumbing boilerplate +that would otherwise be necessary. 
+```scala + def table(init: Table ?=> Unit) = + given t: Table = Table() + init + t + + def row(init: Row ?=> Unit)(using t: Table) = + given r: Row = Row() + init + t.add(r) + + def cell(str: String)(using r: Row) = + r.add(new Cell(str)) +``` +With that setup, the table construction code above compiles and expands to: +```scala + table { ($t: Table) ?=> + + row { ($r: Row) ?=> + cell("top left")(using $r) + cell("top right")(using $r) + }(using $t) + + row { ($r: Row) ?=> + cell("bottom left")(using $r) + cell("bottom right")(using $r) + }(using $t) + } +``` +## Example: Postconditions + +As a larger example, here is a way to define constructs for checking arbitrary postconditions using an extension method `ensuring` so that the checked result can be referred to simply by `result`. The example combines opaque type aliases, context function types, and extension methods to provide a zero-overhead abstraction. + +```scala +object PostConditions: + opaque type WrappedResult[T] = T + + def result[T](using r: WrappedResult[T]): T = r + + extension [T](x: T) + def ensuring(condition: WrappedResult[T] ?=> Boolean): T = + assert(condition(using x)) + x +end PostConditions +import PostConditions.{ensuring, result} + +val s = List(1, 2, 3).sum.ensuring(result == 6) +``` +**Explanations**: We use a context function type `WrappedResult[T] ?=> Boolean` +as the type of the condition of `ensuring`. An argument to `ensuring` such as +`(result == 6)` will therefore have a given of type `WrappedResult[T]` in +scope to pass along to the `result` method. `WrappedResult` is a fresh type, to make sure +that we do not get unwanted givens in scope (this is good practice in all cases +where context parameters are involved). Since `WrappedResult` is an opaque type alias, its +values need not be boxed, and since `ensuring` is added as an extension method, its argument +does not need boxing either. 
Hence, the implementation of `ensuring` is close in efficiency to the best possible code one could write by hand: + +```scala +val s = + val result = List(1, 2, 3).sum + assert(result == 6) + result +``` +## Reference + +For more information, see the [blog article](https://www.scala-lang.org/blog/2016/12/07/implicit-function-types.html), +(which uses a different syntax that has been superseded). + +[More details](./context-functions-spec.md) diff --git a/docs/_spec/TODOreference/contextual/contextual.md b/docs/_spec/TODOreference/contextual/contextual.md new file mode 100644 index 000000000000..fda63397f8f9 --- /dev/null +++ b/docs/_spec/TODOreference/contextual/contextual.md @@ -0,0 +1,83 @@ +--- +layout: index +title: "Contextual Abstractions" +nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual.html +--- + +## Critique of the Status Quo + +Scala's implicits are its most distinguished feature. They are _the_ fundamental way to abstract over context. They represent a unified paradigm with a great variety of use cases, among them: implementing type classes, establishing context, dependency injection, expressing capabilities, computing new types and proving relationships between them. + +Following Haskell, Scala was the second popular language to have some form of implicits. Other languages have followed suit. E.g [Rust's traits](https://doc.rust-lang.org/rust-by-example/trait.html) or [Swift's protocol extensions](https://docs.swift.org/swift-book/LanguageGuide/Protocols.html#ID521). Design proposals are also on the table for Kotlin as [compile time dependency resolution](https://github.com/Kotlin/KEEP/blob/e863b25f8b3f2e9b9aaac361c6ee52be31453ee0/proposals/compile-time-dependency-resolution.md), for C# as [Shapes and Extensions](https://github.com/dotnet/csharplang/issues/164) +or for F# as [Traits](https://github.com/MattWindsor91/visualfsharp/blob/hackathon-vs/examples/fsconcepts.md). 
Implicits are also a common feature of theorem provers such as [Coq](https://coq.inria.fr/refman/language/extensions/implicit-arguments.html) or [Agda](https://agda.readthedocs.io/en/latest/language/implicit-arguments.html). + +Even though these designs use widely different terminology, they are all variants of the core idea of _term inference_. Given a type, the compiler synthesizes a "canonical" term that has that type. Scala embodies the idea in a purer form than most other languages: An implicit parameter directly leads to an inferred argument term that could also be written down explicitly. By contrast, type class based designs are less direct since they hide term inference behind some form of type classification and do not offer the option of writing the inferred quantities (typically, dictionaries) explicitly. + +Given that term inference is where the industry is heading, and given that Scala has it in a very pure form, how come implicits are not more popular? In fact, it's fair to say that implicits are at the same time Scala's most distinguished and most controversial feature. I believe this is due to a number of aspects that together make implicits harder to learn than necessary and also make it harder to prevent abuses. + +Particular criticisms are: + +1. Being very powerful, implicits are easily over-used and mis-used. This observation holds in almost all cases when we talk about _implicit conversions_, which, even though conceptually different, share the same syntax with other implicit definitions. For instance, regarding the two definitions + + ```scala + implicit def i1(implicit x: T): C[T] = ... + implicit def i2(x: T): C[T] = ... + ``` + + the first of these is a conditional implicit _value_, the second an implicit _conversion_. Conditional implicit values are a cornerstone for expressing type classes, whereas most applications of implicit conversions have turned out to be of dubious value. 
The problem is that many newcomers to the language start with defining implicit conversions since they are easy to understand and seem powerful and convenient. Scala 3 will put under a language flag both definitions and applications of "undisciplined" implicit conversions between types defined elsewhere. This is a useful step to push back against overuse of implicit conversions. But the problem remains that syntactically, conversions and values just look too similar for comfort. + +2. Another widespread abuse is over-reliance on implicit imports. This often leads to inscrutable type errors that go away with the right import incantation, leaving a feeling of frustration. Conversely, it is hard to see what implicits a program uses since implicits can hide anywhere in a long list of imports. + +3. The syntax of implicit definitions is too minimal. It consists of a single modifier, `implicit`, that can be attached to a large number of language constructs. A problem with this for newcomers is that it conveys mechanism instead of intent. For instance, a type class instance is an implicit object or val if unconditional and an implicit def with implicit parameters referring to some class if conditional. This describes precisely what the implicit definitions translate to -- just drop the `implicit` modifier, and that's it! But the cues that define intent are rather indirect and can be easily misread, as demonstrated by the definitions of `i1` and `i2` above. + +4. The syntax of implicit parameters also has shortcomings. While implicit _parameters_ are designated specifically, arguments are not. Passing an argument to an implicit parameter looks like a regular application `f(arg)`. This is problematic because it means there can be confusion regarding what parameter gets instantiated in a call. 
For instance, in + + ```scala + def currentMap(implicit ctx: Context): Map[String, Int] + ``` + + one cannot write `currentMap("abc")` since the string `"abc"` is taken as explicit argument to the implicit `ctx` parameter. One has to write `currentMap.apply("abc")` instead, which is awkward and irregular. For the same reason, a method definition can only have one implicit parameter section and it must always come last. This restriction not only reduces orthogonality, but also prevents some useful program constructs, such as a method with a regular parameter whose type depends on an implicit value. Finally, it's also a bit annoying that implicit parameters must have a name, even though in many cases that name is never referenced. + +5. Implicits pose challenges for tooling. The set of available implicits depends on context, so command completion has to take context into account. This is feasible in an IDE but tools like [Scaladoc](https://docs.scala-lang.org/overviews/scaladoc/overview.html) that are based on static web pages can only provide an approximation. Another problem is that failed implicit searches often give very unspecific error messages, in particular if some deeply recursive implicit search has failed. Note that the Scala 3 compiler has already made a lot of progress in the error diagnostics area. If a recursive search fails some levels down, it shows what was constructed and what is missing. Also, it suggests imports that can bring missing implicits in scope. + +None of the shortcomings is fatal, after all implicits are very widely used, and many libraries and applications rely on them. But together, they make code using implicits a lot more cumbersome and less clear than it could be. + +Historically, many of these shortcomings come from the way implicits were gradually "discovered" in Scala. Scala originally had only implicit conversions with the intended use case of "extending" a class or trait after it was defined, i.e. 
what is expressed by implicit classes in later versions of Scala. Implicit parameters and instance definitions came later in 2006 and we picked similar syntax since it seemed convenient. For the same reason, no effort was made to distinguish implicit imports or arguments from normal ones. + +Existing Scala programmers by and large have gotten used to the status quo and see little need for change. But for newcomers this status quo presents a big hurdle. I believe if we want to overcome that hurdle, we should take a step back and allow ourselves to consider a radically new design. + +## The New Design + +The following pages introduce a redesign of contextual abstractions in Scala. They introduce four fundamental changes: + +1. [Given Instances](./givens.md) are a new way to define basic terms that can be synthesized. They replace implicit definitions. The core principle of the proposal is that, rather than mixing the `implicit` modifier with a large number of features, we have a single way to define terms that can be synthesized for types. + +2. [Using Clauses](./using-clauses.md) are a new syntax for implicit _parameters_ and their _arguments_. It unambiguously aligns parameters and arguments, solving a number of language warts. It also allows us to have several `using` clauses in a definition. + +3. ["Given" Imports](./given-imports.md) are a new class of import selectors that specifically import + givens and nothing else. + +4. [Implicit Conversions](./conversions.md) are now expressed as given instances of a standard `Conversion` class. All other forms of implicit conversions will be phased out. + +This section also contains pages describing other language features that are related to context abstraction. These are: + +- [Context Bounds](./context-bounds.md), which carry over unchanged. +- [Extension Methods](./extension-methods.md) replace implicit classes in a way that integrates better with type classes. 
+- [Implementing Type Classes](./type-classes.md) demonstrates how some common type classes can be implemented using the new constructs. +- [Type Class Derivation](./derivation.md) introduces constructs to automatically derive type class instances for ADTs. +- [Multiversal Equality](./multiversal-equality.md) introduces a special type class to support type safe equality. +- [Context Functions](./context-functions.md) provide a way to abstract over context parameters. +- [By-Name Context Parameters](./by-name-context-parameters.md) are an essential tool to define recursive synthesized values without looping. +- [Relationship with Scala 2 Implicits](./relationship-implicits.md) discusses the relationship between old-style implicits and new-style givens and how to migrate from one to the other. + +Overall, the new design achieves a better separation of term inference from the rest of the language: There is a single way to define givens instead of a multitude of forms all taking an `implicit` modifier. There is a single way to introduce implicit parameters and arguments instead of conflating implicit with normal arguments. There is a separate way to import givens that does not allow them to hide in a sea of normal imports. And there is a single way to define an implicit conversion which is clearly marked as such and does not require special syntax. + +This design thus avoids feature interactions and makes the language more consistent and orthogonal. It will make implicits easier to learn and harder to abuse. It will greatly improve the clarity of the 95% of Scala programs that use implicits. It has thus the potential to fulfil the promise of term inference in a principled way that is also accessible and friendly. + +Could we achieve the same goals by tweaking existing implicits? After having tried for a long time, I believe now that this is impossible. + +- First, some of the problems are clearly syntactic and require different syntax to solve them. 
+- Second, there is the problem how to migrate. We cannot change the rules in mid-flight. At some stage of language evolution we need to accommodate both the new and the old rules. With a syntax change, this is easy: Introduce the new syntax with new rules, support the old syntax for a while to facilitate cross compilation, deprecate and phase out the old syntax at some later time. Keeping the same syntax does not offer this path, and in fact does not seem to offer any viable path for evolution +- Third, even if we would somehow succeed with migration, we still have the problem + how to teach this. We cannot make existing tutorials go away. Almost all existing tutorials start with implicit conversions, which will go away; they use normal imports, which will go away, and they explain calls to methods with implicit parameters by expanding them to plain applications, which will also go away. This means that we'd have + to add modifications and qualifications to all existing literature and courseware, likely causing more confusion with beginners instead of less. By contrast, with a new syntax there is a clear criterion: Any book or courseware that mentions `implicit` is outdated and should be updated. diff --git a/docs/_spec/TODOreference/contextual/conversions.md b/docs/_spec/TODOreference/contextual/conversions.md new file mode 100644 index 000000000000..1ce8d42074e7 --- /dev/null +++ b/docs/_spec/TODOreference/contextual/conversions.md @@ -0,0 +1,76 @@ +--- +layout: doc-page +title: "Implicit Conversions" +nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/conversions.html +--- + +Implicit conversions are defined by given instances of the `scala.Conversion` class. 
+This class is defined in package `scala` as follows: +```scala +abstract class Conversion[-T, +U] extends (T => U): + def apply (x: T): U +``` +For example, here is an implicit conversion from `String` to `Token`: +```scala +given Conversion[String, Token] with + def apply(str: String): Token = new KeyWord(str) +``` +Using an alias this can be expressed more concisely as: +```scala +given Conversion[String, Token] = new KeyWord(_) +``` +An implicit conversion is applied automatically by the compiler in three situations: + +1. If an expression `e` has type `T`, and `T` does not conform to the expression's expected type `S`. +2. In a selection `e.m` with `e` of type `T`, but `T` defines no member `m`. +3. In an application `e.m(args)` with `e` of type `T`, if `T` does define + some member(s) named `m`, but none of these members can be applied to the arguments `args`. + +In the first case, the compiler looks for a given `scala.Conversion` instance that maps +an argument of type `T` to type `S`. In the second and third +case, it looks for a given `scala.Conversion` instance that maps an argument of type `T` +to a type that defines a member `m` which can be applied to `args` if present. +If such an instance `C` is found, the expression `e` is replaced by `C.apply(e)`. + +## Examples + +1. The `Predef` package contains "auto-boxing" conversions that map +primitive number types to subclasses of `java.lang.Number`. For instance, the +conversion from `Int` to `java.lang.Integer` can be defined as follows: + ```scala + given int2Integer: Conversion[Int, java.lang.Integer] = + java.lang.Integer.valueOf(_) + ``` + +2. The "magnet" pattern is sometimes used to express many variants of a method. Instead of defining overloaded versions of the method, one can also let the method take one or more arguments of specially defined "magnet" types, into which various argument types can be converted. 
Example: + ```scala + object Completions: + + // The argument "magnet" type + enum CompletionArg: + case Error(s: String) + case Response(f: Future[HttpResponse]) + case Status(code: Future[StatusCode]) + + object CompletionArg: + + // conversions defining the possible arguments to pass to `complete` + // these always come with CompletionArg + // They can be invoked explicitly, e.g. + // + // CompletionArg.fromStatusCode(statusCode) + + given fromString : Conversion[String, CompletionArg] = Error(_) + given fromFuture : Conversion[Future[HttpResponse], CompletionArg] = Response(_) + given fromStatusCode: Conversion[Future[StatusCode], CompletionArg] = Status(_) + end CompletionArg + import CompletionArg.* + + def complete[T](arg: CompletionArg) = arg match + case Error(s) => ... + case Response(f) => ... + case Status(code) => ... + + end Completions + ``` +This setup is more complicated than simple overloading of `complete`, but it can still be useful if normal overloading is not available (as in the case above, since we cannot have two overloaded methods that take `Future[...]` arguments), or if normal overloading would lead to a combinatorial explosion of variants. diff --git a/docs/_spec/TODOreference/contextual/derivation-macro.md b/docs/_spec/TODOreference/contextual/derivation-macro.md new file mode 100644 index 000000000000..be7565616913 --- /dev/null +++ b/docs/_spec/TODOreference/contextual/derivation-macro.md @@ -0,0 +1,205 @@ +--- +layout: doc-page +title: "How to write a type class `derived` method using macros" +nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/derivation-macro.html +--- + +In the main [derivation](./derivation.md) documentation page, we explained the +details behind `Mirror`s and type class derivation. Here we demonstrate how to +implement a type class `derived` method using macros only. We follow the same +example of deriving `Eq` instances and for simplicity we support a `Product` +type e.g., a case class `Person`. 
The low-level method we will use to implement +the `derived` method exploits quotes, splices of both expressions and types and +the `scala.quoted.Expr.summon` method which is the equivalent of +`summonFrom`. The former is suitable for use in a quote context, used within +macros. + +As in the original code, the type class definition is the same: + +```scala +trait Eq[T]: + def eqv(x: T, y: T): Boolean +``` + +we need to implement a method `Eq.derived` on the companion object of `Eq` that +produces a quoted instance for `Eq[T]`. Here is a possible signature, + +```scala +given derived[T: Type](using Quotes): Expr[Eq[T]] +``` + +and for comparison reasons we give the same signature we had with `inline`: + +```scala +inline given derived[T](using Mirror.Of[T]): Eq[T] = ??? +``` + +Note, that since a type is used in a subsequent stage it will need to be lifted +to a `Type` by using the corresponding context bound. Also, note that we can +summon the quoted `Mirror` inside the body of the `derived` thus we can omit it +from the signature. The body of the `derived` method is shown below: + + +```scala +given derived[T: Type](using Quotes): Expr[Eq[T]] = + import quotes.reflect.* + + val ev: Expr[Mirror.Of[T]] = Expr.summon[Mirror.Of[T]].get + + ev match + case '{ $m: Mirror.ProductOf[T] { type MirroredElemTypes = elementTypes }} => + val elemInstances = summonAll[elementTypes] + def eqProductBody(x: Expr[Product], y: Expr[Product])(using Quotes): Expr[Boolean] = { + elemInstances.zipWithIndex.foldLeft(Expr(true)) { + case (acc, ('{ $elem: Eq[t] }, index)) => + val indexExpr = Expr(index) + val e1 = '{ $x.productElement($indexExpr).asInstanceOf[t] } + val e2 = '{ $y.productElement($indexExpr).asInstanceOf[t] } + '{ $acc && $elem.eqv($e1, $e2) } + } + } + '{ eqProduct((x: T, y: T) => ${eqProductBody('x.asExprOf[Product], 'y.asExprOf[Product])}) } + + // case for Mirror.ProductOf[T] + // ... 
+``` + +Note, that in the `inline` case we can merely write +`summonAll[m.MirroredElemTypes]` inside the inline method but here, since +`Expr.summon` is required, we can extract the element types in a macro fashion. +Being inside a macro, our first reaction would be to write the code below. Since +the path inside the type argument is not stable this cannot be used: + +```scala +'{ + summonAll[$m.MirroredElemTypes] +} +``` + +Instead we extract the tuple-type for element types using pattern matching over +quotes and more specifically of the refined type: + +```scala + case '{ $m: Mirror.ProductOf[T] { type MirroredElemTypes = elementTypes }} => ... +``` + +Shown below is the implementation of `summonAll` as a macro. We assume that +given instances for our primitive types exist. + +```scala +def summonAll[T: Type](using Quotes): List[Expr[Eq[_]]] = + Type.of[T] match + case '[String *: tpes] => '{ summon[Eq[String]] } :: summonAll[tpes] + case '[Int *: tpes] => '{ summon[Eq[Int]] } :: summonAll[tpes] + case '[tpe *: tpes] => derived[tpe] :: summonAll[tpes] + case '[EmptyTuple] => Nil +``` + +One additional difference with the body of `derived` here as opposed to the one +with `inline` is that with macros we need to synthesize the body of the code during the +macro-expansion time. That is the rationale behind the `eqProductBody` function. +Assuming that we calculate the equality of two `Person`s defined with a case +class that holds a name of type [`String`](https://scala-lang.org/api/3.x/scala/Predef$.html#String-0) +and an age of type `Int`, the equality check we want to generate is the following: + +```scala + true + && Eq[String].eqv(x.productElement(0),y.productElement(0)) + && Eq[Int].eqv(x.productElement(1), y.productElement(1)) +``` + +## Calling the derived method inside the macro + +Following the rules in [Macros](../metaprogramming/metaprogramming.md) we create two methods. +One that hosts the top-level splice `eqv` and one that is the implementation. 
+Alternatively and what is shown below is that we can call the `eqv` method +directly. The `eqGen` can trigger the derivation. + +```scala +extension [T](inline x: T) + inline def === (inline y: T)(using eq: Eq[T]): Boolean = eq.eqv(x, y) + +inline given eqGen[T]: Eq[T] = ${ Eq.derived[T] } +``` + +Note, that we use inline method syntax and we can compare instance such as +`Sm(Person("Test", 23)) === Sm(Person("Test", 24))` for e.g., the following two +types: + +```scala +case class Person(name: String, age: Int) + +enum Opt[+T]: + case Sm(t: T) + case Nn +``` + +The full code is shown below: + +```scala +import scala.deriving.* +import scala.quoted.* + + +trait Eq[T]: + def eqv(x: T, y: T): Boolean + +object Eq: + given Eq[String] with + def eqv(x: String, y: String) = x == y + + given Eq[Int] with + def eqv(x: Int, y: Int) = x == y + + def eqProduct[T](body: (T, T) => Boolean): Eq[T] = + new Eq[T]: + def eqv(x: T, y: T): Boolean = body(x, y) + + def eqSum[T](body: (T, T) => Boolean): Eq[T] = + new Eq[T]: + def eqv(x: T, y: T): Boolean = body(x, y) + + def summonAll[T: Type](using Quotes): List[Expr[Eq[_]]] = + Type.of[T] match + case '[String *: tpes] => '{ summon[Eq[String]] } :: summonAll[tpes] + case '[Int *: tpes] => '{ summon[Eq[Int]] } :: summonAll[tpes] + case '[tpe *: tpes] => derived[tpe] :: summonAll[tpes] + case '[EmptyTuple] => Nil + + given derived[T: Type](using q: Quotes): Expr[Eq[T]] = + import quotes.reflect.* + + val ev: Expr[Mirror.Of[T]] = Expr.summon[Mirror.Of[T]].get + + ev match + case '{ $m: Mirror.ProductOf[T] { type MirroredElemTypes = elementTypes }} => + val elemInstances = summonAll[elementTypes] + val eqProductBody: (Expr[T], Expr[T]) => Expr[Boolean] = (x, y) => + elemInstances.zipWithIndex.foldLeft(Expr(true: Boolean)) { + case (acc, (elem, index)) => + val e1 = '{$x.asInstanceOf[Product].productElement(${Expr(index)})} + val e2 = '{$y.asInstanceOf[Product].productElement(${Expr(index)})} + + '{ $acc && 
$elem.asInstanceOf[Eq[Any]].eqv($e1, $e2) } + } + '{ eqProduct((x: T, y: T) => ${eqProductBody('x, 'y)}) } + + case '{ $m: Mirror.SumOf[T] { type MirroredElemTypes = elementTypes }} => + val elemInstances = summonAll[elementTypes] + val eqSumBody: (Expr[T], Expr[T]) => Expr[Boolean] = (x, y) => + val ordx = '{ $m.ordinal($x) } + val ordy = '{ $m.ordinal($y) } + + val elements = Expr.ofList(elemInstances) + '{ $ordx == $ordy && $elements($ordx).asInstanceOf[Eq[Any]].eqv($x, $y) } + + '{ eqSum((x: T, y: T) => ${eqSumBody('x, 'y)}) } + end derived +end Eq + +object Macro3: + extension [T](inline x: T) + inline def === (inline y: T)(using eq: Eq[T]): Boolean = eq.eqv(x, y) + + inline given eqGen[T]: Eq[T] = ${ Eq.derived[T] } +``` diff --git a/docs/_spec/TODOreference/contextual/derivation.md b/docs/_spec/TODOreference/contextual/derivation.md new file mode 100644 index 000000000000..bad47dcb0096 --- /dev/null +++ b/docs/_spec/TODOreference/contextual/derivation.md @@ -0,0 +1,425 @@ +--- +layout: doc-page +title: "Type Class Derivation" +nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/derivation.html +--- + +Type class derivation is a way to automatically generate given instances for type classes which satisfy some simple +conditions. A type class in this sense is any trait or class with a type parameter determining the type being operated +on. Common examples are `Eq`, `Ordering`, or `Show`. 
For example, given the following `Tree` algebraic data type +(ADT), + +```scala +enum Tree[T] derives Eq, Ordering, Show: + case Branch(left: Tree[T], right: Tree[T]) + case Leaf(elem: T) +``` + +The `derives` clause generates the following given instances for the `Eq`, `Ordering` and `Show` type classes in the +companion object of `Tree`, + +```scala +given [T: Eq] : Eq[Tree[T]] = Eq.derived +given [T: Ordering] : Ordering[Tree[T]] = Ordering.derived +given [T: Show] : Show[Tree[T]] = Show.derived +``` + +We say that `Tree` is the _deriving type_ and that the `Eq`, `Ordering` and `Show` instances are _derived instances_. + +## Types supporting `derives` clauses + +All data types can have a `derives` clause. This document focuses primarily on data types which also have a given instance +of the `Mirror` type class available. + +`Mirror` type class instances provide information at the type level about the components and labelling of the type. +They also provide minimal term level infrastructure to allow higher level libraries to provide comprehensive +derivation support. + +Instances of the `Mirror` type class are generated automatically by the compiler +unconditionally for: +- enums and enum cases, +- case objects. + +Instances for `Mirror` are also generated conditionally for: +- case classes where the constructor is visible at the callsite (always true if the companion is not a case object) +- sealed classes and sealed traits where: + - there exists at least one child case, + - each child case is reachable from the parent's definition, + - if the sealed trait/class has no companion, then each child case is reachable from the callsite through the prefix of the type being mirrored, + - and where the compiler can generate a `Mirror` type class instance for each child case. 
+ + +The `Mirror` type class definition is as follows: + +```scala +sealed trait Mirror: + + /** the type being mirrored */ + type MirroredType + + /** the type of the elements of the mirrored type */ + type MirroredElemTypes + + /** The mirrored *-type */ + type MirroredMonoType + + /** The name of the type */ + type MirroredLabel <: String + + /** The names of the elements of the type */ + type MirroredElemLabels <: Tuple + +object Mirror: + + /** The Mirror for a product type */ + trait Product extends Mirror: + + /** Create a new instance of type `T` with elements + * taken from product `p`. + */ + def fromProduct(p: scala.Product): MirroredMonoType + + trait Sum extends Mirror: + + /** The ordinal number of the case class of `x`. + * For enums, `ordinal(x) == x.ordinal` + */ + def ordinal(x: MirroredMonoType): Int + +end Mirror +``` + +Product types (i.e. case classes and objects, and enum cases) have mirrors which are subtypes of `Mirror.Product`. Sum +types (i.e. sealed class or traits with product children, and enums) have mirrors which are subtypes of `Mirror.Sum`. + +For the `Tree` ADT from above the following `Mirror` instances will be automatically provided by the compiler, + +```scala +// Mirror for Tree +new Mirror.Sum: + type MirroredType = Tree + type MirroredElemTypes[T] = (Branch[T], Leaf[T]) + type MirroredMonoType = Tree[_] + type MirroredLabel = "Tree" + type MirroredElemLabels = ("Branch", "Leaf") + + def ordinal(x: MirroredMonoType): Int = x match + case _: Branch[_] => 0 + case _: Leaf[_] => 1 + +// Mirror for Branch +new Mirror.Product: + type MirroredType = Branch + type MirroredElemTypes[T] = (Tree[T], Tree[T]) + type MirroredMonoType = Branch[_] + type MirroredLabel = "Branch" + type MirroredElemLabels = ("left", "right") + + def fromProduct(p: Product): MirroredMonoType = + new Branch(...) 
+ +// Mirror for Leaf +new Mirror.Product: + type MirroredType = Leaf + type MirroredElemTypes[T] = Tuple1[T] + type MirroredMonoType = Leaf[_] + type MirroredLabel = "Leaf" + type MirroredElemLabels = Tuple1["elem"] + + def fromProduct(p: Product): MirroredMonoType = + new Leaf(...) +``` + +If a Mirror cannot be generated automatically for a given type, an error will appear explaining why it is neither a supported +sum type nor a product type. For example, if `A` is a trait that is not sealed, + +``` +No given instance of type deriving.Mirror.Of[A] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[A]: + * trait A is not a generic product because it is not a case class + * trait A is not a generic sum because it is not a sealed trait +``` + + +Note the following properties of `Mirror` types, + ++ Properties are encoded using types rather than terms. This means that they have no runtime footprint unless used and + also that they are a compile time feature for use with Scala 3's metaprogramming facilities. ++ There is no restriction against the mirrored type being a local or inner class. ++ The kinds of `MirroredType` and `MirroredElemTypes` match the kind of the data type the mirror is an instance for. + This allows `Mirror`s to support ADTs of all kinds. ++ There is no distinct representation type for sums or products (ie. there is no `HList` or `Coproduct` type as in + Scala 2 versions of Shapeless). Instead the collection of child types of a data type is represented by an ordinary, + possibly parameterized, tuple type. Scala 3's metaprogramming facilities can be used to work with these tuple types + as-is, and higher level libraries can be built on top of them. ++ For both product and sum types, the elements of `MirroredElemTypes` are arranged in definition order (i.e. 
`Branch[T]` + precedes `Leaf[T]` in `MirroredElemTypes` for `Tree` because `Branch` is defined before `Leaf` in the source file). + This means that `Mirror.Sum` differs in this respect from Shapeless's generic representation for ADTs in Scala 2, + where the constructors are ordered alphabetically by name. ++ The methods `ordinal` and `fromProduct` are defined in terms of `MirroredMonoType` which is the type of kind-`*` + which is obtained from `MirroredType` by wildcarding its type parameters. + +## Type classes supporting automatic deriving + +A trait or class can appear in a `derives` clause if its companion object defines a method named `derived`. The +signature and implementation of a `derived` method for a type class `TC[_]` are arbitrary but it is typically of the +following form, + +```scala +import scala.deriving.Mirror + +inline def derived[T](using Mirror.Of[T]): TC[T] = ... +``` + +That is, the `derived` method takes a context parameter of (some subtype of) type `Mirror` which defines the shape of +the deriving type `T`, and computes the type class implementation according to that shape. This is all that the +provider of an ADT with a `derives` clause has to know about the derivation of a type class instance. + +Note that `derived` methods may have context `Mirror` parameters indirectly (e.g. by having a context argument which in turn +has a context `Mirror` parameter), or not at all (e.g. they might use some completely different user-provided mechanism, for +instance using Scala 3 macros or runtime reflection). We expect that (direct or indirect) `Mirror` based implementations +will be the most common and that is what this document emphasises. + +Type class authors will most likely use higher level derivation or generic programming libraries to implement +`derived` methods. An example of how a `derived` method might be implemented using _only_ the low level facilities +described above and Scala 3's general metaprogramming features is provided below.
It is not anticipated that type class +authors would normally implement a `derived` method in this way, however this walkthrough can be taken as a guide for +authors of the higher level derivation libraries that we expect typical type class authors will use (for a fully +worked out example of such a library, see [Shapeless 3](https://github.com/milessabin/shapeless/tree/shapeless-3)). + +## How to write a type class `derived` method using low level mechanisms + +The low-level method we will use to implement a type class `derived` method in this example exploits three new +type-level constructs in Scala 3: inline methods, inline matches, and implicit searches via `summonInline` or `summonFrom`. Given this definition of the +`Eq` type class, + +```scala +trait Eq[T]: + def eqv(x: T, y: T): Boolean +``` + +we need to implement a method `Eq.derived` on the companion object of `Eq` that produces a given instance for `Eq[T]` given +a `Mirror[T]`. Here is a possible implementation, + +```scala +import scala.deriving.Mirror + +inline given derived[T](using m: Mirror.Of[T]): Eq[T] = + val elemInstances = summonAll[m.MirroredElemTypes] // (1) + inline m match // (2) + case s: Mirror.SumOf[T] => eqSum(s, elemInstances) + case p: Mirror.ProductOf[T] => eqProduct(p, elemInstances) +``` + +Note that `derived` is defined as an `inline` given. This means that the method will be expanded at +call sites (for instance the compiler generated instance definitions in the companion objects of ADTs which have a +`derived Eq` clause), and also that it can be used recursively if necessary, to compute instances for children. + +The body of this method (1) first materializes the `Eq` instances for all the child types of type the instance is +being derived for. This is either all the branches of a sum type or all the fields of a product type. 
The +implementation of `summonAll` is `inline` and uses Scala 3's `summonInline` construct to collect the instances as a +`List`, + +```scala +inline def summonAll[T <: Tuple]: List[Eq[_]] = + inline erasedValue[T] match + case _: EmptyTuple => Nil + case _: (t *: ts) => summonInline[Eq[t]] :: summonAll[ts] +``` + +With the instances for children in hand, the `derived` method uses an `inline match` to dispatch to methods which can +construct instances for either sums or products (2). Note that because `derived` is `inline` the match will be +resolved at compile-time and only the left-hand side of the matching case will be inlined into the generated code with +types refined as revealed by the match. + +In the sum case, `eqSum`, we use the runtime `ordinal` values of the arguments to `eqv` to first check if the two +values are of the same subtype of the ADT (3) and then, if they are, to further test for equality based on the `Eq` +instance for the appropriate ADT subtype using the auxiliary method `check` (4).
+ +```scala +import scala.deriving.Mirror + +def eqSum[T](s: Mirror.SumOf[T], elems: List[Eq[_]]): Eq[T] = + new Eq[T]: + def eqv(x: T, y: T): Boolean = + val ordx = s.ordinal(x) // (3) + (s.ordinal(y) == ordx) && check(elems(ordx))(x, y) // (4) +``` + +In the product case, `eqProduct` we test the runtime values of the arguments to `eqv` for equality as products based +on the `Eq` instances for the fields of the data type (5), + +```scala +import scala.deriving.Mirror + +def eqProduct[T](p: Mirror.ProductOf[T], elems: List[Eq[_]]): Eq[T] = + new Eq[T]: + def eqv(x: T, y: T): Boolean = + iterator(x).zip(iterator(y)).zip(elems.iterator).forall { // (5) + case ((x, y), elem) => check(elem)(x, y) + } +``` + +Pulling this all together we have the following complete implementation, + +```scala +import scala.deriving.* +import scala.compiletime.{erasedValue, summonInline} + +inline def summonAll[T <: Tuple]: List[Eq[_]] = + inline erasedValue[T] match + case _: EmptyTuple => Nil + case _: (t *: ts) => summonInline[Eq[t]] :: summonAll[ts] + +trait Eq[T]: + def eqv(x: T, y: T): Boolean + +object Eq: + given Eq[Int] with + def eqv(x: Int, y: Int) = x == y + + def check(elem: Eq[_])(x: Any, y: Any): Boolean = + elem.asInstanceOf[Eq[Any]].eqv(x, y) + + def iterator[T](p: T) = p.asInstanceOf[Product].productIterator + + def eqSum[T](s: Mirror.SumOf[T], elems: => List[Eq[_]]): Eq[T] = + new Eq[T]: + def eqv(x: T, y: T): Boolean = + val ordx = s.ordinal(x) + (s.ordinal(y) == ordx) && check(elems(ordx))(x, y) + + def eqProduct[T](p: Mirror.ProductOf[T], elems: => List[Eq[_]]): Eq[T] = + new Eq[T]: + def eqv(x: T, y: T): Boolean = + iterator(x).zip(iterator(y)).zip(elems.iterator).forall { + case ((x, y), elem) => check(elem)(x, y) + } + + inline given derived[T](using m: Mirror.Of[T]): Eq[T] = + lazy val elemInstances = summonAll[m.MirroredElemTypes] + inline m match + case s: Mirror.SumOf[T] => eqSum(s, elemInstances) + case p: Mirror.ProductOf[T] => eqProduct(p, elemInstances) 
+end Eq +``` + +we can test this relative to a simple ADT like so, + +```scala +enum Opt[+T] derives Eq: + case Sm(t: T) + case Nn + +@main def test(): Unit = + import Opt.* + val eqoi = summon[Eq[Opt[Int]]] + assert(eqoi.eqv(Sm(23), Sm(23))) + assert(!eqoi.eqv(Sm(23), Sm(13))) + assert(!eqoi.eqv(Sm(23), Nn)) +``` + +In this case the code that is generated by the inline expansion for the derived `Eq` instance for `Opt` looks like the +following, after a little polishing, + +```scala +given derived$Eq[T](using eqT: Eq[T]): Eq[Opt[T]] = + eqSum( + summon[Mirror[Opt[T]]], + List( + eqProduct(summon[Mirror[Sm[T]]], List(summon[Eq[T]])), + eqProduct(summon[Mirror[Nn.type]], Nil) + ) + ) +``` + +Alternative approaches can be taken to the way that `derived` methods can be defined. For example, more aggressively +inlined variants using Scala 3 macros, whilst being more involved for type class authors to write than the example +above, can produce code for type classes like `Eq` which eliminate all the abstraction artefacts (eg. the `Lists` of +child instances in the above) and generate code which is indistinguishable from what a programmer might write by hand. +As a third example, using a higher level library such as Shapeless the type class author could define an equivalent +`derived` method as, + +```scala +given eqSum[A](using inst: => K0.CoproductInstances[Eq, A]): Eq[A] with + def eqv(x: A, y: A): Boolean = inst.fold2(x, y)(false)( + [t] => (eqt: Eq[t], t0: t, t1: t) => eqt.eqv(t0, t1) + ) + +given eqProduct[A](using inst: K0.ProductInstances[Eq, A]): Eq[A] with + def eqv(x: A, y: A): Boolean = inst.foldLeft2(x, y)(true: Boolean)( + [t] => (acc: Boolean, eqt: Eq[t], t0: t, t1: t) => + Complete(!eqt.eqv(t0, t1))(false)(true) + ) + +inline def derived[A](using gen: K0.Generic[A]): Eq[A] = + gen.derive(eqProduct, eqSum) +``` + +The framework described here enables all three of these approaches without mandating any of them. 
+ +For a brief discussion on how to use macros to write a type class `derived` +method please read more at [How to write a type class `derived` method using macros](./derivation-macro.md). + +## Deriving instances elsewhere + +Sometimes one would like to derive a type class instance for an ADT after the ADT is defined, without being able to +change the code of the ADT itself. To do this, simply define an instance using the `derived` method of the type class +as right-hand side. E.g, to implement `Ordering` for `Option` define, + +```scala +given [T: Ordering]: Ordering[Option[T]] = Ordering.derived +``` + +Assuming the `Ordering.derived` method has a context parameter of type `Mirror[T]` it will be satisfied by the +compiler generated `Mirror` instance for `Option` and the derivation of the instance will be expanded on the right +hand side of this definition in the same way as an instance defined in ADT companion objects. + +## Syntax + +``` +Template ::= InheritClauses [TemplateBody] +EnumDef ::= id ClassConstr InheritClauses EnumBody +InheritClauses ::= [‘extends’ ConstrApps] [‘derives’ QualId {‘,’ QualId}] +ConstrApps ::= ConstrApp {‘with’ ConstrApp} + | ConstrApp {‘,’ ConstrApp} +``` + +**Note:** To align `extends` clauses and `derives` clauses, Scala 3 also allows multiple +extended types to be separated by commas. So the following is now legal: + +```scala +class A extends B, C { ... } +``` + +It is equivalent to the old form + +```scala +class A extends B with C { ... } +``` + +## Discussion + +This type class derivation framework is intentionally very small and low-level. There are essentially two pieces of +infrastructure in compiler-generated `Mirror` instances, + ++ type members encoding properties of the mirrored types. ++ a minimal value level mechanism for working generically with terms of the mirrored types. 
+ +The `Mirror` infrastructure can be seen as an extension of the existing `Product` infrastructure for case classes: +typically `Mirror` types will be implemented by the ADTs companion object, hence the type members and the `ordinal` or +`fromProduct` methods will be members of that object. The primary motivation for this design decision, and the +decision to encode properties via types rather than terms was to keep the bytecode and runtime footprint of the +feature small enough to make it possible to provide `Mirror` instances _unconditionally_. + +Whilst `Mirrors` encode properties precisely via type members, the value level `ordinal` and `fromProduct` are +somewhat weakly typed (because they are defined in terms of `MirroredMonoType`) just like the members of `Product`. +This means that code for generic type classes has to ensure that type exploration and value selection proceed in +lockstep and it has to assert this conformance in some places using casts. If generic type classes are correctly +written these casts will never fail. + +As mentioned, however, the compiler-provided mechanism is intentionally very low level and it is anticipated that +higher level type class derivation and generic programming libraries will build on this and Scala 3's other +metaprogramming facilities to hide these low-level details from type class authors and general users. Type class +derivation in the style of both Shapeless and Magnolia are possible (a prototype of Shapeless 3, which combines +aspects of both Shapeless 2 and Magnolia has been developed alongside this language feature) as is a more aggressively +inlined style, supported by Scala 3's new quote/splice macro and inlining facilities. 
diff --git a/docs/_spec/TODOreference/contextual/extension-methods.md b/docs/_spec/TODOreference/contextual/extension-methods.md new file mode 100644 index 000000000000..d23cadf513d7 --- /dev/null +++ b/docs/_spec/TODOreference/contextual/extension-methods.md @@ -0,0 +1,306 @@ +--- +layout: doc-page +title: "Extension Methods" +nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/extension-methods.html +--- + +Extension methods allow one to add methods to a type after the type is defined. Example: + +```scala +case class Circle(x: Double, y: Double, radius: Double) + +extension (c: Circle) + def circumference: Double = c.radius * math.Pi * 2 +``` + +Like regular methods, extension methods can be invoked with infix `.`: + +```scala +val circle = Circle(0, 0, 1) +circle.circumference +``` + +## Translation of Extension Methods + +An extension method translates to a specially labelled method that takes the leading parameter section as its first argument list. The label, expressed +as `<extension>` here, is compiler-internal. So, the definition of `circumference` above translates to the following method, and can also be invoked as such: + +``` +<extension> def circumference(c: Circle): Double = c.radius * math.Pi * 2 + +assert(circle.circumference == circumference(circle)) +``` + +## Operators + +The extension method syntax can also be used to define operators. Examples: + +```scala +extension (x: String) + def < (y: String): Boolean = ... +extension (x: Elem) + def +: (xs: Seq[Elem]): Seq[Elem] = ... +extension (x: Number) + infix def min (y: Number): Number = ... + +"ab" < "c" +1 +: List(2, 3) +x min 3 +``` + +The three definitions above translate to + +``` +<extension> def < (x: String)(y: String): Boolean = ... +<extension> def +: (xs: Seq[Elem])(x: Elem): Seq[Elem] = ... +<extension> infix def min(x: Number)(y: Number): Number = ... +``` + +Note the swap of the two parameters `x` and `xs` when translating +the right-associative operator `+:` to an extension method.
This is analogous +to the implementation of right binding operators as normal methods. The Scala +compiler preprocesses an infix operation `x +: xs` to `xs.+:(x)`, so the extension +method ends up being applied to the sequence as first argument (in other words, the +two swaps cancel each other out). See [here for details](./right-associative-extension-methods.md). + +## Generic Extensions + +It is also possible to extend generic types by adding type parameters to an extension. For instance: + +```scala +extension [T](xs: List[T]) + def second = xs.tail.head + +extension [T: Numeric](x: T) + def + (y: T): T = summon[Numeric[T]].plus(x, y) +``` + +Type parameters on extensions can also be combined with type parameters on the methods +themselves: + +```scala +extension [T](xs: List[T]) + def sumBy[U: Numeric](f: T => U): U = ... +``` + +Type arguments matching method type parameters are passed as usual: + +```scala +List("a", "bb", "ccc").sumBy[Int](_.length) +``` + +By contrast, type arguments matching type parameters following `extension` can be passed +only if the method is referenced as a non-extension method: + +```scala +sumBy[String](List("a", "bb", "ccc"))(_.length) +``` + +Or, when passing both type arguments: + +```scala +sumBy[String](List("a", "bb", "ccc"))[Int](_.length) +``` + +Extensions can also take using clauses. For instance, the `+` extension above could equivalently be written with a using clause: + +```scala +extension [T](x: T)(using n: Numeric[T]) + def + (y: T): T = n.plus(x, y) +``` + +## Collective Extensions + +Sometimes, one wants to define several extension methods that share the same +left-hand parameter type. In this case one can "pull out" the common parameters into +a single extension and enclose all methods in braces or an indented region. 
+Example: + +```scala +extension (ss: Seq[String]) + + def longestStrings: Seq[String] = + val maxLength = ss.map(_.length).max + ss.filter(_.length == maxLength) + + def longestString: String = longestStrings.head +``` + +The same can be written with braces as follows (note that indented regions can still be used inside braces): + +```scala +extension (ss: Seq[String]) { + + def longestStrings: Seq[String] = { + val maxLength = ss.map(_.length).max + ss.filter(_.length == maxLength) + } + + def longestString: String = longestStrings.head +} +``` + +Note the right-hand side of `longestString`: it calls `longestStrings` directly, implicitly +assuming the common extended value `ss` as receiver. + +Collective extensions like these are a shorthand for individual extensions +where each method is defined separately. For instance, the first extension above expands to: + +```scala +extension (ss: Seq[String]) + def longestStrings: Seq[String] = + val maxLength = ss.map(_.length).max + ss.filter(_.length == maxLength) + +extension (ss: Seq[String]) + def longestString: String = ss.longestStrings.head +``` + +Collective extensions also can take type parameters and have using clauses. Example: + +```scala +extension [T](xs: List[T])(using Ordering[T]) + def smallest(n: Int): List[T] = xs.sorted.take(n) + def smallestIndices(n: Int): List[Int] = + val limit = smallest(n).max + xs.zipWithIndex.collect { case (x, i) if x <= limit => i } +``` + +## Translation of Calls to Extension Methods + +To convert a reference to an extension method, the compiler has to know about the extension +method. We say in this case that the extension method is _applicable_ at the point of reference. +There are four possible ways for an extension method to be applicable: + + 1. The extension method is visible under a simple name, by being defined or inherited or imported in a scope enclosing the reference. + 2. 
The extension method is a member of some given + instance that is visible at the point of the reference. + 3. The reference is of the form `r.m` and the extension method + is defined in the implicit scope of the type of `r`. + 4. The reference is of the form `r.m` and the extension method + is defined in some given instance in the implicit scope of the type of `r`. + +Here is an example for the first rule: + +```scala +trait IntOps: + extension (i: Int) def isZero: Boolean = i == 0 + + extension (i: Int) def safeMod(x: Int): Option[Int] = + // extension method defined in same scope IntOps + if x.isZero then None + else Some(i % x) + +object IntOpsEx extends IntOps: + extension (i: Int) def safeDiv(x: Int): Option[Int] = + // extension method brought into scope via inheritance from IntOps + if x.isZero then None + else Some(i / x) + +trait SafeDiv: + import IntOpsEx.* // brings safeDiv and safeMod into scope + + extension (i: Int) def divide(d: Int): Option[(Int, Int)] = + // extension methods imported and thus in scope + (i.safeDiv(d), i.safeMod(d)) match + case (Some(d), Some(r)) => Some((d, r)) + case _ => None +``` + +By the second rule, an extension method can be made available by defining a given instance containing it, like this: + +```scala +given ops1: IntOps() // brings safeMod into scope + +1.safeMod(2) +``` + +By the third and fourth rule, an extension method is available if it is in the implicit scope of the receiver type or in a given instance in that scope. Example: + +```scala +class List[T]: + ... +object List: + ... + extension [T](xs: List[List[T]]) + def flatten: List[T] = xs.foldLeft(List.empty[T])(_ ++ _) + + given [T: Ordering]: Ordering[List[T]] with + extension (xs: List[T]) + def < (ys: List[T]): Boolean = ... 
+end List + +// extension method available since it is in the implicit scope +// of List[List[Int]] +List(List(1, 2), List(3, 4)).flatten + +// extension method available since it is in the given Ordering[List[T]], +// which is itself in the implicit scope of List[Int] +List(1, 2) < List(3) +``` + +The precise rules for resolving a selection to an extension method are as follows. + +Assume a selection `e.m[Ts]` where `m` is not a member of `e`, where the type arguments `[Ts]` are optional, and where `T` is the expected type. +The following two rewritings are tried in order: + + 1. The selection is rewritten to `m[Ts](e)`. + 2. If the first rewriting does not typecheck with expected type `T`, + and there is an extension method `m` in some eligible object `o`, the selection is rewritten to `o.m[Ts](e)`. An object `o` is _eligible_ if + + - `o` forms part of the implicit scope of `T`, or + - `o` is a given instance that is visible at the point of the application, or + - `o` is a given instance in the implicit scope of `T`. + + This second rewriting is attempted at the time where the compiler also tries an implicit conversion + from `T` to a type containing `m`. If there is more than one way of rewriting, an ambiguity error results. + +An extension method can also be referenced using a simple identifier without a preceding expression. If an identifier `g` appears in the body of an extension method `f` and refers to an extension method `g` that is defined in the same collective extension + +```scala +extension (x: T) + def f ... = ... g ... + def g ... +``` + +the identifier is rewritten to `x.g`. This is also the case if `f` and `g` are the same method. Example: + +```scala +extension (s: String) + def position(ch: Char, n: Int): Int = + if n < s.length && s(n) != ch then position(ch, n + 1) + else n +``` + +The recursive call `position(ch, n + 1)` expands to `s.position(ch, n + 1)` in this case. 
The whole extension method rewrites to + +```scala +def position(s: String)(ch: Char, n: Int): Int = + if n < s.length && s(n) != ch then position(s)(ch, n + 1) + else n +``` + +## Syntax + +Here are the syntax changes for extension methods and collective extensions relative +to the [current syntax](../syntax.md). + +``` +BlockStat ::= ... | Extension +TemplateStat ::= ... | Extension +TopStat ::= ... | Extension +Extension ::= ‘extension’ [DefTypeParamClause] {UsingParamClause} + ‘(’ DefParam ‘)’ {UsingParamClause} ExtMethods +ExtMethods ::= ExtMethod | [nl] <<< ExtMethod {semi ExtMethod} >>> +ExtMethod ::= {Annotation [nl]} {Modifier} ‘def’ DefDef +``` + +In the above the notation `<<< ts >>>` in the production rule `ExtMethods` is defined as follows : + +``` +<<< ts >>> ::= ‘{’ ts ‘}’ | indent ts outdent +``` + +`extension` is a soft keyword. It is recognized as a keyword only if it appears +at the start of a statement and is followed by `[` or `(`. In all other cases +it is treated as an identifier. diff --git a/docs/_spec/TODOreference/contextual/given-imports.md b/docs/_spec/TODOreference/contextual/given-imports.md new file mode 100644 index 000000000000..6a55368979b1 --- /dev/null +++ b/docs/_spec/TODOreference/contextual/given-imports.md @@ -0,0 +1,117 @@ +--- +layout: doc-page +title: "Importing Givens" +nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/given-imports.html +--- + +A special form of import wildcard selector is used to import given instances. Example: + +```scala +object A: + class TC + given tc: TC = ??? + def f(using TC) = ??? + +object B: + import A.* + import A.given + ... +``` + +In the code above, the `import A.*` clause in object `B` imports all members +of `A` _except_ the given instance `tc`. Conversely, the second import `import A.given` will import _only_ that given instance. +The two import clauses can also be merged into one: + +```scala +object B: + import A.{given, *} + ... 
+``` + +Generally, a normal wildcard selector `*` brings all definitions other than givens or extensions into scope +whereas a `given` selector brings all givens (including those resulting from extensions) into scope. + +There are two main benefits arising from these rules: + +- It is made clearer where givens in scope are coming from. + In particular, it is not possible to hide imported givens in a long list of regular wildcard imports. +- It enables importing all givens + without importing anything else. This is particularly important since givens + can be anonymous, so the usual recourse of using named imports is not + practical. + +## Importing By Type + +Since givens can be anonymous it is not always practical to import them by their name, and wildcard imports are typically used instead. By-type imports provide a more specific alternative to wildcard imports, which makes it clearer what is imported. Example: + +```scala +import A.given TC +``` + +This imports any given in `A` that has a type which conforms to `TC`. Importing givens of several types `T1,...,Tn` +is expressed by multiple `given` selectors. + +```scala +import A.{given T1, ..., given Tn} +``` + +Importing all given instances of a parameterized type is expressed by wildcard arguments. +For instance, assuming the object + +```scala +object Instances: + given intOrd: Ordering[Int] = ... + given listOrd[T: Ordering]: Ordering[List[T]] = ... + given ec: ExecutionContext = ... + given im: Monoid[Int] = ... +``` + +the import clause + +```scala +import Instances.{given Ordering[?], given ExecutionContext} +``` + +would import the `intOrd`, `listOrd`, and `ec` instances but leave out the `im` instance, since it fits none of the specified bounds. + +By-type imports can be mixed with by-name imports. If both are present in an import clause, by-type imports come last. 
For instance, the import clause + +```scala +import Instances.{im, given Ordering[?]} +``` + +would import `im`, `intOrd`, and `listOrd` but leave out `ec`. + +## Migration + +The rules for imports stated above have the consequence that a library +would have to migrate in lockstep with all its users from old style implicits and +normal imports to givens and given imports. + +The following modifications avoid this hurdle to migration. + + 1. A `given` import selector also brings old style implicits into scope. So, in Scala 3.0 + an old-style implicit definition can be brought into scope either by a `*` or a `given` wildcard selector. + + 2. In Scala 3.1, old-style implicits accessed through a `*` wildcard import will give a deprecation warning. + + 3. In some version after 3.1, old-style implicits accessed through a `*` wildcard import will give a compiler error. + +These rules mean that library users can use `given` selectors to access old-style implicits in Scala 3.0, +and will be gently nudged and then forced to do so in later versions. Libraries can then switch to +given instances once their user base has migrated. 
+ +## Syntax + +``` +Import ::= ‘import’ ImportExpr {‘,’ ImportExpr} +Export ::= ‘export’ ImportExpr {‘,’ ImportExpr} +ImportExpr ::= SimpleRef {‘.’ id} ‘.’ ImportSpec +ImportSpec ::= NamedSelector + | WildCardSelector + | ‘{’ ImportSelectors ‘}’ +NamedSelector ::= id [‘as’ (id | ‘_’)] +WildCardSelector ::= ‘*’ | ‘given’ [InfixType] +ImportSelectors ::= NamedSelector [‘,’ ImportSelectors] + | WildCardSelector {‘,’ WildCardSelector} +``` diff --git a/docs/_spec/TODOreference/contextual/givens.md b/docs/_spec/TODOreference/contextual/givens.md new file mode 100644 index 000000000000..411d50ba63ea --- /dev/null +++ b/docs/_spec/TODOreference/contextual/givens.md @@ -0,0 +1,193 @@ +--- +layout: doc-page +title: "Given Instances" +nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/givens.html +--- + +Given instances (or, simply, "givens") define "canonical" values of certain types +that serve for synthesizing arguments to [context parameters](./using-clauses.md). Example: + +```scala +trait Ord[T]: + def compare(x: T, y: T): Int + extension (x: T) def < (y: T) = compare(x, y) < 0 + extension (x: T) def > (y: T) = compare(x, y) > 0 + +given intOrd: Ord[Int] with + def compare(x: Int, y: Int) = + if x < y then -1 else if x > y then +1 else 0 + +given listOrd[T](using ord: Ord[T]): Ord[List[T]] with + + def compare(xs: List[T], ys: List[T]): Int = (xs, ys) match + case (Nil, Nil) => 0 + case (Nil, _) => -1 + case (_, Nil) => +1 + case (x :: xs1, y :: ys1) => + val fst = ord.compare(x, y) + if fst != 0 then fst else compare(xs1, ys1) + +``` + +This code defines a trait `Ord` with two given instances. `intOrd` defines +a given for the type `Ord[Int]` whereas `listOrd[T]` defines givens +for `Ord[List[T]]` for all types `T` that come with a given instance for `Ord[T]` +themselves. The `using` clause in `listOrd` defines a condition: There must be a +given of type `Ord[T]` for a given of type `Ord[List[T]]` to exist. 
+Such conditions are expanded by the compiler to [context parameters](./using-clauses.md). + +## Anonymous Givens + +The name of a given can be left out. So the definitions +of the last section can also be expressed like this: + +```scala +given Ord[Int] with + ... +given [T](using Ord[T]): Ord[List[T]] with + ... +``` + +If the name of a given is missing, the compiler will synthesize a name from +the implemented type(s). + +**Note** The name synthesized by the compiler is chosen to be readable and reasonably concise. For instance, the two instances above would get the names: + +```scala +given_Ord_Int +given_Ord_List +``` + +The precise rules for synthesizing names are found [here](./relationship-implicits.html#anonymous-given-instances). These rules do not guarantee absence of name conflicts between +given instances of types that are "too similar". To avoid conflicts one can +use named instances. + +**Note** To ensure robust binary compatibility, publicly available libraries should prefer named instances. + +## Alias Givens + +An alias can be used to define a given instance that is equal to some expression. Example: + +```scala +given global: ExecutionContext = ForkJoinPool() +``` + +This creates a given `global` of type `ExecutionContext` that resolves to the right +hand side `ForkJoinPool()`. +The first time `global` is accessed, a new `ForkJoinPool` is created, which is then +returned for this and all subsequent accesses to `global`. This operation is thread-safe. + +Alias givens can be anonymous as well, e.g. + +```scala +given Position = enclosingTree.position +given (using config: Config): Factory = MemoizingFactory(config) +``` + +An alias given can have type parameters and context parameters just like any other given, +but it can only implement a single type. + +## Given Macros + +Given aliases can have the `inline` and `transparent` modifiers. 
+Example: + +```scala +transparent inline given mkAnnotations[A, T]: Annotations[A, T] = ${ + // code producing a value of a subtype of Annotations +} +``` + +Since `mkAnnotations` is `transparent`, the type of an application is the type of its right-hand side, which can be a proper subtype of the declared result type `Annotations[A, T]`. + +Given instances can have the `inline` but not `transparent` modifiers as their type is already known from the signature. +Example: + +```scala +trait Show[T] { + inline def show(x: T): String +} + +inline given Show[Foo] with { + /*transparent*/ inline def show(x: Foo): String = ${ ... } +} + +def app = + // inlines `show` method call and removes the call to `given Show[Foo]` + summon[Show[Foo]].show(foo) +``` +Note that the inline methods within the given instances may be `transparent`. + +The inlining of given instances will not inline/duplicate the implementation of the given, it will just inline the instantiation of that instance. +This is used to help dead code elimination of the given instances that are not used after inlining. + + +## Pattern-Bound Given Instances + +Given instances can also appear in patterns. Example: + +```scala +for given Context <- applicationContexts do + +pair match + case (ctx @ given Context, y) => ... +``` + +In the first fragment above, anonymous given instances for class `Context` are established by enumerating over `applicationContexts`. In the second fragment, a given `Context` +instance named `ctx` is established by matching against the first half of the `pair` selector. + +In each case, a pattern-bound given instance consists of `given` and a type `T`. The pattern matches exactly the same selectors as the type ascription pattern `_: T`. 
+ +## Negated Givens + +Scala 2's somewhat puzzling behavior with respect to ambiguity has been exploited to implement the analogue of a "negated" search in implicit resolution, +where a query Q1 fails if some other query Q2 succeeds and Q1 succeeds if Q2 fails. With the new cleaned up behavior these techniques no longer work. +But the new special type [`scala.util.NotGiven`](https://scala-lang.org/api/3.x/scala/util/NotGiven.html) now implements negation directly. + +For any query type `Q`, [`NotGiven[Q]`](https://scala-lang.org/api/3.x/scala/util/NotGiven.html) succeeds if and only if the implicit +search for `Q` fails, for example: + +```scala +import scala.util.NotGiven + +trait Tagged[A] + +case class Foo[A](value: Boolean) +object Foo: + given fooTagged[A](using Tagged[A]): Foo[A] = Foo(true) + given fooNotTagged[A](using NotGiven[Tagged[A]]): Foo[A] = Foo(false) + +@main def test(): Unit = + given Tagged[Int]() + assert(summon[Foo[Int]].value) // fooTagged is found + assert(!summon[Foo[String]].value) // fooNotTagged is found +``` + +## Given Instance Initialization + +A given instance without type or context parameters is initialized on-demand, the first +time it is accessed. If a given has type or context parameters, a fresh instance +is created for each reference. + +## Syntax + +Here is the syntax for given instances: + +``` +TmplDef ::= ... + | ‘given’ GivenDef +GivenDef ::= [GivenSig] StructuralInstance + | [GivenSig] AnnotType ‘=’ Expr + | [GivenSig] AnnotType +GivenSig ::= [id] [DefTypeParamClause] {UsingParamClause} ‘:’ +StructuralInstance ::= ConstrApp {‘with’ ConstrApp} ‘with’ TemplateBody +``` + +A given instance starts with the reserved word `given` and an optional _signature_. The signature +defines a name and/or parameters for the instance. It is followed by `:`. 
There are three kinds +of given instances: + +- A _structural instance_ contains one or more types or constructor applications, + followed by `with` and a template body that contains member definitions of the instance. +- An _alias instance_ contains a type, followed by `=` and a right-hand side expression. +- An _abstract instance_ contains just the type, which is not followed by anything. diff --git a/docs/_spec/TODOreference/contextual/multiversal-equality.md b/docs/_spec/TODOreference/contextual/multiversal-equality.md new file mode 100644 index 000000000000..e9a81b95f472 --- /dev/null +++ b/docs/_spec/TODOreference/contextual/multiversal-equality.md @@ -0,0 +1,227 @@ +--- +layout: doc-page +title: "Multiversal Equality" +nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/multiversal-equality.html +--- + +Previously, Scala had universal equality: Two values of any types +could be compared with each other with `==` and `!=`. This came from +the fact that `==` and `!=` are implemented in terms of Java's +`equals` method, which can also compare values of any two reference +types. + +Universal equality is convenient. But it is also dangerous since it +undermines type safety. For instance, let's assume one is left after some refactoring +with an erroneous program where a value `y` has type `S` instead of the correct type `T`. + +```scala +val x = ... // of type T +val y = ... // of type S, but should be T +x == y // typechecks, will always yield false +``` + +If `y` gets compared to other values of type `T`, +the program will still typecheck, since values of all types can be compared with each other. +But it will probably give unexpected results and fail at runtime. + +Multiversal equality is an opt-in way to make universal equality safer. +It uses a binary type class [`scala.CanEqual`](https://github.com/lampepfl/dotty/blob/main/library/src/scala/CanEqual.scala) +to indicate that values of two given types can be compared with each other. 
+The example above would not typecheck if `S` or `T` was a class +that derives `CanEqual`, e.g. + +```scala +class T derives CanEqual +``` + +Alternatively, one can also provide a `CanEqual` given instance directly, like this: + +```scala +given CanEqual[T, T] = CanEqual.derived +``` + +This definition effectively says that values of type `T` can (only) be +compared to other values of type `T` when using `==` or `!=`. The definition +affects type checking but it has no significance for runtime +behavior, since `==` always maps to `equals` and `!=` always maps to +the negation of `equals`. The right-hand side `CanEqual.derived` of the definition +is a value that has any `CanEqual` instance as its type. Here is the definition of class +`CanEqual` and its companion object: + +```scala +package scala +import annotation.implicitNotFound + +@implicitNotFound("Values of types ${L} and ${R} cannot be compared with == or !=") +sealed trait CanEqual[-L, -R] + +object CanEqual: + object derived extends CanEqual[Any, Any] +``` + +One can have several `CanEqual` given instances for a type. For example, the four +definitions below make values of type `A` and type `B` comparable with +each other, but not comparable to anything else: + +```scala +given CanEqual[A, A] = CanEqual.derived +given CanEqual[B, B] = CanEqual.derived +given CanEqual[A, B] = CanEqual.derived +given CanEqual[B, A] = CanEqual.derived +``` + +The [`scala.CanEqual`](https://github.com/lampepfl/dotty/blob/main/library/src/scala/CanEqual.scala) +object defines a number of `CanEqual` given instances that together +define a rule book for what standard types can be compared (more details below). + +There is also a "fallback" instance named `canEqualAny` that allows comparisons +over all types that do not themselves have a `CanEqual` given. 
`canEqualAny` is defined as follows: + +```scala +def canEqualAny[L, R]: CanEqual[L, R] = CanEqual.derived +``` + +Even though `canEqualAny` is not declared as `given`, the compiler will still +construct a `canEqualAny` instance as answer to an implicit search for the +type `CanEqual[L, R]`, unless `L` or `R` have `CanEqual` instances +defined on them, or the language feature `strictEquality` is enabled. + +The primary motivation for having `canEqualAny` is backwards compatibility. +If this is of no concern, one can disable `canEqualAny` by enabling the language +feature `strictEquality`. As for all language features this can be either +done with an import + +```scala +import scala.language.strictEquality +``` +or with a command line option `-language:strictEquality`. + +## Deriving CanEqual Instances + +Instead of defining `CanEqual` instances directly, it is often more convenient to derive them. Example: + +```scala +class Box[T](x: T) derives CanEqual +``` + +By the usual rules of [type class derivation](./derivation.md), +this generates the following `CanEqual` instance in the companion object of `Box`: + +```scala +given [T, U](using CanEqual[T, U]): CanEqual[Box[T], Box[U]] = + CanEqual.derived +``` + +That is, two boxes are comparable with `==` or `!=` if their elements are. Examples: + +```scala +new Box(1) == new Box(1L) // ok since there is an instance for `CanEqual[Int, Long]` +new Box(1) == new Box("a") // error: can't compare +new Box(1) == 1 // error: can't compare +``` + +## Precise Rules for Equality Checking + +The precise rules for equality checking are as follows. + +If the `strictEquality` feature is enabled then +a comparison using `x == y` or `x != y` between values `x: T` and `y: U` +is legal if there is a `given` of type `CanEqual[T, U]`. + +In the default case where the `strictEquality` feature is not enabled the comparison is +also legal if + + 1. `T` and `U` are the same, or + 2. 
one of `T`, `U` is a subtype of the _lifted_ version of the other type, or + 3. neither `T` nor `U` have a _reflexive_ `CanEqual` instance. + +Explanations: + + - _lifting_ a type `S` means replacing all references to abstract types + in covariant positions of `S` by their upper bound, and replacing + all refinement types in covariant positions of `S` by their parent. + - a type `T` has a _reflexive_ `CanEqual` instance if the implicit search for `CanEqual[T, T]` + succeeds. + +## Predefined CanEqual Instances + +The `CanEqual` object defines instances for comparing + - the primitive types `Byte`, `Short`, `Char`, `Int`, `Long`, `Float`, `Double`, `Boolean`, and `Unit`, + - `java.lang.Number`, `java.lang.Boolean`, and `java.lang.Character`, + - `scala.collection.Seq`, and `scala.collection.Set`. + +Instances are defined so that every one of these types has a _reflexive_ `CanEqual` instance, and the following holds: + + - Primitive numeric types can be compared with each other. + - Primitive numeric types can be compared with subtypes of `java.lang.Number` (and _vice versa_). + - `Boolean` can be compared with `java.lang.Boolean` (and _vice versa_). + - `Char` can be compared with `java.lang.Character` (and _vice versa_). + - Two sequences (of arbitrary subtypes of `scala.collection.Seq`) can be compared + with each other if their element types can be compared. The two sequence types + need not be the same. + - Two sets (of arbitrary subtypes of `scala.collection.Set`) can be compared + with each other if their element types can be compared. The two set types + need not be the same. + - Any subtype of `AnyRef` can be compared with `Null` (and _vice versa_). + +## Why Two Type Parameters? + +One particular feature of the `CanEqual` type is that it takes _two_ type parameters, representing the types of the two items to be compared. 
By contrast, conventional +implementations of an equality type class take only a single type parameter which represents the common type of _both_ operands. +One type parameter is simpler than two, so why go through the additional complication? The reason has to do with the fact that, rather than coming up with a type class where no operation existed before, +we are dealing with a refinement of pre-existing, universal equality. It is best illustrated through an example. + +Say you want to come up with a safe version of the `contains` method on `List[T]`. The original definition of `contains` in the standard library was: +```scala +class List[+T]: + ... + def contains(x: Any): Boolean +``` +That uses universal equality in an unsafe way since it permits arguments of any type to be compared with the list's elements. The "obvious" alternative definition +```scala + def contains(x: T): Boolean +``` +does not work, since it refers to the covariant parameter `T` in a nonvariant context. The only variance-correct way to use the type parameter `T` in `contains` is as a lower bound: +```scala + def contains[U >: T](x: U): Boolean +``` +This generic version of `contains` is the one used in the current (Scala 2.13) version of `List`. +It looks different but it admits exactly the same applications as the `contains(x: Any)` definition we started with. +However, we can make it more useful (i.e. restrictive) by adding a `CanEqual` parameter: +```scala + def contains[U >: T](x: U)(using CanEqual[T, U]): Boolean // (1) +``` +This version of `contains` is equality-safe! More precisely, given +`x: T`, `xs: List[T]` and `y: U`, then `xs.contains(y)` is type-correct if and only if +`x == y` is type-correct. + +Unfortunately, the crucial ability to "lift" equality type checking from simple equality and pattern matching to arbitrary user-defined operations gets lost if we restrict ourselves to an equality class with a single type parameter. 
Consider the following signature of `contains` with a hypothetical `CanEqual1[T]` type class: +```scala + def contains[U >: T](x: U)(using CanEqual1[U]): Boolean // (2) +``` +This version could be applied just as widely as the original `contains(x: Any)` method, +since the `CanEqual1[Any]` fallback is always available! So we have gained nothing. What got lost in the transition to a single parameter type class was the original rule that `CanEqual[A, B]` is available only if neither `A` nor `B` have a reflexive `CanEqual` instance. That rule simply cannot be expressed if there is a single type parameter for `CanEqual`. + +The situation is different under `-language:strictEquality`. In that case, +the `CanEqual[Any, Any]` or `CanEqual1[Any]` instances would never be available, and the +single and two-parameter versions would indeed coincide for most practical purposes. + +But assuming `-language:strictEquality` immediately and everywhere poses migration problems which might well be unsurmountable. Consider again `contains`, which is in the standard library. Parameterizing it with the `CanEqual` type class as in (1) is an immediate win since it rules out non-sensical applications while still allowing all sensible ones. +So it can be done almost at any time, modulo binary compatibility concerns. +On the other hand, parameterizing `contains` with `CanEqual1` as in (2) would make `contains` +unusable for all types that have not yet declared a `CanEqual1` instance, including all +types coming from Java. This is clearly unacceptable. It would lead to a situation where, +rather than migrating existing libraries to use safe equality, the only upgrade path is to have parallel libraries, with the new version only catering to types deriving `CanEqual1` and the old version dealing with everything else. Such a split of the ecosystem would be very problematic, which means the cure is likely to be worse than the disease. 
+ +For these reasons, it looks like a two-parameter type class is the only way forward because it can take the existing ecosystem where it is and migrate it towards a future where more and more code uses safe equality. + +In applications where `-language:strictEquality` is the default one could also introduce a one-parameter type alias such as +```scala +type Eq[-T] = CanEqual[T, T] +``` +Operations needing safe equality could then use this alias instead of the two-parameter `CanEqual` class. But it would only +work under `-language:strictEquality`, since otherwise the universal `Eq[Any]` instance would be available everywhere. + + +More on multiversal equality is found in a [blog post](http://www.scala-lang.org/blog/2016/05/06/multiversal-equality.html) +and a [GitHub issue](https://github.com/lampepfl/dotty/issues/1247). diff --git a/docs/_spec/TODOreference/contextual/relationship-implicits.md b/docs/_spec/TODOreference/contextual/relationship-implicits.md new file mode 100644 index 000000000000..fce07f51151a --- /dev/null +++ b/docs/_spec/TODOreference/contextual/relationship-implicits.md @@ -0,0 +1,206 @@ +--- +layout: doc-page +title: "Relationship with Scala 2 Implicits" +nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/relationship-implicits.html +--- + +Many, but not all, of the new contextual abstraction features in Scala 3 can be mapped to Scala 2's implicits. This page gives a rundown on the relationships between new and old features. + +## Simulating Scala 3 Contextual Abstraction Concepts with Scala 2 Implicits + +### Given Instances + +Given instances can be mapped to combinations of implicit objects, classes and implicit methods. + + 1. Given instances without parameters are mapped to implicit objects. For instance, + + ```scala + given intOrd: Ord[Int] with { ... } + ``` + + maps to + + ```scala + implicit object intOrd extends Ord[Int] { ... } + ``` + + 2. 
Parameterized givens are mapped to combinations of classes and implicit methods. For instance, + + ```scala + given listOrd[T](using ord: Ord[T]): Ord[List[T]] with { ... } + ``` + + maps to + + ```scala + class listOrd[T](implicit ord: Ord[T]) extends Ord[List[T]] { ... } + final implicit def listOrd[T](implicit ord: Ord[T]): listOrd[T] = + new listOrd[T] + ``` + + 3. Alias givens map to implicit methods or implicit lazy vals. If an alias has neither type nor context parameters, + it is treated as a lazy val, unless the right-hand side is a simple reference, in which case we can use a forwarder to + that reference without caching it. + +Examples: + +```scala +given global: ExecutionContext = new ForkJoinContext() + +val ctx: Context +given Context = ctx +``` + +would map to + +```scala +final implicit lazy val global: ExecutionContext = new ForkJoinContext() +final implicit def given_Context = ctx +``` + +### Anonymous Given Instances + +Anonymous given instances get compiler synthesized names, which are generated in a reproducible way from the implemented type(s). For example, if the names of the `IntOrd` and `ListOrd` givens above were left out, the following names would be synthesized instead: + +```scala +given given_Ord_Int: Ord[Int] with { ... } +given given_Ord_List[T](using ord: Ord[T]): Ord[List[T]] with { ... } +``` + +The synthesized type names are formed from + +1. the prefix `given_`, +2. the simple name(s) of the implemented type(s), leaving out any prefixes, +3. the simple name(s) of the top-level argument type constructors to these types. + +Tuples are treated as transparent, i.e. a type `F[(X, Y)]` would get the synthesized name +`F_X_Y`. Directly implemented function types `A => B` are represented as `A_to_B`. Function types used as arguments to other type constructors are represented as `Function`. + +### Using Clauses + +Using clauses correspond largely to Scala 2's implicit parameter clauses. E.g. 
+ +```scala +def max[T](x: T, y: T)(using ord: Ord[T]): T +``` + +would be written + +```scala +def max[T](x: T, y: T)(implicit ord: Ord[T]): T +``` + +in Scala 2. The main difference concerns applications of such parameters. +Explicit arguments to parameters of using clauses _must_ be written using `(using ...)`, +mirroring the definition syntax. E.g, `max(2, 3)(using IntOrd)`. +Scala 2 uses normal applications `max(2, 3)(IntOrd)` instead. The Scala 2 syntax has some inherent ambiguities and restrictions which are overcome by the new syntax. For instance, multiple implicit parameter lists are not available in the old syntax, even though they can be simulated using auxiliary objects in the "Aux" pattern. + +The `summon` method corresponds to `implicitly` in Scala 2. +It is precisely the same as the `the` method in [Shapeless](https://github.com/milessabin/shapeless). +The difference between `summon` (or `the`) and `implicitly` is +that `summon` can return a more precise type than the type that was +asked for. + +### Context Bounds + +Context bounds are the same in both language versions. They expand to the respective forms of implicit parameters. + +**Note:** To ease migration, context bounds in Scala 3 map for a limited time to old-style implicit parameters for which arguments can be passed either in a using clause or +in a normal argument list. Once old-style implicits are deprecated, context bounds +will map to using clauses instead. + +### Extension Methods + +Extension methods have no direct counterpart in Scala 2, but they can be simulated with implicit classes. 
For instance, the extension method + +```scala +extension (c: Circle) + def circumference: Double = c.radius * math.Pi * 2 +``` + +could be simulated to some degree by + +```scala +implicit class CircleDecorator(c: Circle) extends AnyVal { + def circumference: Double = c.radius * math.Pi * 2 +} +``` + +Abstract extension methods in traits that are implemented in given instances have no direct counterpart in Scala 2. The only way to simulate these is to make implicit classes available through imports. The Simulacrum macro library can automate this process in some cases. + +### Type Class Derivation + +Type class derivation has no direct counterpart in the Scala 2 language. Comparable functionality can be achieved by macro-based libraries such as [Shapeless](https://github.com/milessabin/shapeless), [Magnolia](https://propensive.com/opensource/magnolia), or [scalaz-deriving](https://github.com/scalaz/scalaz-deriving). + +### Context Function Types + +Context function types have no analogue in Scala 2. + +### Implicit By-Name Parameters + +Implicit by-name parameters are not supported in Scala 2, but can be emulated to some degree by the `Lazy` type in Shapeless. + +## Simulating Scala 2 Implicits in Scala 3 + +### Implicit Conversions + +Implicit conversion methods in Scala 2 can be expressed as given instances of the `scala.Conversion` class in Scala 3. For instance, instead of + +```scala +implicit def stringToToken(str: String): Token = new Keyword(str) +``` + +one can write + +```scala +given stringToToken: Conversion[String, Token] with + def apply(str: String): Token = Keyword(str) +``` + +or + +```scala +given stringToToken: Conversion[String, Token] = Keyword(_) +``` + +### Implicit Classes + +Implicit classes in Scala 2 are often used to define extension methods, which are directly supported in Scala 3. Other uses of implicit classes can be simulated by a pair of a regular class and a given `Conversion` instance. 
+ +### Implicit Values + +Implicit `val` definitions in Scala 2 can be expressed in Scala 3 using a regular `val` definition and an alias given. +For instance, Scala 2's + +```scala +lazy implicit val pos: Position = tree.sourcePos +``` + +can be expressed in Scala 3 as + +```scala +lazy val pos: Position = tree.sourcePos +given Position = pos +``` + +### Abstract Implicits + +An abstract implicit `val` or `def` in Scala 2 can be expressed in Scala 3 using a regular abstract definition and an alias given. For instance, Scala 2's + +```scala +implicit def symDecorator: SymDecorator +``` + +can be expressed in Scala 3 as + +```scala +def symDecorator: SymDecorator +given SymDecorator = symDecorator +``` + +## Implementation Status and Timeline + +The Scala 3 implementation implements both Scala 2's implicits and the new abstractions. In fact, support for Scala 2's implicits is an essential part of the common language subset between 2.13 and Scala 3. +Migration to the new abstractions will be supported by making automatic rewritings available. + +Depending on adoption patterns, old style implicits might start to be deprecated in a version following Scala 3.0. 
diff --git a/docs/_spec/TODOreference/contextual/right-associative-extension-methods.md b/docs/_spec/TODOreference/contextual/right-associative-extension-methods.md new file mode 100644 index 000000000000..068123df8cd2 --- /dev/null +++ b/docs/_spec/TODOreference/contextual/right-associative-extension-methods.md @@ -0,0 +1,52 @@ +--- +layout: doc-page +title: "Right-Associative Extension Methods: Details" +nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/right-associative-extension-methods.html +--- + +The most general form of leading parameters of an extension method is as follows: + + - A possibly empty list of using clauses `leadingUsing` + - A single parameter `extensionParam` + - A possibly empty list of using clauses `trailingUsing` + +This is then followed by `def`, the method name, and possibly further parameters +`otherParams`. An example is: + +```scala + extension (using a: A, b: B)(using c: C) // <-- leadingUsing + (x: X) // <-- extensionParam + (using d: D) // <-- trailingUsing + def +:: (y: Y)(using e: E)(z: Z) // <-- otherParams +``` + +An extension method is treated as a right-associative operator +(as in [SLS §6.12.3](https://www.scala-lang.org/files/archive/spec/2.13/06-expressions.html#infix-operations)) +if it has a name ending in `:` and is immediately followed by a +single parameter. In the example above, that parameter is `(y: Y)`. + +The Scala compiler pre-processes a right-associative infix operation such as `x +: xs` +to `xs.+:(x)` if `x` is a pure expression or a call-by-name parameter and to `val y = x; xs.+:(y)` otherwise. This is necessary since a regular right-associative infix method +is defined in the class of its right operand. To make up for this swap, +the expansion of right-associative extension methods performs an analogous parameter swap. 
More precisely, if `otherParams` consists of a single parameter +`rightParam` followed by `remaining`, the total parameter sequence +of the extension method's expansion is: + +``` + leadingUsing rightParam trailingUsing extensionParam remaining +``` + +For instance, the `+::` method above would become + +```scala + def +:: (using a: A, b: B)(using c: C) + (y: Y) + (using d: D) + (x: X) + (using e: E)(z: Z) +``` + +This expansion has to be kept in mind when writing right-associative extension +methods with inter-parameter dependencies. + +An overall simpler design could be obtained if right-associative operators could _only_ be defined as extension methods, and would be disallowed as normal methods. In that case neither arguments nor parameters would have to be swapped. Future versions of Scala should strive to achieve this simplification. diff --git a/docs/_spec/TODOreference/contextual/type-classes.md b/docs/_spec/TODOreference/contextual/type-classes.md new file mode 100644 index 000000000000..9fc0d2eec864 --- /dev/null +++ b/docs/_spec/TODOreference/contextual/type-classes.md @@ -0,0 +1,282 @@ +--- +layout: doc-page +title: "Implementing Type classes" +nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/type-classes.html +--- + +A _type class_ is an abstract, parameterized type that lets you add new behavior to any closed data type without using sub-typing. This can be useful in multiple use-cases, for example: + +* expressing how a type you don't own (from the standard or 3rd-party library) conforms to such behavior +* expressing such a behavior for multiple types without involving sub-typing relationships (one `extends` another) between those types (see: [ad hoc polymorphism](https://en.wikipedia.org/wiki/Ad_hoc_polymorphism) for instance) + +Therefore in Scala 3, _type classes_ are just _traits_ with one or more parameters whose implementations are not defined through the `extends` keyword, but by **given instances**. 
+Here are some examples of common type classes: + +## Semigroups and monoids + +Here's the `Monoid` type class definition: + +```scala +trait SemiGroup[T]: + extension (x: T) def combine (y: T): T + +trait Monoid[T] extends SemiGroup[T]: + def unit: T +``` + +An implementation of this `Monoid` type class for the type `String` can be the following: + +```scala +given Monoid[String] with + extension (x: String) def combine (y: String): String = x.concat(y) + def unit: String = "" +``` + +Whereas for the type `Int` one could write the following: + +```scala +given Monoid[Int] with + extension (x: Int) def combine (y: Int): Int = x + y + def unit: Int = 0 +``` + +This monoid can now be used as _context bound_ in the following `combineAll` method: + +```scala +def combineAll[T: Monoid](xs: List[T]): T = + xs.foldLeft(summon[Monoid[T]].unit)(_.combine(_)) +``` + +To get rid of the `summon[...]` we can define a `Monoid` object as follows: + +```scala +object Monoid: + def apply[T](using m: Monoid[T]) = m +``` + +Which would allow to re-write the `combineAll` method this way: + +```scala +def combineAll[T: Monoid](xs: List[T]): T = + xs.foldLeft(Monoid[T].unit)(_.combine(_)) +``` + +## Functors + +A `Functor` for a type provides the ability for its values to be "mapped over", i.e. apply a function that transforms inside a value while remembering its shape. For example, to modify every element of a collection without dropping or adding elements. +We can represent all types that can be "mapped over" with `F`. It's a type constructor: the type of its values becomes concrete when provided a type argument. +Therefore we write it `F[_]`, hinting that the type `F` takes another type as argument. 
+The definition of a generic `Functor` would thus be written as: + +```scala +trait Functor[F[_]]: + def map[A, B](x: F[A], f: A => B): F[B] +``` + +Which could read as follows: "A `Functor` for the type constructor `F[_]` represents the ability to transform `F[A]` to `F[B]` through the application of function `f` with type `A => B`". We call the `Functor` definition here a _type class_. +This way, we could define an instance of `Functor` for the `List` type: + +```scala +given Functor[List] with + def map[A, B](x: List[A], f: A => B): List[B] = + x.map(f) // List already has a `map` method +``` + +With this `given` instance in scope, everywhere a `Functor` is expected, the compiler will accept a `List` to be used. + +For instance, we may write such a testing method: + +```scala +def assertTransformation[F[_]: Functor, A, B](expected: F[B], original: F[A], mapping: A => B): Unit = + assert(expected == summon[Functor[F]].map(original, mapping)) +``` + +And use it this way, for example: + +```scala +assertTransformation(List("a1", "b1"), List("a", "b"), elt => s"${elt}1") +``` + +That's a first step, but in practice we probably would like the `map` function to be a method directly accessible on the type `F`. So that we can call `map` directly on instances of `F`, and get rid of the `summon[Functor[F]]` part. +As in the previous example of Monoids, [`extension` methods](extension-methods.md) help achieving that. Let's re-define the `Functor` type class with extension methods. 
+ +```scala +trait Functor[F[_]]: + extension [A](x: F[A]) + def map[B](f: A => B): F[B] +``` + +The instance of `Functor` for `List` now becomes: + +```scala +given Functor[List] with + extension [A](xs: List[A]) + def map[B](f: A => B): List[B] = + xs.map(f) // List already has a `map` method + +``` + +It simplifies the `assertTransformation` method: + +```scala +def assertTransformation[F[_]: Functor, A, B](expected: F[B], original: F[A], mapping: A => B): Unit = + assert(expected == original.map(mapping)) +``` + +The `map` method is now directly used on `original`. It is available as an extension method +since `original`'s type is `F[A]` and a given instance for `Functor[F[A]]` which defines `map` +is in scope. + +## Monads + +Applying `map` in `Functor[List]` to a mapping function of type `A => B` results in a `List[B]`. So applying it to a mapping function of type `A => List[B]` results in a `List[List[B]]`. To avoid managing lists of lists, we may want to "flatten" the values in a single list. + +That's where `Monad` comes in. A `Monad` for type `F[_]` is a `Functor[F]` with two more operations: + +* `flatMap`, which turns an `F[A]` into an `F[B]` when given a function of type `A => F[B]`, +* `pure`, which creates an `F[A]` from a single value `A`. 
+ +Here is the translation of this definition in Scala 3: + +```scala +trait Monad[F[_]] extends Functor[F]: + + /** The unit value for a monad */ + def pure[A](x: A): F[A] + + extension [A](x: F[A]) + /** The fundamental composition operation */ + def flatMap[B](f: A => F[B]): F[B] + + /** The `map` operation can now be defined in terms of `flatMap` */ + def map[B](f: A => B) = x.flatMap(f.andThen(pure)) + +end Monad
``` + +### List + +A `List` can be turned into a monad via this `given` instance: + +```scala +given listMonad: Monad[List] with + def pure[A](x: A): List[A] = + List(x) + extension [A](xs: List[A]) + def flatMap[B](f: A => List[B]): List[B] = + xs.flatMap(f) // rely on the existing `flatMap` method of `List` +``` + +Since `Monad` is a subtype of `Functor`, `List` is also a functor. The Functor's `map` +operation is already provided by the `Monad` trait, so the instance does not need to define +it explicitly. + +### Option + +`Option` is another type having the same kind of behaviour: + +```scala +given optionMonad: Monad[Option] with + def pure[A](x: A): Option[A] = + Option(x) + extension [A](xo: Option[A]) + def flatMap[B](f: A => Option[B]): Option[B] = xo match + case Some(x) => f(x) + case None => None +``` + +### Reader + +Another example of a `Monad` is the _Reader_ Monad, which acts on functions instead of +data types like `List` or `Option`. It can be used to combine multiple functions +that all need the same parameter. For instance multiple functions needing access to some configuration, context, environment variables, etc. + +Let's define a `Config` type, and two functions using it: + +```scala +trait Config +// ... +def compute(i: Int)(config: Config): String = ??? +def show(str: String)(config: Config): Unit = ??? 
+``` + +We may want to combine `compute` and `show` into a single function, accepting a `Config` as parameter, and showing the result of the computation, and we'd like to use +a monad to avoid passing the parameter explicitly multiple times. So postulating +the right `flatMap` operation, we could write: + +```scala +def computeAndShow(i: Int): Config => Unit = compute(i).flatMap(show) +``` + +instead of + +```scala +show(compute(i)(config))(config) +``` + +Let's define this monad then. First, we are going to define a type named `ConfigDependent` representing a function that when passed a `Config` produces a `Result`. + +```scala +type ConfigDependent[Result] = Config => Result +``` + +The monad instance will look like this: + +```scala +given configDependentMonad: Monad[ConfigDependent] with + + def pure[A](x: A): ConfigDependent[A] = + config => x + + extension [A](x: ConfigDependent[A]) + def flatMap[B](f: A => ConfigDependent[B]): ConfigDependent[B] = + config => f(x(config))(config) + +end configDependentMonad +``` + +The type `ConfigDependent` can be written using [type lambdas](../new-types/type-lambdas.md): + +```scala +type ConfigDependent = [Result] =>> Config => Result +``` + +Using this syntax would turn the previous `configDependentMonad` into: + +```scala +given configDependentMonad: Monad[[Result] =>> Config => Result] with + + def pure[A](x: A): Config => A = + config => x + + extension [A](x: Config => A) + def flatMap[B](f: A => Config => B): Config => B = + config => f(x(config))(config) + +end configDependentMonad +``` + +It is likely that we would like to use this pattern with other kinds of environments than our `Config` trait. 
The Reader monad allows us to abstract away `Config` as a type _parameter_, named `Ctx` in the following definition: + +```scala +given readerMonad[Ctx]: Monad[[X] =>> Ctx => X] with + + def pure[A](x: A): Ctx => A = + ctx => x + + extension [A](x: Ctx => A) + def flatMap[B](f: A => Ctx => B): Ctx => B = + ctx => f(x(ctx))(ctx) + +end readerMonad +``` + +## Summary + +The definition of a _type class_ is expressed with a parameterised type with abstract members, such as a `trait`. +The main difference between subtype polymorphism and ad-hoc polymorphism with _type classes_ is how the definition of the _type class_ is implemented, in relation to the type it acts upon. +In the case of a _type class_, its implementation for a concrete type is expressed through a `given` instance definition, which is supplied as an implicit argument alongside the value it acts upon. With subtype polymorphism, the implementation is mixed into the parents of a class, and only a single term is required to perform a polymorphic operation. The type class solution +takes more effort to set up, but is more extensible: Adding a new interface to a +class requires changing the source code of that class. By contrast, instances for type classes can be defined anywhere. + +To conclude, we have seen that traits and given instances, combined with other constructs like extension methods, context bounds and type lambdas allow a concise and natural expression of _type classes_. diff --git a/docs/_spec/TODOreference/contextual/using-clauses.md b/docs/_spec/TODOreference/contextual/using-clauses.md new file mode 100644 index 000000000000..9187e1916e7d --- /dev/null +++ b/docs/_spec/TODOreference/contextual/using-clauses.md @@ -0,0 +1,153 @@ +--- +layout: doc-page +title: "Using Clauses" +nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/using-clauses.html +--- + +Functional programming tends to express most dependencies as simple function parameterization. 
+This is clean and powerful, but it sometimes leads to functions that take many parameters where the same value is passed over and over again in long call chains to many +functions. Context parameters can help here since they enable the compiler to synthesize +repetitive arguments instead of the programmer having to write them explicitly. + +For example, with the [given instances](./givens.md) defined previously, +a `max` function that works for any arguments for which an ordering exists can be defined as follows: + +```scala +def max[T](x: T, y: T)(using ord: Ord[T]): T = + if ord.compare(x, y) < 0 then y else x +``` + +Here, `ord` is a _context parameter_ introduced with a `using` clause. +The `max` function can be applied as follows: + +```scala +max(2, 3)(using intOrd) +``` + +The `(using intOrd)` part passes `intOrd` as an argument for the `ord` parameter. But the point of context parameters is that this argument can also be left out (and it usually is). So the following applications are equally valid: + +```scala +max(2, 3) +max(List(1, 2, 3), Nil) +``` + +## Anonymous Context Parameters + +In many situations, the name of a context parameter need not be +mentioned explicitly at all, since it is used only in synthesized arguments for +other context parameters. In that case one can avoid defining a parameter name +and just provide its type. Example: + +```scala +def maximum[T](xs: List[T])(using Ord[T]): T = + xs.reduceLeft(max) +``` + +`maximum` takes a context parameter of type `Ord[T]` only to pass it on as an +inferred argument to `max`. The name of the parameter is left out. + +Generally, context parameters may be defined either as a full parameter list `(p_1: T_1, ..., p_n: T_n)` or just as a sequence of types `T_1, ..., T_n`. Vararg parameters are not supported in `using` clauses. + +## Class Context Parameters + +If a class context parameter is made a member by adding a `val` or `var` modifier, +then that member is available as a given instance. 
+ +Compare the following examples, where the attempt to supply an explicit `given` member induces an ambiguity: + +```scala +class GivenIntBox(using val givenInt: Int): + def n = summon[Int] + +class GivenIntBox2(using givenInt: Int): + given Int = givenInt + //def n = summon[Int] // ambiguous +``` + +The `given` member is importable as explained in the section on [importing `given`s](./given-imports.md): + +```scala +val b = GivenIntBox(using 23) +import b.given +summon[Int] // 23 + +import b.* +//givenInt // Not found +``` + +## Inferring Complex Arguments + +Here are two other methods that have a context parameter of type `Ord[T]`: + +```scala +def descending[T](using asc: Ord[T]): Ord[T] = new Ord[T]: + def compare(x: T, y: T) = asc.compare(y, x) + +def minimum[T](xs: List[T])(using Ord[T]) = + maximum(xs)(using descending) +``` + +The `minimum` method's right-hand side passes `descending` as an explicit argument to `maximum(xs)`. +With this setup, the following calls are all well-formed, and they all normalize to the last one: + +```scala +minimum(xs) +maximum(xs)(using descending) +maximum(xs)(using descending(using listOrd)) +maximum(xs)(using descending(using listOrd(using intOrd))) +``` + +## Multiple `using` Clauses + +There can be several `using` clauses in a definition and `using` clauses can be freely mixed with normal parameter clauses. Example: + +```scala +def f(u: Universe)(using ctx: u.Context)(using s: ctx.Symbol, k: ctx.Kind) = ... +``` + +Multiple `using` clauses are matched left-to-right in applications. Example: + +```scala +object global extends Universe { type Context = ... } +given ctx : global.Context with { type Symbol = ...; type Kind = ... } +given sym : ctx.Symbol +given kind: ctx.Kind + +``` +Then the following calls are all valid (and normalize to the last one) + +```scala +f(global) +f(global)(using ctx) +f(global)(using ctx)(using sym, kind) +``` + +But `f(global)(using sym, kind)` would give a type error. 
+ + +## Summoning Instances + +The method `summon` in `Predef` returns the given of a specific type. For example, +the given instance for `Ord[List[Int]]` is produced by + +```scala +summon[Ord[List[Int]]] // reduces to listOrd(using intOrd) +``` + +The `summon` method is simply defined as the (non-widening) identity function over a context parameter. + +```scala +def summon[T](using x: T): x.type = x +``` + +## Syntax + +Here is the new syntax of parameters and arguments seen as a delta from the [standard context free syntax of Scala 3](../syntax.md). `using` is a soft keyword, recognized only at the start of a parameter or argument list. It can be used as a normal identifier everywhere else. + +``` +ClsParamClause ::= ... | UsingClsParamClause +DefParamClauses ::= ... | UsingParamClause +UsingClsParamClause ::= ‘(’ ‘using’ (ClsParams | Types) ‘)’ +UsingParamClause ::= ‘(’ ‘using’ (DefParams | Types) ‘)’ +ParArgumentExprs ::= ... | ‘(’ ‘using’ ExprsInParens ‘)’ +``` diff --git a/docs/_spec/TODOreference/dropped-features/nonlocal-returns.md b/docs/_spec/TODOreference/dropped-features/nonlocal-returns.md new file mode 100644 index 000000000000..17b86f77ee56 --- /dev/null +++ b/docs/_spec/TODOreference/dropped-features/nonlocal-returns.md @@ -0,0 +1,29 @@ +--- +layout: doc-page +title: "Deprecated: Nonlocal Returns" + +nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/nonlocal-returns.html +--- + +Returning from nested anonymous functions has been deprecated, and will produce a warning from version `3.2`. + +Nonlocal returns are implemented by throwing and catching `scala.runtime.NonLocalReturnException`-s. This is rarely what is intended by the programmer. It can be problematic because of the hidden performance cost of throwing and catching exceptions. Furthermore, it is a leaky implementation: a catch-all exception handler can intercept a `NonLocalReturnException`. 
+ +A drop-in library replacement is provided in [`scala.util.control.NonLocalReturns`](https://scala-lang.org/api/3.x/scala/util/control/NonLocalReturns$.html). Example: + +```scala +import scala.util.control.NonLocalReturns.* + +extension [T](xs: List[T]) + def has(elem: T): Boolean = returning { + for x <- xs do + if x == elem then throwReturn(true) + false + } + +@main def test(): Unit = + val xs = List(1, 2, 3, 4, 5) + assert(xs.has(2) == xs.contains(2)) +``` + +Note: compiler produces deprecation error on nonlocal returns only with `-source:future` option. diff --git a/docs/_spec/TODOreference/dropped-features/package-objects.md b/docs/_spec/TODOreference/dropped-features/package-objects.md new file mode 100644 index 000000000000..d8149e460bf5 --- /dev/null +++ b/docs/_spec/TODOreference/dropped-features/package-objects.md @@ -0,0 +1,48 @@ +--- +layout: doc-page +title: "Dropped: Package Objects" +nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/package-objects.html +--- + +Package objects +```scala +package object p { + val a = ... + def b = ... +} +``` +will be dropped. They are still available in Scala 3.0 and 3.1, but will be deprecated and removed afterwards. + +Package objects are no longer needed since all kinds of definitions can now be written at the top-level. Example: +```scala +package p +type Labelled[T] = (String, T) +val a: Labelled[Int] = ("count", 1) +def b = a._2 + +case class C() + +extension (x: C) def pair(y: C) = (x, y) +``` +There may be several source files in a package containing such top-level definitions, and source files can freely mix top-level value, method, and type definitions with classes and objects. + +The compiler generates synthetic objects that wrap top-level definitions falling into one of the following categories: + + - all pattern, value, method, and type definitions, + - implicit classes and objects, + - companion objects of opaque type aliases. 
+ +If a source file `src.scala` contains such top-level definitions, they will be put in a synthetic object named `src$package`. The wrapping is transparent, however. The definitions in `src` can still be accessed as members of the enclosing package. The synthetic object will be placed last in the file, +after any other package clauses, imports, or object and class definitions. + +**Note:** This means that +1. The name of a source file containing wrapped top-level definitions is relevant for binary compatibility. If the name changes, so does the name of the generated object and its class. + +2. A top-level main method `def main(args: Array[String]): Unit = ...` is wrapped as any other method. If it appears +in a source file `src.scala`, it could be invoked from the command line using a command like `scala src$package`. Since the +"program name" is mangled it is recommended to always put `main` methods in explicitly named objects. + +3. The notion of `private` is independent of whether a definition is wrapped or not. A `private` top-level definition is always visible from everywhere in the enclosing package. + +4. If several top-level definitions are overloaded variants with the same name, +they must all come from the same source file. diff --git a/docs/_spec/TODOreference/dropped-features/type-projection.md b/docs/_spec/TODOreference/dropped-features/type-projection.md new file mode 100644 index 000000000000..08b5ffb34eca --- /dev/null +++ b/docs/_spec/TODOreference/dropped-features/type-projection.md @@ -0,0 +1,18 @@ +--- +layout: doc-page +title: "Dropped: General Type Projection" +nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/type-projection.html +--- + +Scala so far allowed general type projection `T#A` where `T` is an arbitrary type +and `A` names a type member of `T`. + +Scala 3 disallows this if `T` is an abstract type (class types and type aliases +are fine). 
This change was made because unrestricted type projection +is [unsound](https://github.com/lampepfl/dotty/issues/1050). + +This restriction rules out the [type-level encoding of a combinator +calculus](https://michid.wordpress.com/2010/01/29/scala-type-level-encoding-of-the-ski-calculus/). + +To rewrite code using type projections on abstract types, consider using +path-dependent types or implicit parameters. diff --git a/docs/_spec/TODOreference/dropped-features/weak-conformance-spec.md b/docs/_spec/TODOreference/dropped-features/weak-conformance-spec.md new file mode 100644 index 000000000000..07625dcfe885 --- /dev/null +++ b/docs/_spec/TODOreference/dropped-features/weak-conformance-spec.md @@ -0,0 +1,54 @@ +--- +layout: doc-page +title: "Dropped: Weak Conformance - More Details" +nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/weak-conformance-spec.html +--- + +To simplify the underlying type theory, Scala 3 drops the notion of +[*weak conformance*](https://www.scala-lang.org/files/archive/spec/2.13/03-types.html#weak-conformance) +altogether. Instead, it provides more flexibility when +assigning a type to a constant expression. 
The new rule is: + + - *If* a list of expressions `Es` appears as one of + + - the elements of a vararg parameter, or + - the alternatives of an if-then-else or match expression, or + - the body and catch results of a try expression, + +- *and* all expressions have primitive numeric types, but they do not + all have the same type, + +- *then* the following is attempted: + + - the expressions `Es` are partitioned into `Int` constants on the + one hand, and all other expressions on the other hand, + - if all the other expressions have the same numeric type `T` + (which can be one of `Byte`, `Short`, `Char`, `Int`, `Long`, `Float`, + `Double`), possibly after widening, and if none of the `Int` + literals would incur a loss of precision when converted to `T`, + then they are thus converted (the other expressions are left + unchanged regardless), + - otherwise, the expressions `Es` are used unchanged. + + A loss of precision occurs for + - an `Int -> Float` conversion of a constant + `c` if `c.toFloat.toInt != c` + - an `Int -> Byte` conversion of a constant + `c` if `c.toByte.toInt != c`, + - an `Int -> Short` conversion of a constant + `c` if `c.toShort.toInt != c`. 
+ +## Examples + +```scala +inline val b = 33 +def f(): Int = b + 1 +Array(b, 33, 5.5) : Array[Double] // b is an inline val +Array(f(), 33, 5.5) : Array[AnyVal] // f() is not a constant +Array(5, 11L) : Array[Long] +Array(5, 11L, 5.5) : Array[AnyVal] // Long and Double found +Array(1.0f, 2) : Array[Float] +Array(1.0f, 1234567890): Array[AnyVal] // loss of precision +Array(b, 33, 'a') : Array[Char] +Array(5.toByte, 11) : Array[Byte] +``` diff --git a/docs/_spec/TODOreference/dropped-features/weak-conformance.md b/docs/_spec/TODOreference/dropped-features/weak-conformance.md new file mode 100644 index 000000000000..b1478326b2c9 --- /dev/null +++ b/docs/_spec/TODOreference/dropped-features/weak-conformance.md @@ -0,0 +1,47 @@ +--- +layout: doc-page +title: "Dropped: Weak Conformance" +nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/weak-conformance.html +--- + +In some situations, Scala used a _weak conformance_ relation when +testing type compatibility or computing the least upper bound of a set +of types. The principal motivation behind weak conformance was to +make an expression like this have type `List[Double]`: + +```scala +List(1.0, math.sqrt(3.0), 0, -3.3) // : List[Double] +``` + +It's "obvious" that this should be a `List[Double]`. However, without +some special provision, the least upper bound of the list's element +types `(Double, Double, Int, Double)` would be `AnyVal`, hence the list +expression would be given type `List[AnyVal]`. + +A less obvious example is the following one, which was also typed as a +`List[Double]`, using the weak conformance relation. + +```scala +val n: Int = 3 +val c: Char = 'X' +val d: Double = math.sqrt(3.0) +List(n, c, d) // used to be: List[Double], now: List[AnyVal] +``` + +Here, it is less clear why the type should be widened to +`List[Double]`, a `List[AnyVal]` seems to be an equally valid -- and +more principled -- choice. 
+ +Weak conformance applies to all "numeric" types (including `Char`), and +independently of whether the expressions are literals or not. However, +in hindsight, the only intended use case is for *integer literals* to +be adapted to the type of the other expressions. Other types of numerics +have an explicit type annotation embedded in their syntax (`f`, `d`, +`.`, `L` or `'` for `Char`s) which ensures that their author really +meant them to have that specific type. + +Therefore, Scala 3 drops the general notion of weak conformance, and +instead keeps one rule: `Int` literals are adapted to other numeric +types if necessary. + +[More details](weak-conformance-spec.md) diff --git a/docs/_spec/TODOreference/dropped-features/wildcard-init.md b/docs/_spec/TODOreference/dropped-features/wildcard-init.md new file mode 100644 index 000000000000..e42854079cf9 --- /dev/null +++ b/docs/_spec/TODOreference/dropped-features/wildcard-init.md @@ -0,0 +1,23 @@ +--- +layout: doc-page +title: "Dropped: Wildcard Initializer" +nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/wildcard-init.html +--- + +The syntax + +```scala + var x: A = _ +``` + +that was used to indicate an uninitialized field, has been dropped. +In its place there is a special value `uninitialized` in the `scala.compiletime` package. +To get an uninitialized field, you now write + +```scala +import scala.compiletime.uninitialized + +var x: A = uninitialized +``` + +To enable cross-compilation, `_` is still supported, but it will be dropped in a future 3.x version. 
diff --git a/docs/_spec/TODOreference/dropped-features/xml.md b/docs/_spec/TODOreference/dropped-features/xml.md new file mode 100644 index 000000000000..458a347a66c4 --- /dev/null +++ b/docs/_spec/TODOreference/dropped-features/xml.md @@ -0,0 +1,39 @@ +--- +layout: doc-page +title: "Dropped: XML Literals" +nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/xml.html +--- + +XML Literals are still supported, but will be dropped in the near future, to +be replaced with [XML string interpolation](https://github.com/lampepfl/xml-interpolator): + +```scala +import dotty.xml.interpolator.* + +case class Person(name: String) { override def toString = name } + +@main def test: Unit = + val bill = Person("Bill") + val john = Person("John") + val mike = Person("Mike") + val todoList = List( + (bill, john, "Meeting", "Room 203, 11:00am"), + (john, mike, "Holiday", "March 22-24") + ) + // XML literals (to be dropped) + val mails1 = for (from, to, heading, body) <- todoList yield + + {from}{to} + {heading}{body} + + println(mails1) + // XML string interpolation + val mails2 = for (from, to, heading, body) <- todoList yield xml""" + + ${from}${to} + ${heading}${body} + """ + println(mails2) +``` + +For more information, see the semester project [XML String Interpolator for Dotty](https://infoscience.epfl.ch/record/267527) by Yassin Kammoun (2019). diff --git a/docs/_spec/TODOreference/experimental/canthrow.md b/docs/_spec/TODOreference/experimental/canthrow.md new file mode 100644 index 000000000000..025a0ed1c686 --- /dev/null +++ b/docs/_spec/TODOreference/experimental/canthrow.md @@ -0,0 +1,281 @@ +--- +layout: doc-page +title: "CanThrow Capabilities" +nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/canthrow.html +--- + +This page describes experimental support for exception checking in Scala 3. 
It is enabled by the language import +```scala +import language.experimental.saferExceptions +``` +The reason for publishing this extension now is to get feedback on its usability. We are working on more advanced type systems that build on the general ideas put forward in the extension. Those type systems have application areas beyond checked exceptions. Exception checking is a useful starting point since exceptions are familiar to all Scala programmers and their current treatment leaves room for improvement. + +## Why Exceptions? + +Exceptions are an ideal mechanism for error handling in many situations. They serve the intended purpose of propagating error conditions with a minimum of boilerplate. They cause zero overhead for the "happy path", which means they are very efficient as long as errors arise infrequently. Exceptions are also debug friendly, since they produce stack traces that can be inspected at the handler site. So one never has to guess where an erroneous condition originated. + +## Why Not Exceptions? + +However, exceptions in current Scala and many other languages are not reflected in the type system. This means that an essential part of the contract of a function - i.e. what exceptions can it produce? - is not statically checked. Most people acknowledge that this is a problem, but that so far the alternative of checked exceptions was just too painful to be considered. A good example are Java checked exceptions, which do the right thing in principle, but are widely regarded as a mistake since they are so difficult to deal with. So far, none of the successor languages that are modeled after Java or that build on the JVM has copied this feature. See for example Anders Hejlsberg's [statement on why C# does not have checked exceptions](https://www.artima.com/articles/the-trouble-with-checked-exceptions). 
+ +## The Problem With Java's Checked Exceptions + +The main problem with [Java's checked exception model](https://docs.oracle.com/javase/specs/jls/se8/html/jls-11.html#jls-11.2) is its inflexibility, which is due to lack of polymorphism. Consider for instance the `map` function which is declared on `List[A]` like this: +```scala + def map[B](f: A => B): List[B] +``` +In the Java model, function `f` is not allowed to throw a checked exception. So the following call would be invalid: +```scala + xs.map(x => if x < limit then x * x else throw LimitExceeded()) +``` +The only way around this would be to wrap the checked exception `LimitExceeded` in an unchecked [`java.lang.RuntimeException`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/lang/RuntimeException.html) that is caught at the callsite and unwrapped again. Something like this: +```scala + try + xs.map(x => if x < limit then x * x else throw Wrapper(LimitExceeded())) + catch case Wrapper(ex) => throw ex +``` +Ugh! No wonder checked exceptions in Java are not very popular. + +## Monadic Effects + +So the dilemma is that exceptions are easy to use only as long as we forget static type checking. This has caused many people working with Scala to abandon exceptions altogether and to use an error monad like [`Either`](https://scala-lang.org/api/3.x/scala/util/Either.html) instead. This can work in many situations but is not without its downsides either. It makes code a lot more complicated and harder to refactor. It means one is quickly confronted with the problem how to work with several monads. In general, dealing with one monad at a time in Scala is straightforward but dealing with several monads together is much less pleasant since monads don't compose. A great number of techniques have been proposed, implemented, and promoted to deal with this, from monad transformers, to free monads, to tagless final. 
But none of these techniques is universally liked; each introduces a complicated DSL that's hard to understand for non-experts, introduces runtime overheads, and makes debugging difficult. In the end, quite a few developers prefer to work instead with a single "super-monad" like [`ZIO`](https://zio.dev/version-1.x/datatypes/core/zio) that has error propagation built in alongside other aspects. This one-size fits all approach can work very nicely, even though (or is it because?) it represents an all-encompassing framework. + +However, a programming language is not a framework; it has to cater also for those applications that do not fit the framework's use cases. So there's still a strong motivation for getting exception checking right. + +## From Effects To Capabilities + +Why does `map` work so poorly with Java's checked exception model? It's because +`map`'s signature limits function arguments to not throw checked exceptions. We could try to come up with a more polymorphic formulation of `map`. For instance, it could look like this: +```scala + def map[B, E](f: A => B throws E): List[B] throws E +``` +This assumes a type `A throws E` to indicate computations of type `A` that can throw an exception of type `E`. But in practice the overhead of the additional type parameters makes this approach unappealing as well. Note in particular that we'd have to parameterize _every method_ that takes a function argument that way, so the added overhead of declaring all these exception types looks just like a sort of ceremony we would like to avoid. + +But there is a way to avoid the ceremony. Instead of concentrating on possible _effects_ such as "this code might throw an exception", concentrate on _capabilities_ such as "this code needs the capability to throw an exception". From a standpoint of expressiveness this is quite similar. But capabilities can be expressed as parameters whereas traditionally effects are expressed as some addition to result values. 
It turns out that this can make a big difference! + +## The `CanThrow` Capability + +In the _effects as capabilities_ model, an effect is expressed as an (implicit) parameter of a certain type. For exceptions we would expect parameters of type +[`CanThrow[E]`](https://scala-lang.org/api/3.x/scala/CanThrow.html) where `E` stands for the exception that can be thrown. Here is the definition of `CanThrow`: +```scala +erased class CanThrow[-E <: Exception] +``` +This shows another experimental Scala feature: [erased definitions](./erased-defs.md). Roughly speaking, values of an erased class do not generate runtime code; they are erased before code generation. This means that all `CanThrow` capabilities are compile-time only artifacts; they do not have a runtime footprint. + +Now, if the compiler sees a `throw Exc()` construct where `Exc` is a checked exception, it will check that there is a capability of type `CanThrow[Exc]` that can be summoned as a given. It's a compile-time error if that's not the case. + +How can the capability be produced? There are several possibilities: + +Most often, the capability is produced by having a using clause `(using CanThrow[Exc])` in some enclosing scope. This roughly corresponds to a [`throws`](https://docs.oracle.com/javase/specs/jls/se7/html/jls-8.html#jls-8.4.6) clause in Java. The analogy is even stronger since alongside [`CanThrow`](https://scala-lang.org/api/3.x/scala/CanThrow.html) there is also the following type alias defined in the [`scala`](https://scala-lang.org/api/3.x/scala.html) package: +```scala +infix type A = Int +``` +```scala +infix type $throws[R, +E <: Exception] = CanThrow[E] ?=> R +``` +That is, [`R $throws E`](https://scala-lang.org/api/3.x/scala/runtime.html#$throws-0) is a context function type that takes an implicit `CanThrow[E]` parameter and that returns a value of type `R`. 
What's more, the compiler will translate infix types with `throws` as the operator to `$throws` applications according to the rules
+```
+  A throws E  -->  A $throws E
+  A throws E₁ | ... | Eᵢ  -->  A $throws E₁ ... $throws Eᵢ
+```
+Therefore, a method written like this:
+```scala
+def m(x: T)(using CanThrow[E]): U
+```
+can alternatively be expressed like this:
+```scala
+def m(x: T): U throws E
+```
+Also the capability to throw multiple types of exceptions can be expressed in a few ways as shown in the examples below:
+```scala
+def m(x: T): U throws E1 | E2
+def m(x: T): U throws E1 throws E2
+def m(x: T)(using CanThrow[E1], CanThrow[E2]): U
+def m(x: T)(using CanThrow[E1])(using CanThrow[E2]): U
+def m(x: T)(using CanThrow[E1]): U throws E2
+```
+
+**Note 1:** A signature like
+```scala
+def m(x: T)(using CanThrow[E1 | E2]): U
+```
+would also allow throwing `E1` or `E2` inside the method's body but might cause problems when someone tried to call this method
+from another method declaring its `CanThrow` capabilities like in the earlier examples.
+This is because `CanThrow` has a contravariant type parameter so `CanThrow[E1 | E2]` is a subtype of both `CanThrow[E1]` and `CanThrow[E2]`.
+Hence the presence of a given instance of `CanThrow[E1 | E2]` in scope satisfies the requirement for `CanThrow[E1]` and `CanThrow[E2]`
+but given instances of `CanThrow[E1]` and `CanThrow[E2]` cannot be combined to provide an instance of `CanThrow[E1 | E2]`.
+
+**Note 2:** One should keep in mind that `|` binds its left and right arguments more tightly than `throws` so `A | B throws E1 | E2` means `(A | B) throws (E1 | E2)`, not `A | (B throws E1) | E2`.
+
+The `CanThrow`/`throws` combo essentially propagates the `CanThrow` requirement outwards. But where are these capabilities created in the first place? That's in the `try` expression. Given a `try` like this:
+
+```scala
+try
+  body
+catch
+  case ex1: Ex1 => handler1
+  ...
+  case exN: ExN => handlerN
+```
+the compiler generates an accumulated capability of type `CanThrow[Ex1 | ... | ExN]` that is available as a given in the scope of `body`. It does this by augmenting the `try` roughly as follows:
+```scala
+try
+  erased given CanThrow[Ex1 | ... | ExN] = compiletime.erasedValue
+  body
+catch ...
+```
+Note that the right-hand side of the synthesized given is `compiletime.erasedValue`. This is OK since
+this given is erased; it will not be executed at runtime.
+
+**Note 1:** The [`saferExceptions`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$experimental$$saferExceptions$.html) feature is designed to work only with checked exceptions. An exception type is _checked_ if it is a subtype of
+`Exception` but not of `RuntimeException`. The signature of `CanThrow` still admits `RuntimeException`s since `RuntimeException` is a proper subtype of its bound, `Exception`. But no capabilities will be generated for `RuntimeException`s. Furthermore, `throws` clauses
+also may not refer to `RuntimeException`s.
+
+**Note 2:** To keep things simple, the compiler will currently only generate capabilities
+for catch clauses of the form
+```scala
+  case ex: Ex =>
+```
+where `ex` is an arbitrary variable name (`_` is also allowed), and `Ex` is an arbitrary
+checked exception type. Constructor patterns such as `Ex(...)` or patterns with guards
+are not allowed. The compiler will issue an error if one of these is used to catch
+a checked exception and `saferExceptions` is enabled.
+
+## Example
+
+That's it. Let's see it in action in an example. First, add an import
+```scala
+import language.experimental.saferExceptions
+```
+to enable exception checking.
Now, define an exception `LimitExceeded` and +a function `f` like this: +```scala +val limit = 10e9 +class LimitExceeded extends Exception +def f(x: Double): Double = + if x < limit then x * x else throw LimitExceeded() +``` +You'll get this error message: +``` + if x < limit then x * x else throw LimitExceeded() + ^^^^^^^^^^^^^^^^^^^^^ +The capability to throw exception LimitExceeded is missing. +``` +The capability can be provided by one of the following: + + - Adding a using clause `(using CanThrow[LimitExceeded])` to the definition of the enclosing method + - Adding `throws LimitExceeded` clause after the result type of the enclosing method + - Wrapping this piece of code with a `try` block that catches `LimitExceeded` + +The following import might fix the problem: +```scala + import unsafeExceptions.canThrowAny +``` +As the error message implies, you have to declare that `f` needs the capability to throw a `LimitExceeded` exception. The most concise way to do so is to add a `throws` clause: +```scala +def f(x: Double): Double throws LimitExceeded = + if x < limit then x * x else throw LimitExceeded() +``` +Now put a call to `f` in a `try` that catches `LimitExceeded`: +```scala +@main def test(xs: Double*) = + try println(xs.map(f).sum) + catch case ex: LimitExceeded => println("too large") +``` +Run the program with some inputs: +``` +> scala test 1 2 3 +14.0 +> scala test +0.0 +> scala test 1 2 3 100000000000 +too large +``` +Everything typechecks and works as expected. But wait - we have called `map` without any ceremony! How did that work? Here's how the compiler expands the `test` function: +```scala +// compiler-generated code +@main def test(xs: Double*) = + try + erased given ctl: CanThrow[LimitExceeded] = compiletime.erasedValue + println(xs.map(x => f(x)(using ctl)).sum) + catch case ex: LimitExceeded => println("too large") +``` +The `CanThrow[LimitExceeded]` capability is passed in a synthesized `using` clause to `f`, since `f` requires it. 
Then the resulting closure is passed to `map`. The signature of `map` does not have to account for effects. It takes a closure as always, but that +closure may refer to capabilities in its free variables. This means that `map` is +already effect polymorphic even though we did not change its signature at all. +So the takeaway is that the effects as capabilities model naturally provides for effect polymorphism whereas this is something that other approaches struggle with. + +## Gradual Typing Via Imports + +Another advantage is that the model allows a gradual migration from current unchecked exceptions to safer exceptions. Imagine for a moment that [`experimental.saferExceptions`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$experimental$$saferExceptions$.html) is turned on everywhere. There would be lots of code that breaks since functions have not yet been properly annotated with `throws`. But it's easy to create an escape hatch that lets us ignore the breakages for a while: simply add the import +```scala +import scala.unsafeExceptions.canThrowAny +``` +This will provide the [`CanThrow`](https://scala-lang.org/api/3.x/scala/CanThrow.html) capability for any exception, and thereby allow +all throws and all other calls, no matter what the current state of `throws` declarations is. Here's the +definition of [`canThrowAny`](https://scala-lang.org/api/3.x/scala/unsafeExceptions$.html#canThrowAny-0): +```scala +package scala +object unsafeExceptions: + given canThrowAny: CanThrow[Exception] = ??? +``` +Of course, defining a global capability like this amounts to cheating. But the cheating is useful for gradual typing. The import could be used to migrate existing code, or to +enable more fluid explorations of code without regard for complete exception safety. At the end of these migrations or explorations the import should be removed. 
+
+## Scope Of the Extension
+
+To summarize, the extension for safer exception checking consists of the following elements:
+
+ - It adds to the standard library the class `scala.CanThrow`, the type `scala.$throws`, and the [`scala.unsafeExceptions`](https://scala-lang.org/api/3.x/scala/unsafeExceptions$.html) object, as they were described above.
+ - It adds some desugaring rules to rewrite `throws` types to cascaded `$throws` types.
+ - It augments the type checking of `throw` by _demanding_ a `CanThrow` capability for the thrown exception.
+ - It augments the type checking of `try` by _providing_ `CanThrow` capabilities for every caught exception.
+
+That's all. It's quite remarkable that one can do exception checking in this way without any special additions to the type system. We just need regular givens and context functions. Any runtime overhead is eliminated using `erased`.
+
+## Caveats
+
+Our capability model allows us to declare and check the thrown exceptions of first-order code. But as it stands, it does not give us enough mechanism to enforce the _absence_ of
+capabilities for arguments to higher-order functions. Consider a variant `pureMap`
+of `map` that should enforce that its argument does not throw exceptions or have any other effects (maybe because it wants to reorder computations transparently). Right now
+we cannot enforce that since the function argument to `pureMap` can capture arbitrary
+capabilities in its free variables without them showing up in its type. One possible way to
+address this would be to introduce a pure function type (maybe written `A -> B`). Pure functions are not allowed to close over capabilities. Then `pureMap` could be written
+like this:
+```scala
+  def pureMap(f: A -> B): List[B]
+```
+Another area where the lack of purity requirements shows up is when capabilities escape from bounded scopes.
Consider the following function +```scala +def escaped(xs: Double*): () => Int = + try () => xs.map(f).sum + catch case ex: LimitExceeded => -1 +``` +With the system presented here, this function typechecks, with expansion +```scala +// compiler-generated code +def escaped(xs: Double*): () => Int = + try + given ctl: CanThrow[LimitExceeded] = ??? + () => xs.map(x => f(x)(using ctl)).sum + catch case ex: LimitExceeded => -1 +``` +But if you try to call `escaped` like this +```scala +val g = escaped(1, 2, 1000000000) +g() +``` +the result will be a `LimitExceeded` exception thrown at the second line where `g` is called. What's missing is that `try` should enforce that the capabilities it generates do not escape as free variables in the result of its body. It makes sense to describe such scoped effects as _ephemeral capabilities_ - they have lifetimes that cannot be extended to delayed code in a lambda. + + +## Outlook + +We are working on a new class of type system that supports ephemeral capabilities by tracking the free variables of values. Once that research matures, it will hopefully be possible to augment the Scala language so that we can enforce the missing properties. + +And it would have many other applications besides: Exceptions are a special case of _algebraic effects_, which has been a very active research area over the last 20 years and is finding its way into programming languages (e.g. [Koka](https://koka-lang.github.io/koka/doc/book.html#why-handlers), [Eff](https://www.eff-lang.org/learn/), [Multicore OCaml](https://discuss.ocaml.org/t/multicore-ocaml-september-2021-effect-handlers-will-be-in-ocaml-5-0/8554), [Unison](https://www.unisonweb.org/docs/language-reference/#abilities-and-ability-handlers)). In fact, algebraic effects have been characterized as being equivalent to exceptions with an additional _resume_ operation. The techniques developed here for exceptions can probably be generalized to other classes of algebraic effects. 
+ +But even without these additional mechanisms, exception checking is already useful as it is. It gives a clear path forward to make code that uses exceptions safer, better documented, and easier to refactor. The only loophole arises for scoped capabilities - here we have to verify manually that these capabilities do not escape. Specifically, a `try` always has to be placed in the same computation stage as the throws that it enables. + +Put another way: If the status quo is 0% static checking since 100% is too painful, then an alternative that gives you 95% static checking with great ergonomics looks like a win. And we might still get to 100% in the future. + +For more info, see also our [paper at the ACM Scala Symposium 2021](https://infoscience.epfl.ch/record/290885). diff --git a/docs/_spec/TODOreference/experimental/cc.md b/docs/_spec/TODOreference/experimental/cc.md new file mode 100644 index 000000000000..878bc0a64ed6 --- /dev/null +++ b/docs/_spec/TODOreference/experimental/cc.md @@ -0,0 +1,738 @@ +--- +layout: doc-page +title: "Capture Checking" +--- + +Capture checking is a research project that modifies the Scala type system to track references to capabilities in values. It can be enabled with a `-Ycc` compiler option. +At present, capture checking is still highly experimental and unstable. + +To get an idea what capture checking can do, let's start with a small example: +```scala +def usingLogFile[T](op: FileOutputStream => T): T = + val logFile = FileOutputStream("log") + val result = op(logFile) + logFile.close() + result +``` +The `usingLogFile` method invokes a given operation with a fresh log file as parameter. Once the operation has ended, the log file is closed and the +operation's result is returned. This is a typical _try-with-resources_ pattern, similar to many other such patterns which are often supported by special language constructs in other languages. + +The problem is that `usingLogFile`'s implementation is not entirely safe. 
One can +undermine it by passing an operation that performs the logging at some later point +after it has terminated. For instance: +```scala +val later = usingLogFile { file => () => file.write(0) } +later() // crash +``` +When `later` is executed it tries to write to a file that is already closed, which +results in an uncaught `IOException`. + +Capture checking gives us the mechanism to prevent such errors _statically_. To +prevent unsafe usages of `usingLogFile`, we can declare it like this: +```scala +def usingLogFile[T](op: ({*} FileOutputStream) => T): T = + // same body as before +``` +The only thing that's changed is that the `FileOutputStream` parameter of `op` is now +tagged with `{*}`. We'll see that this turns the parameter into a _capability_ whose lifetime is tracked. + +If we now try to define the problematic value `later`, we get a static error: +``` + | val later = usingLogFile { f => () => f.write(0) } + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + |The expression's type {*} () -> Unit is not allowed to capture the root capability `*`. + |This usually means that a capability persists longer than its allowed lifetime. +``` +In this case, it was easy to see that the `logFile` capability escapes in the closure passed to `usingLogFile`. But capture checking also works for more complex cases. +For instance, capture checking is able to distinguish between the following safe code: +```scala +val xs = usingLogFile { f => + List(1, 2, 3).map { x => f.write(x); x * x } +} +``` +and the following unsafe one: +```scala +val xs = usingLogFile { f => + LazyList(1, 2, 3).map { x => f.write(x); x * x } +} +``` +An error would be issued in the second case, but not the first one (this assumes a capture-aware +formulation of `LazyList` which we will present later in this page). + +It turns out that capture checking has very broad applications. 
Besides the various +try-with-resources patterns, it can also be a key part to the solutions of many other long standing problems in programming languages. Among them: + + - How to have a simple and flexible system for checked exceptions. We show later + how capture checking enables a clean and fully safe system for checked exceptions in Scala. + - How to address the problem of effect polymorphism in general. + - How to solve the "what color is your function?" problem of mixing synchronous + and asynchronous computations. + - How to do region-based allocation, safely, + - How to reason about capabilities associated with memory locations. + +The following sections explain in detail how capture checking works in Scala 3. + + +## Overview + +The capture checker extension introduces a new kind of types and it enforces some rules for working with these types. + +Capture checking is enabled by the compiler option `-Ycc`. If the option is not given, the new +type forms can still be written but they are not checked for consistency, because they are +treated simply as certain uninterpreted annotated types. + +## Capabilities and Capturing Types + +Capture checking is done in terms of _capturing types_ of the form +`{c₁, ..., cᵢ} T`. Here `T` is a type, and `{c₁, ..., cᵢ}` is a _capture set_ consisting of references to capabilities `c₁, ..., cᵢ`. + +A _capability_ is syntactically a method- or class-parameter, a local variable, or the `this` of an enclosing class. The type of a capability +must be a capturing type with a non-empty capture set. We also say that +variables that are capabilities are _tracked_. + +In a sense, every +capability gets its authority from some other, more sweeping capability which it captures. The most sweeping capability, from which ultimately all others are derived is written `*`. We call it the _universal capability_. + +Here is an example: +```scala +class FileSystem + +class Logger(fs: {*} FileSystem): + def log(s: String): Unit = ... 
// Write to a log file, using `fs` + +def test(fs: {*} FileSystem) = + val l: {fs} Logger = Logger(fs) + l.log("hello world!") + val xs: {l} LazyList[Int] = + LazyList.from(1) + .map { i => + l.log(s"computing elem # $i") + i * i + } + xs +``` +Here, the `test` method takes a `FileSystem` as a parameter. `fs` is a capability since its type has a non-empty capture set. The capability is passed to the `Logger` constructor +and retained as a field in class `Logger`. Hence, the local variable `l` has type +`{fs} Logger`: it is a `Logger` which retains the `fs` capability. + +The second variable defined in `test` is `xs`, a lazy list that is obtained from +`LazyList.from(1)` by logging and mapping consecutive numbers. Since the list is lazy, +it needs to retain the reference to the logger `l` for its computations. Hence, the +type of the list is `{l} LazyList[Int]`. On the other hand, since `xs` only logs but does +not do other file operations, it retains the `fs` capability only indirectly. That's why +`fs` does not show up in the capture set of `xs`. + +Capturing types come with a subtype relation where types with "smaller" capture sets are subtypes of types with larger sets (the _subcapturing_ relation is defined in more detail below). If a type `T` does not have a capture set, it is called _pure_, and is a subtype of +any capturing type that adds a capture set to `T`. + +## Function Types + +The usual function type `A => B` now stands for a function that can capture arbitrary capabilities. We call such functions +_impure_. By contrast, the new single arrow function type `A -> B` stands for a function that cannot capture any capabilities, or otherwise said, is _pure_. One can add a capture set in front of an otherwise pure function. +For instance, `{c, d} A -> B` would be a function that can capture capabilities `c` and `d`, but no others. + +The impure function type `A => B` is treated as an alias for `{*} A -> B`. 
That is, impure functions are functions that can capture anything. + +Function types and captures both associate to the right, so +```scala +{c} A -> {d} B -> C +``` +is the same as +```scala +{c} (A -> {d} (B -> C)) +``` +Contrast with +```scala +({c} A) -> ({d} B) -> C +``` +which is a curried pure function over argument types that can capture `c` and `d`, respectively. + +Analogous conventions apply to context function types. `A ?=> B` is an impure context function, with `A ?-> B` as its pure complement. + +**Note 1:** The identifiers `->` and `?->` are now treated as soft keywords when used as infix type operators. They are +still available as regular identifiers for terms. For instance, the mapping syntax `Map("x" -> 1, "y" -> 2)` is still supported since it only applies to terms. + +**Note 2:** The distinctions between pure vs impure function types do not apply to methods. In fact, since methods are not values they never capture anything directly. References to +capabilities in a method are instead counted in the capture set of the enclosing object. + +## By-Name Parameter Types + +A convention analogous to function types also extends to by-name parameters. In +```scala +def f(x: => Int): Int +``` +the actual argument can refer to arbitrary capabilities. So the following would be OK: +```scala +f(if p(y) then throw Ex() else 1) +``` +On the other hand, if `f` was defined like this +```scala +def f(x: -> Int): Int +``` +the actual argument to `f` could not refer to any capabilities, so the call above would be rejected. +One can also allow specific capabilities like this: +```scala +def f(x: {c}-> Int): Int +``` +Here, the actual argument to `f` is allowed to use the `c` capability but no others. + +**Note**: It is strongly recommended to write the capability set and the arrow `->` without intervening spaces, +as otherwise the notation would look confusingly like a function type. + +## Subtyping and Subcapturing + +Capturing influences subtyping. 
As usual we write `T₁ <: T₂` to express that the type +`T₁` is a subtype of the type `T₂`, or equivalently, that `T₁` conforms to `T₂`. An +analogous _subcapturing_ relation applies to capture sets. If `C₁` and `C₂` are capture sets, we write `C₁ <: C₂` to express that `C₁` _is covered by_ `C₂`, or, swapping the operands, that `C₂` _covers_ `C₁`. + +Subtyping extends as follows to capturing types: + + - Pure types are subtypes of capturing types. That is, `T <: C T`, for any type `T`, capturing set `C`. + - For capturing types, smaller capturing sets produce subtypes: `C₁ T₁ <: C₂ T₂` if + `C₁ <: C₂` and `T₁ <: T₂`. + +A subcapturing relation `C₁ <: C₂` holds if `C₂` _accounts for_ every element `c` in `C₁`. This means one of the following three conditions must be true: + + - `c ∈ C₂`, + - `c` refers to a parameter of some class `Cls` and `C₂` contains `Cls.this`, + - `c`'s type has capturing set `C` and `C₂` accounts for every element of `C` (that is, `C <: C₂`). + + +**Example 1.** Given +```scala +fs: {*} FileSystem +ct: {*} CanThrow[Exception] +l : {fs} Logger +``` +we have +``` +{l} <: {fs} <: {*} +{fs} <: {fs, ct} <: {*} +{ct} <: {fs, ct} <: {*} +``` +The set consisting of the root capability `{*}` covers every other capture set. This is +a consequence of the fact that, ultimately, every capability is created from `*`. + +**Example 2.** Consider again the FileSystem/Logger example from before. `LazyList[Int]` is a proper subtype of `{l} LazyList[Int]`. So if the `test` method in that example +was declared with a result type `LazyList[Int]`, we'd get a type error. Here is the error message: +``` +11 |def test(using fs: {*} FileSystem): LazyList[Int] = { + | ^ + | Found: {fs} LazyList[Int] + | Required: LazyList[Int] +``` +Why does it say `{fs} LazyList[Int]` and not `{l} LazyList[Int]`, which is, after all, the type of the returned value `xs`? 
The reason is that `l` is a local variable in the body of `test`, so it cannot be referred to in a type outside that body. What happens instead is that the type is _widened_ to the smallest supertype that does not mention `l`. Since `l` has capture set `fs`, we have that `{fs}` covers `{l}`, and `{fs}` is acceptable in a result type of `test`, so `{fs}` is the result of that widening. +This widening is called _avoidance_; it is not specific to capture checking but applies to all variable references in Scala types. + +## Capability Classes + +Classes like `CanThrow` or `FileSystem` have the property that their values are always intended to be capabilities. We can make this intention explicit and save boilerplate by declaring these classes with a `@capability` annotation. + +The capture set of a capability class type is always `{*}`. This means we could equivalently express the `FileSystem` and `Logger` classes as follows: +```scala +import annotation.capability + +@capability class FileSystem + +class Logger(using FileSystem): + def log(s: String): Unit = ??? + +def test(using fs: FileSystem) = + val l: {fs} Logger = Logger() + ... +``` +In this version, `FileSystem` is a capability class, which means that the `{*}` capture set is implied on the parameters of `Logger` and `test`. Writing the capture set explicitly produces a warning: +```scala +class Logger(using {*} FileSystem): + ^^^^^^^^^^^^^^ + redundant capture: FileSystem already accounts for * +``` +Another, unrelated change in the version of the last example here is that the `FileSystem` capability is now passed as an implicit parameter. It is quite natural to model capabilities with implicit parameters since it greatly reduces the wiring overhead once multiple capabilities are in play. + +## Capture Checking of Closures + +If a closure refers to capabilities in its body, it captures these capabilities in its type. 
For instance, consider: +```scala +def test(fs: FileSystem): {fs} String -> Unit = + (x: String) => Logger(fs).log(x) +``` +Here, the body of `test` is a lambda that refers to the capability `fs`, which means that `fs` is retained in the lambda. +Consequently, the type of the lambda is `{fs} String -> Unit`. + +**Note:** Function values are always written with `=>` (or `?=>` for context functions). There is no syntactic +distinction for pure _vs_ impure function values. The distinction is only made in their types. + +A closure also captures all capabilities that are captured by the functions +it calls. For instance, in +```scala +def test(fs: FileSystem) = + def f() = g() + def g() = (x: String) => Logger(fs).log(x) + f +``` +the result of `test` has type `{fs} String -> Unit` even though function `f` itself does not refer to `fs`. + +## Capture Checking of Classes + +The principles for capture checking closures also apply to classes. For instance, consider: +```scala +class Logger(using fs: FileSystem): + def log(s: String): Unit = ... summon[FileSystem] ... + +def test(xfs: FileSystem): {xfs} Logger = + Logger(xfs) +``` +Here, class `Logger` retains the capability `fs` as a (private) field. Hence, the result +of `test` is of type `{xfs} Logger` + +Sometimes, a tracked capability is meant to be used only in the constructor of a class, but +is not intended to be retained as a field. This fact can be communicated to the capture +checker by declaring the parameter as `@constructorOnly`. Example: +```scala +import annotation.constructorOnly + +class NullLogger(using @constructorOnly fs: FileSystem): + ... +def test2(using fs: FileSystem): NullLogger = NullLogger() // OK +``` + +The captured references of a class include _local capabilities_ and _argument capabilities_. Local capabilities are capabilities defined outside the class and referenced from its body. Argument capabilities are passed as parameters to the primary constructor of the class. 
Local capabilities are inherited: +the local capabilities of a superclass are also local capabilities of its subclasses. Example: + +```scala +@capability class Cap + +def test(a: Cap, b: Cap, c: Cap) = + class Super(y: Cap): + def f = a + class Sub(x: Cap) extends Super(x) + def g = b + Sub(c) +``` +Here class `Super` has local capability `a`, which gets inherited by class +`Sub` and is combined with `Sub`'s own local capability `b`. Class `Sub` also has an argument capability corresponding to its parameter `x`. This capability gets instantiated to `c` in the final constructor call `Sub(c)`. Hence, +the capture set of that call is `{a, b, c}`. + +The capture set of the type of `this` of a class is inferred by the capture checker, unless the type is explicitly declared with a self type annotation like this one: +```scala +class C: + self: {a, b} D => ... +``` +The inference observes the following constraints: + + - The type of `this` of a class `C` includes all captured references of `C`. + - The type of `this` of a class `C` is a subtype of the type of `this` + of each parent class of `C`. + - The type of `this` must observe all constraints where `this` is used. + +For instance, in +```scala +@capability class Cap +def test(c: Cap) = + class A: + val x: A = this + def f = println(c) // error +``` +we know that the type of `this` must be pure, since `this` is the right hand side of a `val` with type `A`. However, in the last line we find that the capture set of the class, and with it the capture set of `this`, would include `c`. 
This leads to a contradiction, and hence to a checking error: +``` +16 | def f = println(c) // error + | ^ + |(c : Cap) cannot be referenced here; it is not included in the allowed capture set {} +``` + +## Capture Tunnelling + +Consider the following simple definition of a `Pair` class: +```scala +class Pair[+A, +B](x: A, y: B): + def fst: A = x + def snd: B = y +``` +What happens if `Pair` is instantiated like this (assuming `ct` and `fs` are two capabilities in scope)? +```scala +def x: {ct} Int -> String +def y: {fs} Logger +def p = Pair(x, y) +``` +The last line will be typed as follows: +```scala +def p: Pair[{ct} Int -> String, {fs} Logger] = Pair(x, y) +``` +This might seem surprising. The `Pair(x, y)` value does capture capabilities `ct` and `fs`. Why don't they show up in its type at the outside? + +The answer is capture tunnelling. Once a type variable is instantiated to a capturing type, the +capture is not propagated beyond this point. On the other hand, if the type variable is instantiated +again on access, the capture information "pops out" again. For instance, even though `p` is technically pure because its capture set is empty, writing `p.fst` would record a reference to the captured capability `ct`. So if this access was put in a closure, the capability would again form part of the outer capture set. E.g. +```scala +() => p.fst : {ct} () -> {ct} Int -> String +``` +In other words, references to capabilities "tunnel through" in generic instantiations from creation to access; they do not affect the capture set of the enclosing generic data constructor applications. +This principle plays an important part in making capture checking concise and practical. + +## Escape Checking + +The universal capability `*` should be conceptually available only as a parameter to the main program. Indeed, if it was available everywhere, capability checking would be undermined since one could mint new capabilities +at will. 
In line with this reasoning, some capture sets are restricted so that
+they are not allowed to contain the universal capability.
+
+Specifically, if a capturing type is an instance of a type variable, that capturing type
+is not allowed to carry the universal capability `{*}`. There's a connection to tunnelling here.
+The capture set of a type has to be present in the environment when a type is instantiated from
+a type variable. But `*` is not itself available as a global entity in the environment. Hence,
+an error should result.
+
+We can now reconstruct how this principle produced the error in the introductory example, where
+`usingLogFile` was declared like this:
+```scala
+def usingLogFile[T](op: ({*} FileOutputStream) => T): T = ...
+```
+The error message was:
+```
+  |  val later = usingLogFile { f => () => f.write(0) }
+  |              ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+  |The expression's type {*} () -> Unit is not allowed to capture the root capability `*`.
+  |This usually means that a capability persists longer than its allowed lifetime.
+```
+This error message was produced by the following logic:
+
+ - The `f` parameter has type `{*} FileOutputStream`, which makes it a capability.
+ - Therefore, the type of the expression `() => f.write(0)` is `{f} () -> Unit`.
+ - This makes the type of the whole closure passed to `usingLogFile` the dependent function type
+   `(f: {*} FileOutputStream) -> {f} () -> Unit`.
+ - The expected type of the closure is a simple, parametric, impure function type `({*} FileOutputStream) => T`,
+   for some instantiation of the type variable `T`.
+ - The smallest supertype of the closure's dependent function type that is a parametric function type is
+   `({*} FileOutputStream) => {*} () -> Unit`
+ - Hence, the type variable `T` is instantiated to `{*} () -> Unit`, which causes the error.
+
+An analogous restriction applies to the type of a mutable variable.
+Another way one could try to undermine capture checking would be to +assign a closure with a local capability to a global variable. Maybe +like this: +```scala +var loophole: {*} () -> Unit = () => () +usingLogFile { f => + loophole = () => f.write(0) +} +loophole() +``` +But this will not compile either, since mutable variables cannot have universal capture sets. + +One also needs to prevent returning or assigning a closure with a local capability in an argument of a parametric type. For instance, here is a +slightly more refined attack: +```scala +class Cell[+A](x: A) +val sneaky = usingLogFile { f => Cell(() => f.write(0)) } +sneaky.x() +``` +At the point where the `Cell` is created, the capture set of the argument is `f`, which +is OK. But at the point of use, it is `*` (because `f` is no longer in scope), which causes again an error: +``` + | sneaky.x() + | ^^^^^^^^ + |The expression's type {*} () -> Unit is not allowed to capture the root capability `*`. + |This usually means that a capability persists longer than its allowed lifetime. +``` + +Looking at object graphs, we observe a monotonicity property: The capture set of an object `x` covers the capture sets of all objects reachable through `x`. This property is reflected in the type system by the following _monotonicity rule_: + + - In a class `C` with a field `f`, the capture set `{this}` covers the capture set `{this.f}` as well as the capture set of any application of `this.f` to pure arguments. + +## Checked Exceptions + +Scala enables checked exceptions through a language import. Here is an example, +taken from the [safer exceptions page](./canthrow.md), and also described in a +[paper](https://infoscience.epfl.ch/record/290885) presented at the + 2021 Scala Symposium. 
+```scala +import language.experimental.saferExceptions + +class LimitExceeded extends Exception + +val limit = 10e+10 +def f(x: Double): Double throws LimitExceeded = + if x < limit then x * x else throw LimitExceeded() +``` +The new `throws` clause expands into an implicit parameter that provides +a `CanThrow` capability. Hence, function `f` could equivalently be written +like this: +```scala +def f(x: Double)(using CanThrow[LimitExceeded]): Double = ... +``` +If the implicit parameter is missing, an error is reported. For instance, the function definition +```scala +def g(x: Double): Double = + if x < limit then x * x else throw LimitExceeded() +``` +is rejected with this error message: +``` + | if x < limit then x * x else throw LimitExceeded() + | ^^^^^^^^^^^^^^^^^^^^^ + |The capability to throw exception LimitExceeded is missing. + |The capability can be provided by one of the following: + | - Adding a using clause `(using CanThrow[LimitExceeded])` to the definition of the enclosing method + | - Adding `throws LimitExceeded` clause after the result type of the enclosing method + | - Wrapping this piece of code with a `try` block that catches LimitExceeded +``` +`CanThrow` capabilities are required by `throw` expressions and are created +by `try` expressions. For instance, the expression +```scala +try xs.map(f).sum +catch case ex: LimitExceeded => -1 +``` +would be expanded by the compiler to something like the following: +```scala +try + erased given ctl: CanThrow[LimitExceeded] = compiletime.erasedValue + xs.map(f).sum +catch case ex: LimitExceeded => -1 +``` +(The `ctl` capability is only used for type checking but need not show up in the generated code, so it can be declared as +erased.) + +As with other capability based schemes, one needs to guard against capabilities +that are captured in results. 
For instance, here is a problematic use case: +```scala +def escaped(xs: Double*): (() => Double) throws LimitExceeded = + try () => xs.map(f).sum + catch case ex: LimitExceeded => () => -1 +val crasher = escaped(1, 2, 10e+11) +crasher() +``` +This code needs to be rejected since otherwise the call to `crasher()` would cause +an unhandled `LimitExceeded` exception to be thrown. + +Under `-Ycc`, the code is indeed rejected +``` +14 | try () => xs.map(f).sum + | ^ + |The expression's type {*} () -> Double is not allowed to capture the root capability `*`. + |This usually means that a capability persists longer than its allowed lifetime. +15 | catch case ex: LimitExceeded => () => -1 +``` +To integrate exception and capture checking, only two changes are needed: + + - `CanThrow` is declared as a `@capability` class, so all references to `CanThrow` instances are tracked. + - Escape checking is extended to `try` expressions. The result type of a `try` is not allowed to + capture the universal capability. + +## A Larger Example + +As a larger example, we present an implementation of lazy lists and some use cases. For simplicity, +our lists are lazy only in their tail part. This corresponds to what the Scala-2 type `Stream` did, whereas Scala 3's `LazyList` type computes strictly less since it is also lazy in the first argument. + +Here is the base trait `LzyList` for our version of lazy lists: +```scala +trait LzyList[+A]: + def isEmpty: Boolean + def head: A + def tail: {this} LzyList[A] +``` +Note that `tail` carries a capture annotation. It says that the tail of a lazy list can +potentially capture the same references as the lazy list as a whole. + +The empty case of a `LzyList` is written as usual: +```scala +object LzyNil extends LzyList[Nothing]: + def isEmpty = true + def head = ??? + def tail = ??? 
+``` +Here is a formulation of the class for lazy cons nodes: +```scala +import scala.compiletime.uninitialized + +final class LzyCons[+A](hd: A, tl: () => {*} LzyList[A]) extends LzyList[A]: + private var forced = false + private var cache: {this} LzyList[A] = uninitialized + private def force = + if !forced then { cache = tl(); forced = true } + cache + + def isEmpty = false + def head = hd + def tail: {this} LzyList[A] = force +end LzyCons +``` +The `LzyCons` class takes two parameters: A head `hd` and a tail `tl`, which is a function +returning a `LzyList`. Both the function and its result can capture arbitrary capabilities. +The result of applying the function is memoized after the first dereference of `tail` in +the private mutable field `cache`. Note that the typing of the assignment `cache = tl()` relies on the monotonicity rule for `{this}` capture sets. + +Here is an extension method to define an infix cons operator `#:` for lazy lists. It is analogous +to `::` but instead of a strict list it produces a lazy list without evaluating its right operand. +```scala +extension [A](x: A) + def #:(xs1: => {*} LzyList[A]): {xs1} LzyList[A] = + LzyCons(x, () => xs1) +``` +Note that `#:` takes an impure call-by-name parameter `xs1` as its right argument. The result +of `#:` is a lazy list that captures that argument. + +As an example usage of `#:`, here is a method `tabulate` that creates a lazy list +of given length with a generator function `gen`. The generator function is allowed +to have side effects. 
+```scala +def tabulate[A](n: Int)(gen: Int => A) = + def recur(i: Int): {gen} LzyList[A] = + if i == n then LzyNil + else gen(i) #: recur(i + 1) + recur(0) +``` +Here is a use of `tabulate`: +```scala +class LimitExceeded extends Exception +def squares(n: Int)(using ct: CanThrow[LimitExceeded]) = + tabulate(10) { i => + if i > 9 then throw LimitExceeded() + i * i + } +``` +The inferred result type of `squares` is `{ct} LzyList[Int]`, i.e it is a lazy list of +`Int`s that can throw the `LimitExceeded` exception when it is elaborated by calling `tail` +one or more times. + +Here are some further extension methods for mapping, filtering, and concatenating lazy lists: +```scala +extension [A](xs: {*} LzyList[A]) + def map[B](f: A => B): {xs, f} LzyList[B] = + if xs.isEmpty then LzyNil + else f(xs.head) #: xs.tail.map(f) + + def filter(p: A => Boolean): {xs, p} LzyList[A] = + if xs.isEmpty then LzyNil + else if p(xs.head) then xs.head #: xs.tail.filter(p) + else xs.tail.filter(p) + + def concat(ys: {*} LzyList[A]): {xs, ys} LzyList[A] = + if xs.isEmpty then ys + else xs.head #: xs.tail.concat(ys) + + def drop(n: Int): {xs} LzyList[A] = + if n == 0 then xs else xs.tail.drop(n - 1) +``` +Their capture annotations are all as one would expect: + + - Mapping a lazy list produces a lazy list that captures the original list as well + as the (possibly impure) mapping function. + - Filtering a lazy list produces a lazy list that captures the original list as well + as the (possibly impure) filtering predicate. + - Concatenating two lazy lists produces a lazy list that captures both arguments. + - Dropping elements from a lazy list gives a safe approximation where the original list is captured in the result. In fact, it's only some suffix of the list that is retained at run time, but our modelling identifies lazy lists and their suffixes, so this additional knowledge would not be useful. + +Of course the function passed to `map` or `filter` could also be pure. 
After all, `A -> B` is a subtype of `{*} A -> B` which is the same as `A => B`. In that case, the pure function +argument will _not_ show up in the result type of `map` or `filter`. For instance: +```scala +val xs = squares(10) +val ys: {xs} LzyList[Int] = xs.map(_ + 1) +``` +The type of the mapped list `ys` has only `xs` in its capture set. The actual function +argument does not show up since it is pure. Likewise, if the lazy list +`xs` was pure, it would not show up in any of the method results. +This demonstrates that capability-based +effect systems with capture checking are naturally _effect polymorphic_. + +This concludes our example. It's worth mentioning that an equivalent program defining and using standard, strict lists would require no capture annotations whatsoever. It would compile exactly as written now in standard Scala 3, yet one gets the capture checking for free. Essentially, `=>` already means "can capture anything" and since in a strict list side effecting operations are not retained in the result, there are no additional captures to record. A strict list could of course capture side-effecting closures in its elements but then tunnelling applies, since +these elements are represented by a type variable. This means we don't need to annotate anything there either. + +Another possibility would be a variant of lazy lists that requires all functions passed to `map`, `filter` and other operations like it to be pure. E.g. `map` on such a list would be defined like this: +```scala +extension [A](xs: LzyList[A]) + def map[B](f: A -> B): LzyList[B] = ... +``` +That variant would not require any capture annotations either. + +To summarize, there are two "sweet spots" of data structure design: strict lists in +side-effecting or resource-aware code and lazy lists in purely functional code. +Both are already correctly capture-typed without requiring any explicit annotations. 
Capture annotations only come into play where the semantics gets more complicated because we deal with delayed effects such as in impure lazy lists or side-effecting iterators over strict lists. This property is probably one of the greatest plus points of our approach to capture checking compared to previous techniques which tend to be more noisy. + +## Function Type Shorthands + +TBD + +## Compilation Options + +The following options are relevant for capture checking. + + - **-Ycc** Enables capture checking. + - **-Xprint:cc** Prints the program with capturing types as inferred by capture checking. + - **-Ycc-debug** Gives more detailed, implementation-oriented information about capture checking, as described in the next section. + + The implementation supporting capture checking with these options is currently in branch `cc-experiment` on dotty.epfl.ch. + +## Capture Checking Internals + +The capture checker is architected as a propagation constraint solver, which runs as a separate phase after type-checking and some initial transformations. + +Constraint variables stand for unknown capture sets. A constraint variable is introduced + + - for every part of a previously inferred type, + - for the accessed references of every method, class, anonymous function, or by-name argument, + - for the parameters passed in a class constructor call. + +Capture sets in explicitly written types are treated as constants (before capture checking, such sets are simply ignored). + +The capture checker essentially rechecks the program with the usual typing rules. Every time a subtype requirement between capturing types is checked, this translates to a subcapturing test on capture sets. If the two sets are constant, this is simply a yes/no question, where a no will produce an error message. + +If the lower set `C₁` of a comparison `C₁ <: C₂` is a variable, the set `C₂` is recorded +as a _superset_ of `C₁`. 
If the upper set `C₂` is a variable, the elements of `C₁` are _propagated_ to `C₂`. Propagation of an element `x` to a set `C` means that `x` is included as an element in `C`, and it is also propagated +to all known supersets of `C`. If such a superset is a constant, it is checked that `x` is included in it. If that's not the case, the original comparison `C₁ <: C₂` has no solution and an error is reported. + +The type checker also performs various maps on types, for instance when substituting actual argument types for formal parameter types in dependent functions, or mapping +member types with "as-seen-from" in a selection. Maps keep track of the variance +of positions in a type. The variance is initially covariant, it flips to +contravariant in function parameter positions, and can be either covariant, +contravariant, or nonvariant in type arguments, depending on the variance of +the type parameter. + +When capture checking, the same maps are also performed on capture sets. If a capture set is a constant, its elements (which are capabilities) are mapped as regular types. If the result of such a map is not a capability, the result is approximated according to the variance of the type. A covariant approximation replaces a type by its capture set. +A contravariant approximation replaces it with the empty capture set. A nonvariant +approximation replaces the enclosing capturing type with a range of possible types +that gets propagated and resolved further out. + +When a mapping `m` is performed on a capture set variable `C`, a new variable `Cm` is created that contains the mapped elements and that is linked with `C`. If `C` subsequently acquires further elements through propagation, these are also propagated to `Cm` after being transformed by the `m` mapping. `Cm` also gets the same supersets as `C`, mapped again using `m`. + +One interesting aspect of the capture checker concerns the implementation of capture tunnelling. 
The [foundational theory](https://infoscience.epfl.ch/record/290885) on which capture checking is based makes tunnelling explicit through so-called _box_ and +_unbox_ operations. Boxing hides a capture set and unboxing recovers it. The capture checker inserts virtual box and unbox operations based on actual and expected types similar to the way the type checker inserts implicit conversions. When capture set variables are first introduced, any capture set in a capturing type that is an instance of a type parameter instance is marked as "boxed". A boxing operation is +inserted if the expected type of an expression is a capturing type with +a boxed capture set variable. The effect of the insertion is that any references +to capabilities in the boxed expression are forgotten, which means that capture +propagation is stopped. Dually, if the actual type of an expression has +a boxed variable as capture set, an unbox operation is inserted, which adds all +elements of the capture set to the environment. + +Boxing and unboxing has no runtime effect, so the insertion of these operations is only simulated; the only visible effect is the retraction and insertion +of variables in the capture sets representing the environment of the currently checked expression. + +The `-Ycc-debug` option provides some insight into the workings of the capture checker. +When it is turned on, boxed sets are marked explicitly and capture set variables are printed with an ID and some information about their provenance. For instance, the string `{f, xs}33M5V` indicates a capture set +variable that is known to hold elements `f` and `xs`. The variable's ID is `33`. The `M` +indicates that the variable was created through a mapping from a variable with ID `5`. The latter is a regular variable, as indicated + by `V`. + +Generally, the string following the capture set consists of alternating numbers and letters where each number gives a variable ID and each letter gives the provenance of the variable. 
Possible letters are + + - `V` : a regular variable, + - `M` : a variable resulting from a _mapping_ of the variable indicated by the string to the right, + - `B` : similar to `M` but where the mapping is a _bijection_, + - `F` : a variable resulting from _filtering_ the elements of the variable indicated by the string to the right, + - `I` : a variable resulting from an _intersection_ of two capture sets, + - `D` : a variable resulting from the set _difference_ of two capture sets. + +At the end of a compilation run, `-Ycc-debug` will print all variable dependencies of variables referred to in previous output. Here is an example: +``` +Capture set dependencies: + {}2V :: + {}3V :: + {}4V :: + {f, xs}5V :: {f, xs}31M5V, {f, xs}32M5V + {f, xs}31M5V :: {xs, f} + {f, xs}32M5V :: +``` +This section lists all variables that appeared in previous diagnostics and their dependencies, recursively. For instance, we learn that + + - variables 2, 3, 4 are empty and have no dependencies, + - variable `5` has two dependencies: variables `31` and `32` which both result from mapping variable `5`, + - variable `31` has a constant fixed superset `{xs, f}` + - variable `32` has no dependencies. + diff --git a/docs/_spec/TODOreference/experimental/erased-defs-spec.md b/docs/_spec/TODOreference/experimental/erased-defs-spec.md new file mode 100644 index 000000000000..5395a8468399 --- /dev/null +++ b/docs/_spec/TODOreference/experimental/erased-defs-spec.md @@ -0,0 +1,64 @@ +--- +layout: doc-page +title: "Erased Definitions - More Details" +nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/erased-defs-spec.html +--- + +TODO: complete +## Rules + +1. `erased` is a soft modifier. It can appear: + * At the start of a parameter block of a method, function or class + * In a method definition + * In a `val` definition (but not `lazy val` or `var`) + * In a `class` or `trait` definition + + ```scala + erased val x = ... + erased def f = ... + + def g(erased x: Int) = ... 
+ + (erased x: Int) => ... + def h(x: (erased Int) => Int) = ... + + class K(erased x: Int) { ... } + erased class E {} + ``` + + +2. A reference to an `erased` val or def can only be used + * Inside the expression of argument to an `erased` parameter + * Inside the body of an `erased` `val` or `def` + + +3. Functions + * `(erased x1: T1, x2: T2, ..., xN: TN) => y : (erased T1, T2, ..., TN) => R` + * `(given erased x1: T1, x2: T2, ..., xN: TN) => y: (given erased T1, T2, ..., TN) => R` + * `(given erased T1) => R <:< erased T1 => R` + * `(given erased T1, T2) => R <:< (erased T1, T2) => R` + * ... + + Note that there is no subtype relation between `(erased T) => R` and `T => R` (or `(given erased T) => R` and `(given T) => R`) + + +4. Eta expansion + + if `def f(erased x: T): U` then `f: (erased T) => U`. + + +5. Erasure semantics + * All `erased` parameters are removed from the function + * All argument to `erased` parameters are not passed to the function + * All `erased` definitions are removed + * All `(erased T1, T2, ..., TN) => R` and `(given erased T1, T2, ..., TN) => R` become `() => R` + + +6. Overloading + + Method with `erased` parameters will follow the normal overloading constraints after erasure. + + +7. Overriding + * Member definitions overriding each other must both be `erased` or not be `erased` + * `def foo(x: T): U` cannot be overridden by `def foo(erased x: T): U` and vice-versa diff --git a/docs/_spec/TODOreference/experimental/erased-defs.md b/docs/_spec/TODOreference/experimental/erased-defs.md new file mode 100644 index 000000000000..28455f26cdc0 --- /dev/null +++ b/docs/_spec/TODOreference/experimental/erased-defs.md @@ -0,0 +1,231 @@ +--- +layout: doc-page +title: "Erased Definitions" +nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/erased-defs.html +--- + +`erased` is a modifier that expresses that some definition or expression is erased by the compiler instead of being represented in the compiled output. 
It is not yet part of the Scala language standard. To enable `erased`, turn on the language feature
+[`experimental.erasedDefinitions`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$experimental$$erasedDefinitions$.html). This can be done with a language import
+```scala
+import scala.language.experimental.erasedDefinitions
+```
+or by setting the command line option `-language:experimental.erasedDefinitions`.
+Erased definitions must be in an experimental scope (see [Experimental definitions](../other-new-features/experimental-defs.md)).
+
+## Why erased terms?
+
+Let's describe the motivation behind erased terms with an example. In the
+following we show a simple state machine which can be in a state `On` or `Off`.
+The machine can change state from `Off` to `On` with `turnedOn` only if it is
+currently `Off`. This last constraint is captured with the `IsOff[S]` contextual
+evidence which only exists for `IsOff[Off]`. For example, we do not allow calling
+`turnedOn` in an `On` state, as that would require an evidence of type
+`IsOff[On]` that will not be found.
+
+```scala
+sealed trait State
+final class On extends State
+final class Off extends State
+
+@implicitNotFound("State must be Off")
+class IsOff[S <: State]
+object IsOff:
+  given isOff: IsOff[Off] = new IsOff[Off]
+
+class Machine[S <: State]:
+  def turnedOn(using IsOff[S]): Machine[On] = new Machine[On]
+
+val m = new Machine[Off]
+m.turnedOn
+m.turnedOn.turnedOn // ERROR
+//                  ^
+//                  State must be Off
+```
+
+Note that in the code above the actual context arguments for `IsOff` are never
+used at runtime; they serve only to establish the right constraints at compile
+time. As these terms are never used at runtime there is no real need to have
+them around, but they still need to be present in some form in the generated
+code to be able to do separate compilation and retain binary compatibility.
We
+introduce _erased terms_ to overcome this limitation: we are able to enforce the
+right constraints on terms at compile time. These terms have no run time
+semantics and they are completely erased.
+
+## How to define erased terms?
+
+Parameters of methods and functions can be declared as erased, placing `erased`
+in front of a parameter list (like `given`).
+
+```scala
+def methodWithErasedEv(erased ev: Ev): Int = 42
+
+val lambdaWithErasedEv: erased Ev => Int =
+  (erased ev: Ev) => 42
+```
+
+`erased` parameters will not be usable for computations, though they can be used
+as arguments to other `erased` parameters.
+
+```scala
+def methodWithErasedInt1(erased i: Int): Int =
+  i + 42 // ERROR: can not use i
+
+def methodWithErasedInt2(erased i: Int): Int =
+  methodWithErasedInt1(i) // OK
+```
+
+Not only parameters can be marked as erased, `val` and `def` can also be marked
+with `erased`. These will also only be usable as arguments to `erased`
+parameters.
+
+```scala
+erased val erasedEvidence: Ev = ...
+methodWithErasedEv(erasedEvidence)
+```
+
+## What happens with erased values at runtime?
+
+As `erased` terms are guaranteed not to be used in computations, they can and will be
+erased.
+
+```scala
+// becomes def methodWithErasedEv(): Int at runtime
+def methodWithErasedEv(erased ev: Ev): Int = ...
+
+def evidence1: Ev = ...
+erased def erasedEvidence2: Ev = ... // does not exist at runtime
+erased val erasedEvidence3: Ev = ... // does not exist at runtime
+
+// evidence1 is not evaluated and no value is passed to methodWithErasedEv
+methodWithErasedEv(evidence1)
+```
+
+## State machine with erased evidence example
+
+The following example is an extended implementation of a simple state machine
+which can be in a state `On` or `Off`. The machine can change state from `Off`
+to `On` with `turnedOn` only if it is currently `Off`, conversely from `On` to
+`Off` with `turnedOff` only if it is currently `On`.
These last constraints are
+captured with the `IsOff[S]` and `IsOn[S]` given evidence, which only exist for
+`IsOff[Off]` and `IsOn[On]`. For example, we do not allow calling `turnedOff` in
+an `Off` state, as that would require an evidence `IsOn[Off]` that will not be
+found.
+
+As the given evidence values of `turnedOn` and `turnedOff` are not used in the
+bodies of those functions we can mark them as `erased`. This will remove the
+evidence parameters at runtime, but we would still evaluate the `isOn` and
+`isOff` givens that were found as arguments. As `isOn` and `isOff` are not
+used except as `erased` arguments, we can mark them as `erased`, hence removing
+the evaluation of the `isOn` and `isOff` evidence values.
+
+```scala
+import scala.annotation.implicitNotFound
+
+sealed trait State
+final class On extends State
+final class Off extends State
+
+@implicitNotFound("State must be Off")
+class IsOff[S <: State]
+object IsOff:
+  // will not be called at runtime for turnedOn, the
+  // compiler will only require that this evidence exists
+  given IsOff[Off] = new IsOff[Off]
+
+@implicitNotFound("State must be On")
+class IsOn[S <: State]
+object IsOn:
+  // will not exist at runtime, the compiler will only
+  // require that this evidence exists at compile time
+  erased given IsOn[On] = new IsOn[On]
+
+class Machine[S <: State] private ():
+  // ev will disappear from both functions
+  def turnedOn(using erased ev: IsOff[S]): Machine[On] = new Machine[On]
+  def turnedOff(using erased ev: IsOn[S]): Machine[Off] = new Machine[Off]
+
+object Machine:
+  def newMachine(): Machine[Off] = new Machine[Off]
+
+@main def test =
+  val m = Machine.newMachine()
+  m.turnedOn
+  m.turnedOn.turnedOff
+
+  // m.turnedOff
+  //   ^
+  //   State must be On
+
+  // m.turnedOn.turnedOn
+  //   ^
+  //   State must be Off
+```
+
+Note that in [Inline](../metaprogramming/inline.md) we discussed `erasedValue` and inline
+matches.
`erasedValue` is implemented with `erased`, so the state machine above +can be encoded as follows: + +```scala +import scala.compiletime.* + +sealed trait State +final class On extends State +final class Off extends State + +class Machine[S <: State]: + transparent inline def turnOn(): Machine[On] = + inline erasedValue[S] match + case _: Off => new Machine[On] + case _: On => error("Turning on an already turned on machine") + + transparent inline def turnOff(): Machine[Off] = + inline erasedValue[S] match + case _: On => new Machine[Off] + case _: Off => error("Turning off an already turned off machine") + +object Machine: + def newMachine(): Machine[Off] = + println("newMachine") + new Machine[Off] +end Machine + +@main def test = + val m = Machine.newMachine() + m.turnOn() + m.turnOn().turnOff() + m.turnOn().turnOn() // error: Turning on an already turned on machine +``` + +## Erased Classes + +`erased` can also be used as a modifier for a class. An erased class is intended to be used only in erased definitions. If the type of a val definition or parameter is +a (possibly aliased, refined, or instantiated) erased class, the definition is assumed to be `erased` itself. Likewise, a method with an erased class return type is assumed to be `erased` itself. Since given instances expand to vals and defs, they are also assumed to be erased if the type they produce is an erased class. Finally +function types with erased classes as arguments turn into erased function types. + +Example: +```scala +erased class CanRead + +val x: CanRead = ... // `x` is turned into an erased val +val y: CanRead => Int = ... // the function is turned into an erased function +def f(x: CanRead) = ... // `f` takes an erased parameter +def g(): CanRead = ... // `g` is turned into an erased def +given CanRead = ... // the anonymous given is assumed to be erased +``` +The code above expands to +```scala +erased class CanRead + +erased val x: CanRead = ... +val y: (erased CanRead) => Int = ... 
+def f(erased x: CanRead) = ... +erased def g(): CanRead = ... +erased given CanRead = ... +``` +After erasure, it is checked that no references to values of erased classes remain and that no instances of erased classes are created. So the following would be an error: +```scala +val err: Any = CanRead() // error: illegal reference to erased class CanRead +``` +Here, the type of `err` is `Any`, so `err` is not considered erased. Yet its initializing value is a reference to the erased class `CanRead`. + +[More Details](./erased-defs-spec.md) diff --git a/docs/_spec/TODOreference/experimental/explicit-nulls.md b/docs/_spec/TODOreference/experimental/explicit-nulls.md new file mode 100644 index 000000000000..b3fa53429cfe --- /dev/null +++ b/docs/_spec/TODOreference/experimental/explicit-nulls.md @@ -0,0 +1,543 @@ +--- +layout: doc-page +title: "Explicit Nulls" +nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/explicit-nulls.html +--- + +Explicit nulls is an opt-in feature that modifies the Scala type system, which makes reference types +(anything that extends [`AnyRef`](https://scala-lang.org/api/3.x/scala/AnyRef.html)) _non-nullable_. + +This means the following code will no longer typecheck: + +```scala +val x: String = null // error: found `Null`, but required `String` +``` + +Instead, to mark a type as nullable we use a [union type](../new-types/union-types.md) + +```scala +val x: String | Null = null // ok +``` + +A nullable type could have null value during runtime; hence, it is not safe to select a member without checking its nullity. + +```scala +x.trim // error: trim is not member of String | Null +``` + +Explicit nulls are enabled via a `-Yexplicit-nulls` flag. + +Read on for details. + +## New Type Hierarchy + +Originally, `Null` is a subtype of all reference types. 
+ +!["Original Type Hierarchy"](images/explicit-nulls/scalaHierarchyWithMatchable.png) + +When explicit nulls is enabled, the type hierarchy changes so that `Null` is only +a subtype of `Any` and `Matchable`, as opposed to every reference type, +which means `null` is no longer a value of `AnyRef` and its subtypes. + +This is the new type hierarchy: + +!["Type Hierarchy for Explicit Nulls"](images/explicit-nulls/scalaHierarchyWithMatchableAndSafeNull.png) + +After erasure, `Null` remains a subtype of all reference types (as forced by the JVM). + +## Working with `Null` + +To make working with nullable values easier, we propose adding a few utilities to the standard library. +So far, we have found the following useful: + +- An extension method `.nn` to "cast away" nullability + + ```scala + extension [T](x: T | Null) + inline def nn: T = + assert(x != null) + x.asInstanceOf[T] + ``` + + This means that given `x: String | Null`, `x.nn` has type `String`, so we can call all the + usual methods on it. Of course, `x.nn` will throw a NPE if `x` is `null`. + + Don't use `.nn` on mutable variables directly, because it may introduce an unknown type into the type of the variable. + +- An [`unsafeNulls`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$unsafeNulls$.html) language feature. + + When imported, `T | Null` can be used as `T`, similar to regular Scala (without explicit nulls). + + See [UnsafeNulls](#unsafenulls) section for more details. + +## Unsoundness + +The new type system is unsound with respect to `null`. This means there are still instances where an expression has a non-nullable type like `String`, but its value is actually `null`. + +The unsoundness happens because uninitialized fields in a class start out as `null`: + +```scala +class C: + val f: String = foo(f) + def foo(f2: String): String = f2 + +val c = new C() +// c.f == "field is null" +``` + +The unsoundness above can be caught by the compiler with the option `-Ysafe-init`. 
+More details can be found in [safe initialization](../other-new-features/safe-initialization.md).
+
+## Equality
+
+We don't allow the double-equal (`==` and `!=`) and reference (`eq` and `ne`) comparison between
+`AnyRef` and `Null` anymore, since a variable with a non-nullable type cannot have `null` as value.
+`null` can only be compared with `Null`, nullable union (`T | Null`), or `Any` type.
+
+If, for some reason, we really want to compare `null` with non-null values, we have to provide a type hint (e.g. `: Any`).
+
+```scala
+val x: String = ???
+val y: String | Null = ???
+
+x == null // error: Values of types String and Null cannot be compared with == or !=
+x eq null // error
+"hello" == null // error
+
+y == null // ok
+y == x // ok
+
+(x: String | Null) == null // ok
+(x: Any) == null // ok
+```
+
+## Java Interoperability
+
+The Scala compiler can load Java classes in two ways: from source or from bytecode. In either case,
+when a Java class is loaded, we "patch" the type of its members to reflect that Java types
+remain implicitly nullable.
+
+Specifically, we patch
+
+- the type of fields
+
+- the argument type and return type of methods
+
+We illustrate the rules with the following examples:
+
+- The first two rules are easy: we nullify reference types but not value types.
+
+  ```java
+  class C {
+    String s;
+    int x;
+  }
+  ```
+
+  ==>
+
+  ```scala
+  class C:
+    val s: String | Null
+    val x: Int
+  ```
+
+- We nullify type parameters because in Java a type parameter is always nullable, so the following code compiles.
+
+  ```java
+  class C<T> { T foo() { return null; } }
+  ```
+
+  ==>
+
+  ```scala
+  class C[T] { def foo(): T | Null }
+  ```
+
+  Notice this rule is sometimes too conservative, as witnessed by
+
+  ```scala
+  class InScala:
+    val c: C[Bool] = ??? // C as above
+    val b: Bool = c.foo() // no longer typechecks, since foo now returns Bool | Null
+  ```
+
+- We can reduce the number of redundant nullable types we need to add. 
Consider + + ```java + class Box { T get(); } + class BoxFactory { Box makeBox(); } + ``` + + ==> + + ```scala + class Box[T] { def get(): T | Null } + class BoxFactory[T] { def makeBox(): Box[T] | Null } + ``` + + Suppose we have a `BoxFactory[String]`. Notice that calling `makeBox()` on it returns a + `Box[String] | Null`, not a `Box[String | Null] | Null`. This seems at first + glance unsound ("What if the box itself has `null` inside?"), but is sound because calling + `get()` on a `Box[String]` returns a `String | Null`. + + Notice that we need to patch _all_ Java-defined classes that transitively appear in the + argument or return type of a field or method accessible from the Scala code being compiled. + Absent crazy reflection magic, we think that all such Java classes _must_ be visible to + the Typer in the first place, so they will be patched. + +- We will append `Null` to the type arguments if the generic class is defined in Scala. + + ```java + class BoxFactory { + Box makeBox(); // Box is Scala-defined + List>> makeCrazyBoxes(); // List is Java-defined + } + ``` + + ==> + + ```scala + class BoxFactory[T]: + def makeBox(): Box[T | Null] | Null + def makeCrazyBoxes(): java.util.List[Box[java.util.List[T] | Null]] | Null + ``` + + In this case, since `Box` is Scala-defined, we will get `Box[T | Null] | Null`. + This is needed because our nullability function is only applied (modularly) to the Java + classes, but not to the Scala ones, so we need a way to tell `Box` that it contains a + nullable value. + + The `List` is Java-defined, so we don't append `Null` to its type argument. But we + still need to nullify its inside. 
+
+- We don't nullify _simple_ literal constant (`final`) fields, since they are known to be non-null
+
+  ```java
+  class Constants {
+    final String NAME = "name";
+    final int AGE = 0;
+    final char CHAR = 'a';
+
+    final String NAME_GENERATED = getNewName();
+  }
+  ```
+
+  ==>
+
+  ```scala
+  class Constants:
+    val NAME: String("name") = "name"
+    val AGE: Int(0) = 0
+    val CHAR: Char('a') = 'a'
+
+    val NAME_GENERATED: String | Null = getNewName()
+  ```
+
+- We don't append `Null` to a field nor to a return type of a method which is annotated with a
+  `NotNull` annotation.
+
+  ```java
+  class C {
+    @NotNull String name;
+    @NotNull List<String> getNames(String prefix); // List is Java-defined
+    @NotNull Box<String> getBoxedName(); // Box is Scala-defined
+  }
+  ```
+
+  ==>
+
+  ```scala
+  class C:
+    val name: String
+    def getNames(prefix: String | Null): java.util.List[String] // we still need to nullify the parameter types
+    def getBoxedName(): Box[String | Null] // we don't append `Null` to the outermost level, but we still need to nullify inside
+  ```
+
+  The annotation must be from the list below to be recognized as `NotNull` by the compiler.
+  Check `Definitions.scala` for an updated list.
+
+  ```scala
+  // A list of annotations that are commonly used to indicate
+  // that a field/method argument or return type is not null.
+  // These annotations are used by the nullification logic in
+  // JavaNullInterop to improve the precision of type nullification.
+  // We don't require that any of these annotations be present
+  // in the class path, but we want to create Symbols for the
+  // ones that are present, so they can be checked during nullification.
+  @tu lazy val NotNullAnnots: List[ClassSymbol] = ctx.getClassesIfDefined(
+    "javax.annotation.Nonnull" ::
+    "edu.umd.cs.findbugs.annotations.NonNull" ::
+    "androidx.annotation.NonNull" ::
+    "android.support.annotation.NonNull" ::
+    "android.annotation.NonNull" ::
+    "com.android.annotations.NonNull" ::
+    "org.eclipse.jdt.annotation.NonNull" ::
+    "org.checkerframework.checker.nullness.qual.NonNull" ::
+    "org.checkerframework.checker.nullness.compatqual.NonNullDecl" ::
+    "org.jetbrains.annotations.NotNull" ::
+    "lombok.NonNull" ::
+    "io.reactivex.annotations.NonNull" :: Nil map PreNamedString)
+  ```
+
+### Override check
+
+When we check overriding between Scala classes and Java classes, the rules are relaxed for [`Null`](https://scala-lang.org/api/3.x/scala/Null.html) type with this feature, in order to help users work with Java libraries.
+
+Suppose we have a Java method `String f(String x)`. We can override this method in Scala in any of the following forms:
+
+```scala
+def f(x: String | Null): String | Null
+
+def f(x: String): String | Null
+
+def f(x: String | Null): String
+
+def f(x: String): String
+```
+
+Note that some of the definitions could cause unsoundness. For example, the return type is not nullable, but a `null` value is actually returned.
+
+## Flow Typing
+
+We added a simple form of flow-sensitive type inference. The idea is that if `p` is a
+stable path or a trackable variable, then we can know that `p` is non-null if it's compared
+with `null`. This information can then be propagated to the `then` and `else` branches
+of an if-statement (among other places).
+
+Example:
+
+```scala
+val s: String | Null = ???
+if s != null then + // s: String + +// s: String | Null + +assert(s != null) +// s: String +``` + +A similar inference can be made for the `else` case if the test is `p == null` + +```scala +if s == null then + // s: String | Null +else + // s: String +``` + +`==` and `!=` is considered a comparison for the purposes of the flow inference. + +### Logical Operators + +We also support logical operators (`&&`, `||`, and `!`): + +```scala +val s: String | Null = ??? +val s2: String | Null = ??? +if s != null && s2 != null then + // s: String + // s2: String + +if s == null || s2 == null then + // s: String | Null + // s2: String | Null +else + // s: String + // s2: String +``` + +### Inside Conditions + +We also support type specialization _within_ the condition, taking into account that `&&` and `||` are short-circuiting: + +```scala +val s: String | Null = ??? + +if s != null && s.length > 0 then // s: String in `s.length > 0` + // s: String + +if s == null || s.length > 0 then // s: String in `s.length > 0` + // s: String | Null +else + // s: String +``` + +### Match Case + +The non-null cases can be detected in match statements. + +```scala +val s: String | Null = ??? + +s match + case _: String => // s: String + case _ => +``` + +### Mutable Variable + +We are able to detect the nullability of some local mutable variables. A simple example is: + +```scala +class C(val x: Int, val next: C | Null) + +var xs: C | Null = C(1, C(2, null)) +// xs is trackable, since all assignments are in the same method +while xs != null do + // xs: C + val xsx: Int = xs.x + val xscpy: C = xs + xs = xscpy // since xscpy is non-null, xs still has type C after this line + // xs: C + xs = xs.next // after this assignment, xs can be null again + // xs: C | Null +``` + +When dealing with local mutable variables, there are two questions: + +1. Whether to track a local mutable variable during flow typing. + We track a local mutable variable if the variable is not assigned in a closure. 
+ For example, in the following code `x` is assigned to by the closure `y`, so we do not + do flow typing on `x`. + + ```scala + var x: String | Null = ??? + def y = + x = null + + if x != null then + // y can be called here, which would break the fact + val a: String = x // error: x is captured and mutated by the closure, not trackable + ``` + +2. Whether to generate and use flow typing on a specific _use_ of a local mutable variable. + We only want to do flow typing on a use that belongs to the same method as the definition + of the local variable. + For example, in the following code, even `x` is not assigned to by a closure, we can only + use flow typing in one of the occurrences (because the other occurrence happens within a + nested closure). + + ```scala + var x: String | Null = ??? + def y = + if x != null then + // not safe to use the fact (x != null) here + // since y can be executed at the same time as the outer block + val _: String = x + if x != null then + val a: String = x // ok to use the fact here + x = null + ``` + +See [more examples](https://github.com/lampepfl/dotty/blob/main/tests/explicit-nulls/neg/flow-varref-in-closure.scala). + +Currently, we are unable to track paths with a mutable variable prefix. +For example, `x.a` if `x` is mutable. + +### Unsupported Idioms + +We don't support: + +- flow facts not related to nullability (`if x == 0 then { // x: 0.type not inferred }`) +- tracking aliasing between non-nullable paths + + ```scala + val s: String | Null = ??? + val s2: String | Null = ??? + if s != null && s == s2 then + // s: String inferred + // s2: String not inferred + ``` + +### UnsafeNulls + +It is difficult to work with many nullable values, we introduce a language feature [`unsafeNulls`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$unsafeNulls$.html). +Inside this "unsafe" scope, all `T | Null` values can be used as `T`. 
+
+Users can import [`scala.language.unsafeNulls`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$unsafeNulls$.html) to create such scopes, or use `-language:unsafeNulls` to enable this feature globally (for migration purpose only).
+
+Assuming `T` is a reference type (a subtype of `AnyRef`), the following unsafe operation rules are
+applied in this unsafe-nulls scope:
+
+1. the members of `T` can be found on `T | Null`
+
+2. a value with type `T` can be compared with `T | Null` and `Null`
+
+3. suppose `T1` is not a subtype of `T2` using explicit-nulls subtyping (where `Null` is a direct
+subtype of Any), extension methods and implicit conversions designed for `T2` can be used for
+`T1` if `T1` is a subtype of `T2` using regular subtyping rules (where `Null` is a subtype of every
+reference type)
+
+4. suppose `T1` is not a subtype of `T2` using explicit-nulls subtyping, a value with type `T1`
+can be used as `T2` if `T1` is a subtype of `T2` using regular subtyping rules
+
+Additionally, `null` can be used as `AnyRef` (`Object`), which means you can select `.eq` or `.toString` on it.
+
+The program in [`unsafeNulls`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$unsafeNulls$.html) will have a **similar** semantic as regular Scala, but not **equivalent**.
+
+For example, the following code cannot be compiled even using unsafe nulls. Because of the
+Java interoperation, the type of the get method becomes `T | Null`.
+
+```scala
+def head[T](xs: java.util.List[T]): T = xs.get(0) // error
+```
+
+Since the compiler doesn’t know whether `T` is a reference type, it is unable to cast `T | Null`
+to `T`. A `.nn` needs to be inserted after `xs.get(0)` manually by the user to fix the error, which
+strips the `Null` from its type.
+
+The intention of this [`unsafeNulls`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$unsafeNulls$.html) is to give users a better migration path for explicit nulls. 
+Projects for Scala 2 or regular Scala 3 can try this by adding `-Yexplicit-nulls -language:unsafeNulls` +to the compile options. A small number of manual modifications are expected. To migrate to the full +explicit nulls feature in the future, `-language:unsafeNulls` can be dropped and add +`import scala.language.unsafeNulls` only when needed. + +```scala +def f(x: String): String = ??? +def nullOf[T >: Null]: T = null + +import scala.language.unsafeNulls + +val s: String | Null = ??? +val a: String = s // unsafely convert String | Null to String + +val b1 = s.trim // call .trim on String | Null unsafely +val b2 = b1.length + +f(s).trim // pass String | Null as an argument of type String unsafely + +val c: String = null // Null to String + +val d1: Array[String] = ??? +val d2: Array[String | Null] = d1 // unsafely convert Array[String] to Array[String | Null] +val d3: Array[String] = Array(null) // unsafe + +class C[T >: Null <: String] // define a type bound with unsafe conflict bound + +val n = nullOf[String] // apply a type bound unsafely +``` + +Without the [`unsafeNulls`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$unsafeNulls$.html), all these unsafe operations will not be type-checked. + +[`unsafeNulls`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$unsafeNulls$.html) also works for extension methods and implicit search. + +```scala +import scala.language.unsafeNulls + +val x = "hello, world!".split(" ").map(_.length) + +given Conversion[String, Array[String]] = _ => ??? + +val y: String | Null = ??? +val z: Array[String | Null] = y +``` + +## Binary Compatibility + +Our strategy for binary compatibility with Scala binaries that predate explicit nulls +and new libraries compiled without `-Yexplicit-nulls` is to leave the types unchanged +and be compatible but unsound. 
+ +[More details](https://dotty.epfl.ch/docs/internals/explicit-nulls.html) diff --git a/docs/_spec/TODOreference/experimental/fewer-braces.md b/docs/_spec/TODOreference/experimental/fewer-braces.md new file mode 100644 index 000000000000..eb454886ad03 --- /dev/null +++ b/docs/_spec/TODOreference/experimental/fewer-braces.md @@ -0,0 +1,7 @@ +--- +layout: doc-page +title: "Fewer Braces" +nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/fewer-braces.html +--- + +The documentation contained in this file is now part of [./indentation.html]. \ No newline at end of file diff --git a/docs/_spec/TODOreference/experimental/main-annotation.md b/docs/_spec/TODOreference/experimental/main-annotation.md new file mode 100644 index 000000000000..0c60e1050b87 --- /dev/null +++ b/docs/_spec/TODOreference/experimental/main-annotation.md @@ -0,0 +1,97 @@ +--- +layout: doc-page +title: "MainAnnotation" +--- + +`MainAnnotation` provides a generic way to define main annotations such as `@main`. + +When a users annotates a method with an annotation that extends `MainAnnotation` a class with a `main` method will be generated. The main method will contain the code needed to parse the command line arguments and run the application. 
+
+```scala
+/** Sum all the numbers
+ *
+ * @param first First number to sum
+ * @param rest The rest of the numbers to sum
+ */
+@myMain def sum(first: Int, second: Int = 0, rest: Int*): Int = first + second + rest.sum
+```
+
+```scala
+object foo {
+  def main(args: Array[String]): Unit = {
+    val mainAnnot = new myMain()
+    val info = new Info(
+      name = "foo.main",
+      documentation = "Sum all the numbers",
+      parameters = Seq(
+        new Parameter("first", "scala.Int", hasDefault=false, isVarargs=false, "First number to sum", Seq()),
+        new Parameter("second", "scala.Int", hasDefault=true, isVarargs=false, "", Seq()),
+        new Parameter("rest", "scala.Int" , hasDefault=false, isVarargs=true, "The rest of the numbers to sum", Seq())
+      )
+    )
+    val mainArgsOpt = mainAnnot.command(info, args)
+    if mainArgsOpt.isDefined then
+      val mainArgs = mainArgsOpt.get
+      val args0 = mainAnnot.argGetter[Int](info.parameters(0), mainArgs(0), None) // using a parser of Int
+      val args1 = mainAnnot.argGetter[Int](info.parameters(1), mainArgs(1), Some(() => sum$default$1())) // using a parser of Int
+      val args2 = mainAnnot.varargGetter[Int](info.parameters(2), mainArgs.drop(2)) // using a parser of Int
+      mainAnnot.run(() => sum(args0(), args1(), args2()*))
+  }
+}
+```
+
+The implementation of the `main` method first instantiates the annotation and then calls `command`.
+When calling the `command`, the arguments can be checked and preprocessed.
+Then it defines a series of argument getters calling `argGetter` for each parameter and `varargGetter` for the last one if it is a varargs. `argGetter` gets an optional lambda that computes the default argument.
+Finally, the `run` method is called to run the application. It receives a by-name argument that contains the call to the annotated method with the instantiated arguments (using the lambdas from `argGetter`/`varargGetter`).
+
+
+Example of implementation of `myMain` that takes all arguments positionally. 
It used `util.CommandLineParser.FromString` and expects no default arguments. For simplicity, any errors in preprocessing or parsing results in crash. + +```scala +// Parser used to parse command line arguments +import scala.util.CommandLineParser.FromString[T] + +// Result type of the annotated method is Int and arguments are parsed using FromString +@experimental class myMain extends MainAnnotation[FromString, Int]: + import MainAnnotation.{ Info, Parameter } + + def command(info: Info, args: Seq[String]): Option[Seq[String]] = + if args.contains("--help") then + println(info.documentation) + None // do not parse or run the program + else if info.parameters.exists(_.hasDefault) then + println("Default arguments are not supported") + None + else if info.hasVarargs then + val numPlainArgs = info.parameters.length - 1 + if numPlainArgs > args.length then + println("Not enough arguments") + None + else + Some(args) + else + if info.parameters.length > args.length then + println("Not enough arguments") + None + else if info.parameters.length < args.length then + println("Too many arguments") + None + else + Some(args) + + def argGetter[T](param: Parameter, arg: String, defaultArgument: Option[() => T])(using parser: FromString[T]): () => T = + () => parser.fromString(arg) + + def varargGetter[T](param: Parameter, args: Seq[String])(using parser: FromString[T]): () => Seq[T] = + () => args.map(arg => parser.fromString(arg)) + + def run(program: () => Int): Unit = + println("executing program") + + val result = program() + println("result: " + result) + println("executed program") + +end myMain +``` diff --git a/docs/_spec/TODOreference/experimental/named-typeargs-spec.md b/docs/_spec/TODOreference/experimental/named-typeargs-spec.md new file mode 100644 index 000000000000..9e1113bbac86 --- /dev/null +++ b/docs/_spec/TODOreference/experimental/named-typeargs-spec.md @@ -0,0 +1,41 @@ +--- +layout: doc-page +title: "Named Type Arguments - More Details" +nightlyOf: 
https://docs.scala-lang.org/scala3/reference/experimental/named-typeargs-spec.html +--- + +In this section we give more details about the [named type arguments](named-typeargs.md) (*experimental*). + +## Syntax + +The addition to the grammar is: + +``` +SimpleExpr1 ::= ... + | SimpleExpr (TypeArgs | NamedTypeArgs) +NamedTypeArgs ::= ‘[’ NamedTypeArg {‘,’ NamedTypeArg} ‘]’ +NamedTypeArg ::= id ‘=’ Type +``` + +Note in particular that named arguments cannot be passed to type constructors: + +``` scala +class C[T] + +val x: C[T = Int] = // error + new C[T = Int] // error + +class E extends C[T = Int] // error +``` + +## Compatibility considerations + +Named type arguments do not have an impact on binary compatibility, but they +have an impact on source compatibility: if the name of a method type parameter +is changed, any existing named reference to this parameter will break. This +means that the names of method type parameters are now part of the public API +of a library. + +(Unimplemented proposal: to mitigate this, +[`scala.deprecatedName`](https://www.scala-lang.org/api/current/scala/deprecatedName.html) +could be extended to also be applicable on method type parameters.) diff --git a/docs/_spec/TODOreference/experimental/named-typeargs.md b/docs/_spec/TODOreference/experimental/named-typeargs.md new file mode 100644 index 000000000000..4928a40f8a6a --- /dev/null +++ b/docs/_spec/TODOreference/experimental/named-typeargs.md @@ -0,0 +1,34 @@ +--- +layout: doc-page +title: "Named Type Arguments" +redirectFrom: /docs/reference/other-new-features/named-typeargs.html +nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/named-typeargs.html +--- + +**Note:** This feature is implemented in Scala 3, but is not expected to be part of Scala 3.0. + +Type arguments of methods can now be specified by name as well as by position. Example: + +``` scala +def construct[Elem, Coll[_]](xs: Elem*): Coll[Elem] = ??? 
+ +val xs1 = construct[Coll = List, Elem = Int](1, 2, 3) +val xs2 = construct[Coll = List](1, 2, 3) +``` + +Similar to a named value argument `(x = e)`, a named type argument +`[X = T]` instantiates the type parameter `X` to the type `T`. +Named type arguments do not have to be in order (see `xs1` above) and +unspecified arguments are inferred by the compiler (see `xs2` above). +Type arguments must be all named or un-named, mixtures of named and +positional type arguments are not supported. + +## Motivation + +The main benefit of named type arguments is that unlike positional arguments, +you are allowed to omit passing arguments for some parameters, like in the +definition of `xs2` above. A missing type argument is inferred as usual by +local type inference. This is particularly useful in situations where some type +arguments can be easily inferred from others. + +[More details](./named-typeargs-spec.md) diff --git a/docs/_spec/TODOreference/experimental/numeric-literals.md b/docs/_spec/TODOreference/experimental/numeric-literals.md new file mode 100644 index 000000000000..f493ef459265 --- /dev/null +++ b/docs/_spec/TODOreference/experimental/numeric-literals.md @@ -0,0 +1,257 @@ +--- +layout: doc-page +title: "Numeric Literals" +nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/numeric-literals.html +--- + +**Note**: This feature is not yet part of the Scala 3 language definition. It can be made available by a language import: + +```scala +import scala.language.experimental.genericNumberLiterals +``` + +In Scala 2, numeric literals were confined to the primitive numeric types `Int`, `Long`, `Float`, and `Double`. Scala 3 allows to write numeric literals also for user-defined types. 
Example: + +```scala +val x: Long = -10_000_000_000 +val y: BigInt = 0x123_abc_789_def_345_678_901 +val z: BigDecimal = 110_222_799_799.99 + +(y: BigInt) match + case 123_456_789_012_345_678_901 => +``` + +The syntax of numeric literals is the same as before, except there are no pre-set limits +how large they can be. + +## Meaning of Numeric Literals + +The meaning of a numeric literal is determined as follows: + +- If the literal ends with `l` or `L`, it is a `Long` integer (and must fit in its legal range). +- If the literal ends with `f` or `F`, it is a single precision floating point number of type `Float`. +- If the literal ends with `d` or `D`, it is a double precision floating point number of type `Double`. + +In each of these cases the conversion to a number is exactly as in Scala 2 or in Java. If a numeric literal does _not_ end in one of these suffixes, its meaning is determined by the expected type: + +1. If the expected type is `Int`, `Long`, `Float`, or `Double`, the literal is + treated as a standard literal of that type. +2. If the expected type is a fully defined type `T` that has a given instance of type + [`scala.util.FromDigits[T]`](https://scala-lang.org/api/3.x/scala/util/FromDigits.html), the literal is converted to a value of type `T` by passing it as an argument to + the `fromDigits` method of that instance (more details below). +3. Otherwise, the literal is treated as a `Double` literal (if it has a decimal point or an + exponent), or as an `Int` literal (if not). (This last possibility is again as in Scala 2 or Java.) + +With these rules, the definition + +```scala +val x: Long = -10_000_000_000 +``` + +is legal by rule (1), since the expected type is `Long`. 
The definitions + +```scala +val y: BigInt = 0x123_abc_789_def_345_678_901 +val z: BigDecimal = 111222333444.55 +``` + +are legal by rule (2), since both `BigInt` and `BigDecimal` have [`FromDigits`](https://scala-lang.org/api/3.x/scala/util/FromDigits.html) instances (which implement the `FromDigits` subclasses [`FromDigits.WithRadix`](https://scala-lang.org/api/3.x/scala/util/FromDigits$$WithRadix.html) and [`FromDigits.Decimal`](https://scala-lang.org/api/3.x/scala/util/FromDigits$$Decimal.html), respectively). On the other hand, + +```scala +val x = -10_000_000_000 +``` + +gives a type error, since without an expected type `-10_000_000_000` is treated by rule (3) as an `Int` literal, but it is too large for that type. + +## The `FromDigits` Trait + +To allow numeric literals, a type simply has to define a `given` instance of the +[`scala.util.FromDigits`](https://scala-lang.org/api/3.x/scala/util/FromDigits.html) type class, or one of its subclasses. `FromDigits` is defined as follows: + +```scala +trait FromDigits[T]: + def fromDigits(digits: String): T +``` + +Implementations of `fromDigits` convert strings of digits to the values of the +implementation type `T`. +The `digits` string consists of digits between `0` and `9`, possibly preceded by a +sign ("+" or "-"). Number separator characters `_` are filtered out before +the string is passed to `fromDigits`. 
+ +The companion object [`FromDigits`](https://scala-lang.org/api/3.x/scala/util/FromDigits$.html) also defines subclasses of `FromDigits` for whole numbers with a given radix, for numbers with a decimal point, and for numbers that can have both a decimal point and an exponent: + +```scala +object FromDigits: + + /** A subclass of `FromDigits` that also allows to convert whole + * number literals with a radix other than 10 + */ + trait WithRadix[T] extends FromDigits[T]: + def fromDigits(digits: String): T = fromDigits(digits, 10) + def fromDigits(digits: String, radix: Int): T + + /** A subclass of `FromDigits` that also allows to convert number + * literals containing a decimal point ".". + */ + trait Decimal[T] extends FromDigits[T] + + /** A subclass of `FromDigits`that allows also to convert number + * literals containing a decimal point "." or an + * exponent `('e' | 'E')['+' | '-']digit digit*`. + */ + trait Floating[T] extends Decimal[T] +``` + +A user-defined number type can implement one of those, which signals to the compiler +that hexadecimal numbers, decimal points, or exponents are also accepted in literals +for this type. + +## Error Handling + +`FromDigits` implementations can signal errors by throwing exceptions of some subtype +of [`FromDigitsException`](https://scala-lang.org/api/3.x/scala/util/FromDigits$$FromDigitsException.html). `FromDigitsException` is defined with three subclasses in the +`FromDigits` object as follows: + +```scala +abstract class FromDigitsException(msg: String) extends NumberFormatException(msg) + +class NumberTooLarge (msg: String = "number too large") extends FromDigitsException(msg) +class NumberTooSmall (msg: String = "number too small") extends FromDigitsException(msg) +class MalformedNumber(msg: String = "malformed number literal") extends FromDigitsException(msg) +``` + +## Example + +As a fully worked out example, here is an implementation of a new numeric class, `BigFloat`, that accepts numeric literals. 
`BigFloat` is defined in terms of a `BigInt` mantissa and an `Int` exponent: + +```scala +case class BigFloat(mantissa: BigInt, exponent: Int): + override def toString = s"${mantissa}e${exponent}" +``` + +`BigFloat` literals can have a decimal point as well as an exponent. E.g. the following expression +should produce the `BigFloat` number `BigFloat(-123, 997)`: + +```scala +-0.123E+1000: BigFloat +``` + +The companion object of `BigFloat` defines an `apply` constructor method to construct a `BigFloat` +from a `digits` string. Here is a possible implementation: + +```scala +object BigFloat: + import scala.util.FromDigits + + def apply(digits: String): BigFloat = + val (mantissaDigits, givenExponent) = + digits.toUpperCase.split('E') match + case Array(mantissaDigits, edigits) => + val expo = + try FromDigits.intFromDigits(edigits) + catch case ex: FromDigits.NumberTooLarge => + throw FromDigits.NumberTooLarge(s"exponent too large: $edigits") + (mantissaDigits, expo) + case Array(mantissaDigits) => + (mantissaDigits, 0) + val (intPart, exponent) = + mantissaDigits.split('.') match + case Array(intPart, decimalPart) => + (intPart ++ decimalPart, givenExponent - decimalPart.length) + case Array(intPart) => + (intPart, givenExponent) + BigFloat(BigInt(intPart), exponent) +``` + +To accept `BigFloat` literals, all that's needed in addition is a `given` instance of type +`FromDigits.Floating[BigFloat]`: + +```scala + given FromDigits: FromDigits.Floating[BigFloat] with + def fromDigits(digits: String) = apply(digits) +end BigFloat +``` + +Note that the `apply` method does not check the format of the `digits` argument. It is +assumed that only valid arguments are passed. For calls coming from the compiler +that assumption is valid, since the compiler will first check whether a numeric +literal has the correct format before it gets passed on to a conversion method. 
+ +## Compile-Time Errors + +With the setup of the previous section, a literal like + +```scala +1e10_0000_000_000: BigFloat +``` + +would be expanded by the compiler to + +```scala +BigFloat.FromDigits.fromDigits("1e100000000000") +``` + +Evaluating this expression throws a [`NumberTooLarge`](https://scala-lang.org/api/3.x/scala/util/FromDigits$$NumberTooLarge.html) exception at run time. We would like it to +produce a compile-time error instead. We can achieve this by tweaking the `BigFloat` class +with a small dose of metaprogramming. The idea is to turn the `fromDigits` method +into a macro, i.e. make it an inline method with a splice as right-hand side. +To do this, replace the `FromDigits` instance in the `BigFloat` object by the following two definitions: + +```scala +object BigFloat: + ... + + class FromDigits extends FromDigits.Floating[BigFloat]: + def fromDigits(digits: String) = apply(digits) + + given FromDigits with + override inline def fromDigits(digits: String) = ${ + fromDigitsImpl('digits) + } +``` + +Note that an inline method cannot directly fill in for an abstract method, since it produces +no code that can be executed at runtime. That is why we define an intermediary class +`FromDigits` that contains a fallback implementation which is then overridden by the inline +method in the `FromDigits` given instance. That method is defined in terms of a macro +implementation method `fromDigitsImpl`. Here is its definition: + +```scala + private def fromDigitsImpl(digits: Expr[String])(using ctx: Quotes): Expr[BigFloat] = + digits.value match + case Some(ds) => + try + val BigFloat(m, e) = apply(ds) + '{BigFloat(${Expr(m)}, ${Expr(e)})} + catch case ex: FromDigits.FromDigitsException => + ctx.error(ex.getMessage) + '{BigFloat(0, 0)} + case None => + '{apply($digits)} +end BigFloat +``` + +The macro implementation takes an argument of type `Expr[String]` and yields +a result of type `Expr[BigFloat]`. It tests whether its argument is a constant +string. 
If that is the case, it converts the string using the `apply` method +and lifts the resulting `BigFloat` back to `Expr` level. For non-constant +strings `fromDigitsImpl(digits)` is simply `apply(digits)`, i.e. everything is +evaluated at runtime in this case. + +The interesting part is the `catch` part of the case where `digits` is constant. +If the `apply` method throws a `FromDigitsException`, the exception's message is issued as a compile time error in the `ctx.error(ex.getMessage)` call. + +With this new implementation, a definition like + +```scala +val x: BigFloat = 1234.45e3333333333 +``` + +would give a compile time error message: + +```scala +3 | val x: BigFloat = 1234.45e3333333333 + | ^^^^^^^^^^^^^^^^^^ + | exponent too large: 3333333333 +``` diff --git a/docs/_spec/TODOreference/experimental/overview.md b/docs/_spec/TODOreference/experimental/overview.md new file mode 100644 index 000000000000..254f103896e4 --- /dev/null +++ b/docs/_spec/TODOreference/experimental/overview.md @@ -0,0 +1,29 @@ +--- +layout: doc-page +title: "Experimental" +nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/overview.html +redirectFrom: overview.html +--- + +## Experimental language features + +All experimental language features can be found under the `scala.language.experimental` package. +They are enabled by importing the feature or using the `-language` compiler flag. + +* [`erasedDefinitions`](./erased-defs.md): Enable support for `erased` modifier. +* `fewerBraces`: Enable support for using indentation for arguments. +* [`genericNumberLiterals`](./numeric-literals.md): Enable support for generic number literals. +* [`namedTypeArguments`](./named-typeargs.md): Enable support for named type arguments +* [`saferExceptions`](./canthrow.md): Enable support for checked exceptions. 
+ +## Experimental language imports + +In general, experimental language features can be imported in an experimental scope (see [experimental definitions](../other-new-features/experimental-defs.md)). +They can be imported at the top-level if all top-level definitions are `@experimental`. + +## Experimental language features supported by special compiler options + +Some experimental language features that are still in research and development can be enabled with special compiler options. These include + +* [`-Yexplicit-nulls`](./explicit-nulls.md). Enable support for tracking null references in the type system. +* [`-Ycc`](./cc.md). Enable support for capture checking. diff --git a/docs/_spec/TODOreference/experimental/tupled-function.md b/docs/_spec/TODOreference/experimental/tupled-function.md new file mode 100644 index 000000000000..da108fc832ad --- /dev/null +++ b/docs/_spec/TODOreference/experimental/tupled-function.md @@ -0,0 +1,82 @@ +--- +layout: doc-page +title: "Tupled Function" +--- + +Tupled Function +---------------------- + +With functions bounded to arities up to 22 it was possible to generalize some operation on all function types using overloading. +Now that we have functions and tuples generalized to [arities above 22](../dropped-features/limit22.md) overloading is not an option anymore. +The type class `TupleFunction` provides a way to abstract directly over a function of any arity converting it to an equivalent function that receives all arguments in a single tuple. 
+ +```scala +/** Type class relating a `FunctionN[..., R]` with an equivalent tupled function `Function1[TupleN[...], R]` + * + * @tparam F a function type + * @tparam G a tupled function type (function of arity 1 receiving a tuple as argument) + */ +@implicitNotFound("${F} cannot be tupled as ${G}") +sealed trait TupledFunction[F, G] { + def tupled(f: F): G + def untupled(g: G): F +} +``` + +The compiler will synthesize an instance of `TupledFunction[F, G]` if: + +* `F` is a function type of arity `N` +* `G` is a function with a single tuple argument of size `N` and its types are equal to the arguments of `F` +* The return type of `F` is equal to the return type of `G` +* `F` and `G` are the same sort of function (both are `(...) => R` or both are `(...) ?=> R`) +* If only one of `F` or `G` is instantiated the second one is inferred. + +Examples +-------- +`TupledFunction` can be used to generalize the `Function1.tupled`, ... `Function22.tupled` methods to functions of any arities. +The following defines `tupled` as [extension method](../contextual/extension-methods.html) ([full example](https://github.com/lampepfl/dotty/blob/main/tests/run/tupled-function-tupled.scala)). + +```scala +/** Creates a tupled version of this function: instead of N arguments, + * it accepts a single [[scala.Tuple]] with N elements as argument. + * + * @tparam F the function type + * @tparam Args the tuple type with the same types as the function arguments of F + * @tparam R the return type of F + */ +extension [F, Args <: Tuple, R](f: F) + def tupled(using tf: TupledFunction[F, Args => R]): Args => R = tf.tupled(f) +``` + +`TupledFunction` can be used to generalize the `Function.untupled` to a function of any arities ([full example](https://github.com/lampepfl/dotty/blob/main/tests/run/tupled-function-untupled.scala)) + +```scala +/** Creates an untupled version of this function: instead of a single argument of type [[scala.Tuple]] with N elements, + * it accepts N arguments. 
+ * + * This is a generalization of [[scala.Function.untupled]] that work on functions of any arity + * + * @tparam F the function type + * @tparam Args the tuple type with the same types as the function arguments of F + * @tparam R the return type of F + */ +extension [F, Args <: Tuple, R](f: Args => R) + def untupled(using tf: TupledFunction[F, Args => R]): F = tf.untupled(f) +``` + +`TupledFunction` can also be used to generalize the [`Tuple1.compose`](https://github.com/lampepfl/dotty/blob/main/tests/run/tupled-function-compose.scala) and [`Tuple1.andThen`](https://github.com/lampepfl/dotty/blob/main/tests/run/tupled-function-andThen.scala) methods to compose functions of larger arities and with functions that return tuples. + +```scala +/** Composes two instances of TupledFunction into a new TupledFunction, with this function applied last. + * + * @tparam F a function type + * @tparam G a function type + * @tparam FArgs the tuple type with the same types as the function arguments of F and return type of G + * @tparam GArgs the tuple type with the same types as the function arguments of G + * @tparam R the return type of F + */ +extension [F, G, FArgs <: Tuple, GArgs <: Tuple, R](f: F) + def compose(g: G)(using tg: TupledFunction[G, GArgs => FArgs], tf: TupledFunction[F, FArgs => R]): GArgs => R = { + (x: GArgs) => tf.tupled(f)(tg.tupled(g)(x)) +} +``` diff --git a/docs/_spec/TODOreference/features-classification.md b/docs/_spec/TODOreference/features-classification.md new file mode 100644 index 000000000000..36cea3b9e72d --- /dev/null +++ b/docs/_spec/TODOreference/features-classification.md @@ -0,0 +1,199 @@ +--- +layout: doc-page +title: "A Classification of Proposed Language Features" +nightlyOf: https://docs.scala-lang.org/scala3/reference/features-classification.html +--- + +This document provides an overview of the constructs proposed for Scala 3 with the aim to facilitate the discussion what to include and when to include it. 
It classifies features into eight groups: (1) essential foundations, (2) simplifications, (3) restrictions, (4) dropped features, (5) changed features, (6) new features, (7) features oriented towards metaprogramming with the aim to replace existing macros, and (8) changes to type checking and inference. + +Each group contains sections classifying the status (i.e. relative importance to be a part of Scala 3, and relative urgency when to decide this) and the migration cost +of the constructs in it. + +The current document reflects the state of things as of April, 2019. It will be updated to reflect any future changes in that status. + +## Essential Foundations + +These new constructs directly model core features of [DOT](https://www.scala-lang.org/blog/2016/02/03/essence-of-scala.html), higher-kinded types, and the [SI calculus for implicit resolution](https://infoscience.epfl.ch/record/229878/files/simplicitly_1.pdf). + + - [Intersection types](new-types/intersection-types.md), replacing compound types, + - [Union types](new-types/union-types.md), + - [Type lambdas](new-types/type-lambdas.md), + replacing encodings using structural types and type projection. + - [Context functions](contextual/context-functions.md) offering abstraction over given parameters. + +**Status: essential** + +These are essential core features of Scala 3. Without them, Scala 3 would be a completely different language, with different foundations. + +**Migration cost: none to low** + +Since these are additions, there's generally no migration cost for old code. An exception are intersection types which replace compound types with slightly cleaned-up semantics. But few programs would be affected by this change. + +## Simplifications + +These constructs replace existing constructs with the aim of making the language safer and simpler to use, and to promote uniformity in code style. 
+ + - [Trait parameters](other-new-features/trait-parameters.md) replace [early initializers](dropped-features/early-initializers.md) with a more generally useful construct. + - [Given instances](contextual/givens.md) + replace implicit objects and defs, focussing on intent over mechanism. + - [Using clauses](contextual/using-clauses.md) replace implicit parameters, avoiding their ambiguities. + - [Extension methods](contextual/extension-methods.md) replace implicit classes with a clearer and simpler mechanism. + - [Opaque type aliases](other-new-features/opaques.md) replace most uses + of value classes while guaranteeing absence of boxing. + - [Top-level definitions](dropped-features/package-objects.md) replace package objects, dropping syntactic boilerplate. + - [Export clauses](other-new-features/export.md) + provide a simple and general way to express aggregation, which can replace the + previous facade pattern of package objects inheriting from classes. + - [Vararg splices](changed-features/vararg-splices.md) now use the form `*` instead of `@ _*`, mirroring vararg expressions, + - [Creator applications](other-new-features/creator-applications.md) allow using simple function call syntax + instead of `new` expressions. `new` expressions stay around as a fallback for + the cases where creator applications cannot be used. + +With the exception of early initializers and old-style vararg splices, all superseded constructs continue to be available in Scala 3.0. The plan is to deprecate and phase them out later. + +Value classes (superseded by opaque type aliases) are a special case. There are currently no deprecation plans for value classes, since we might bring them back in a more general form if they are supported natively by the JVM as is planned by project Valhalla. + +**Status: bimodal: now or never / can delay** + +These are essential simplifications. If we decide to adopt them, we should do it for 3.0. 
Otherwise we are faced with the awkward situation that the Scala 3 documentation has to describe an old feature that will be replaced or superseded by a simpler one in the future. + +On the other hand, we need to decide now only about the new features in this list. The decision to drop the superseded features can be delayed. Of course, adopting a new feature without deciding to drop the superseded feature will make the language larger. + +**Migration cost: moderate** + +For the next several versions, old features will remain available and deprecation and rewrite techniques can make any migration effort low and gradual. + + +## Restrictions + +These constructs are restricted to make the language safer. + + - [Implicit Conversions](contextual/conversions.md): there is only one way to define implicit conversions instead of many, and potentially surprising implicit conversions require a language import. + - [Given Imports](contextual/given-imports.md): implicits now require a special form of import, to make the import clearly visible. + - [Type Projection](dropped-features/type-projection.md): only classes can be used as prefix `C` of a type projection `C#A`. Type projection on abstract types is no longer supported since it is unsound. + - [Multiversal equality](contextual/multiversal-equality.md) implements an "opt-in" scheme to rule out nonsensical comparisons with `==` and `!=`. + - [infix](https://github.com/lampepfl/dotty/pull/5975) + makes method application syntax uniform across code bases. + +Unrestricted implicit conversions continue to be available in Scala 3.0, but will be deprecated and removed later. Unrestricted versions of the other constructs in the list above are available only under `-source 3.0-migration`. + +**Status: now or never** + +These are essential restrictions. If we decide to adopt them, we should do it for 3.0. 
Otherwise we are faced with the awkward situation that the Scala 3 documentation has to describe a feature that will be restricted in the future. + +**Migration cost: low to high** + + - _low_: multiversal equality rules out code that is nonsensical, so any rewrites required by its adoption should be classified as bug fixes. + - _moderate_: Restrictions to implicits can be accommodated by straightforward rewriting. + - _high_: Unrestricted type projection cannot always be rewritten directly since it is unsound in general. + +## Dropped Constructs + +These constructs are proposed to be dropped without a new construct replacing them. The motivation for dropping these constructs is to simplify the language and its implementation. + + - [DelayedInit](dropped-features/delayed-init.md), + - [Existential types](dropped-features/existential-types.md), + - [Procedure syntax](dropped-features/procedure-syntax.md), + - [Class shadowing](dropped-features/class-shadowing.md), + - [XML literals](dropped-features/xml.md), + - [Symbol literals](dropped-features/symlits.md), + - [Auto application](dropped-features/auto-apply.md), + - [Weak conformance](dropped-features/weak-conformance.md), + - [Compound types](new-types/intersection-types.md), + - [Auto tupling](https://github.com/lampepfl/dotty/pull/4311) (implemented, but not merged). + +The date when these constructs are dropped varies. The current status is: + + - Not implemented at all: + - DelayedInit, existential types, weak conformance. + - Supported under `-source 3.0-migration`: + - procedure syntax, class shadowing, symbol literals, auto application, auto tupling in a restricted form. + - Supported in 3.0, to be deprecated and phased out later: + - XML literals, compound types. + +**Status: mixed** + +Currently unimplemented features would require considerable implementation effort which would in most cases make the compiler more buggy and fragile and harder to understand. 
If we do not decide to drop them, they will probably show up as "not yet implemented" in the Scala 3.0 release. + +Currently implemented features could stay around indefinitely. Updated docs may simply ignore them, in the expectation that they might go away eventually. So the decision about their removal can be delayed. + +**Migration cost: moderate to high** + +Dropped features require rewrites to avoid their use in programs. These rewrites can sometimes be automatic (e.g. for procedure syntax, symbol literals, auto application) +and sometimes need to be manual (e.g. class shadowing, auto tupling). Sometimes the rewrites would have to be non-local, affecting use sites as well as definition sites (e.g., in the case of `DelayedInit`, unless we find a solution). + +## Changes + +These constructs have undergone changes to make them more regular and useful. + + - [Structural Types](changed-features/structural-types.md): They now allow pluggable implementations, which greatly increases their usefulness. Some usage patterns are restricted compared to the status quo. + - [Name-based pattern matching](changed-features/pattern-matching.md): The existing undocumented Scala 2 implementation has been codified in a slightly simplified form. + - [Eta expansion](changed-features/eta-expansion.md) is now performed universally also in the absence of an expected type. The postfix `_` operator is thus made redundant. It will be deprecated and dropped after Scala 3.0. + - [Implicit Resolution](changed-features/implicit-resolution.md): The implicit resolution rules have been cleaned up to make them more useful and less surprising. Implicit scope is restricted to no longer include package prefixes. + +Most aspects of old-style implicit resolution are still available under `-source 3.0-migration`. The other changes in this list are applied unconditionally. + +**Status: strongly advisable** + +The features have been implemented in their new form in Scala 3.0's compiler. 
They provide clear improvements in simplicity and functionality compared to the status quo. Going back would require significant implementation effort for a net loss of functionality. + +**Migration cost: low to high** + +Only a few programs should require changes, but some necessary changes might be non-local (as in the case of restrictions to implicit scope). + +## New Constructs + +These are additions to the language that make it more powerful or pleasant to use. + + - [Enums](enums/enums.md) provide concise syntax for enumerations and [algebraic data types](enums/adts.md). + - [Parameter untupling](other-new-features/parameter-untupling.md) avoids having to use `case` for tupled parameter destructuring. + - [Dependent function types](new-types/dependent-function-types.md) generalize dependent methods to dependent function values and types. + - [Polymorphic function types](https://github.com/lampepfl/dotty/pull/4672) generalize polymorphic methods to dependent function values and types. _Current status_: There is a proposal, and a prototype implementation, but the implementation has not been finalized or merged yet. + - [Kind polymorphism](other-new-features/kind-polymorphism.md) allows the definition of operators working equally on types and type constructors. + +**Status: mixed** + +Enums offer an essential simplification of fundamental use patterns, so they should be adopted for Scala 3.0. Auto-parameter tupling is a very small change that removes some awkwardness, so it might as well be adopted now. The other features constitute more specialized functionality which could be introduced in later versions. On the other hand, except for polymorphic function types they are all fully implemented, so if the Scala 3.0 spec does not include them, they might be still made available under a language flag. + +**Migration cost: none** + +Being new features, existing code migrates without changes. 
To be sure, sometimes it would be attractive to rewrite code to make use of the new features in order to increase clarity and conciseness. + +## Metaprogramming + +The following constructs together aim to put metaprogramming in Scala on a new basis. So far, metaprogramming was achieved by a combination of macros and libraries such as [Shapeless](https://github.com/milessabin/shapeless) that were in turn based on some key macros. Current Scala 2 macro mechanisms are a thin veneer on top of the current Scala 2 compiler, which makes them fragile and in many cases impossible to port to Scala 3. + +It's worth noting that macros were never included in the [Scala 2 language specification](https://scala-lang.org/files/archive/spec/2.13/) and were so far made available only under an `-experimental` flag. This has not prevented their widespread usage. + +To enable porting most uses of macros, we are experimenting with the advanced language constructs listed below. These designs are more provisional than the rest of the proposed language constructs for Scala 3.0. There might still be some changes until the final release. Stabilizing the feature set needed for metaprogramming is our first priority. + +- [Match types](new-types/match-types.md) allow computation on types. +- [Inline](metaprogramming/inline.md) provides +by itself a straightforward implementation of some simple macros and is at the same time an essential building block for the implementation of complex macros. +- [Quotes and splices](metaprogramming/macros.md) provide a principled way to express macros and staging with a unified set of abstractions. +- [Type class derivation](contextual/derivation.md) provides an in-language implementation of the `Gen` macro in Shapeless and other foundational libraries. The new implementation is more robust, efficient and easier to use than the macro. 
+- [Implicit by-name parameters](contextual/by-name-context-parameters.md) provide a more robust in-language implementation of the `Lazy` macro in Shapeless. + +**Status: not yet settled** + +We know we need a practical replacement for current macros. The features listed above are very promising in that respect, but we need more complete implementations and more use cases to reach a final verdict. + +**Migration cost: very high** + +Existing macro libraries will have to be rewritten from the ground up. In many cases the rewritten libraries will turn out to be simpler and more robust than the old ones, but that does not relieve one of the cost of the rewrites. It's currently unclear to what degree users of macro libraries will be affected. We aim to provide sufficient functionality so that core macros can be re-implemented fully, but given the vast feature set of the various macro extensions to Scala 2 it is difficult to arrive at a workable limitation of scope. + +## Changes to Type Checking and Inference + +The Scala 3 compiler uses a new algorithm for type inference, which relies on a general subtype constraint solver. The new algorithm often [works better than the old](https://contributors.scala-lang.org/t/better-type-inference-for-scala-send-us-your-problematic-cases/2410), but there are inevitably situations where the results of both algorithms differ, leading to errors diagnosed by Scala 3 for programs that the Scala 2 compiler accepts. + +**Status: essential** + +The new type-checking and inference algorithms are the essential core of the new compiler. They cannot be reverted without dropping the whole implementation of Scala 3. + +**Migration cost: high** + +Some existing programs will break and, given the complex nature of type inference, it will not always be clear what change caused the breakage and how to fix it. 
+ +In our experience, macros and changes in type and implicit argument inference together cause the large majority of problems encountered when porting existing code to Scala 3. The latter source of problems could be addressed systematically by a tool that added all inferred types and implicit arguments to a Scala 2 source code file. Most likely such a tool would be implemented as a [Scala 2 compiler plugin](https://docs.scala-lang.org/overviews/plugins/index.html). The resulting code would have a greatly increased likelihood to compile under Scala 3, but would often be bulky to the point of being unreadable. A second part of the rewriting tool should then selectively and iteratively remove type and implicit annotations that were synthesized by the first part as long as they compile under Scala 3. This second part could be implemented as a program that invokes the Scala 3 compiler `scalac` programmatically. + +Several people have proposed such a tool for some time now. I believe it is time we find the will and the resources to actually implement it. diff --git a/docs/_spec/TODOreference/language-versions/binary-compatibility.md b/docs/_spec/TODOreference/language-versions/binary-compatibility.md new file mode 100644 index 000000000000..df1c19f97868 --- /dev/null +++ b/docs/_spec/TODOreference/language-versions/binary-compatibility.md @@ -0,0 +1,13 @@ +--- +layout: doc-page +title: "Binary Compatibility" +nightlyOf: https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html +--- + +In Scala 2 different minor versions of the compiler were free to change the way how they encode different language features in JVM bytecode so each bump of the compiler's minor version resulted in breaking binary compatibility and if a project had any Scala dependencies they all needed to be (cross-)compiled to the same minor Scala version that was used in that project itself. On the contrary, Scala 3 has a stable encoding into JVM bytecode. 
+ +In addition to classfiles the compilation process in Scala 3 also produces files with `.tasty` extension. The [TASTy](https://docs.scala-lang.org/scala3/guides/tasty-overview.html) format is an intermediate representation of Scala code containing full information about sources together with information provided by the typer. Some of this information is lost during generation of bytecode so Scala 3 compilers read TASTy files during compilation in addition to classfiles to know the exact types of values, methods, etc. in already compiled classes (although compilation from TASTy files only is also possible). TASTy files are also typically distributed together with classfiles in published artifacts. + +TASTy format is extensible but it preserves backward compatibility and the evolution happens between minor releases of the language. This means a Scala compiler in version `3.x1.y1` is able to read TASTy files produced by another compiler in version `3.x2.y2` if `x1 >= x2` (assuming two stable versions of the compiler are considered - `SNAPSHOT` or `NIGHTLY` compiler versions can read TASTy in an older stable format but their TASTy versions are not compatible between each other even if the compilers have the same minor version; also compilers in stable versions cannot read TASTy generated by an unstable version). + +TASTy version number has the format of `<major_version>.<minor_version>-<experimental_version>` and the numbering changes in parallel to language releases in such a way that a bump in language minor version corresponds to a bump in TASTy minor version (e.g. for Scala `3.0.0` the TASTy version is `28.0-0`). Experimental version set to 0 signifies a stable version while others are considered unstable/experimental. TASTy version is not strictly bound to the data format itself - any changes to the API of the standard library also require a change in TASTy minor version. 
diff --git a/docs/_spec/TODOreference/language-versions/language-versions.md b/docs/_spec/TODOreference/language-versions/language-versions.md new file mode 100644 index 000000000000..2dfd04857cab --- /dev/null +++ b/docs/_spec/TODOreference/language-versions/language-versions.md @@ -0,0 +1,7 @@ +--- +layout: index +title: "Language Versions" +nightlyOf: https://docs.scala-lang.org/scala3/reference/language-versions/index.html +--- + +Additional information on interoperability and migration between Scala 2 and 3 can be found [here](https://docs.scala-lang.org/scala3/guides/migration/compatibility-intro.html). diff --git a/docs/_spec/TODOreference/language-versions/source-compatibility.md b/docs/_spec/TODOreference/language-versions/source-compatibility.md new file mode 100644 index 000000000000..4d5b468ac8f2 --- /dev/null +++ b/docs/_spec/TODOreference/language-versions/source-compatibility.md @@ -0,0 +1,43 @@ +--- +layout: doc-page +title: "Source Compatibility" +nightlyOf: https://docs.scala-lang.org/scala3/reference/language-versions/source-compatibility.html +--- + +Scala 3 does NOT guarantee source compatibility between different minor language versions (e.g. some syntax valid in 3.x might get deprecated and then phased out in 3.y for y > x). There are also some syntax structures that were valid in Scala 2 but are not anymore in Scala 3. However the compiler provides a possibility to specify the desired version of syntax used in a particular file or globally for a run of the compiler to make migration between versions easier. + +The default Scala language syntax version currently supported by the Dotty compiler is [`3.2`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$3/2$.html). 
There are also other language versions that can be specified instead: + +- [`3.0-migration`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$3/0-migration$.html): Same as +`3.0` and `3.1`, but with a Scala 2 compatibility mode that helps moving Scala 2.13 sources over to Scala 3. In particular, it + + - flags some Scala 2 constructs that are disallowed in Scala 3 as migration warnings instead of hard errors, + - changes some rules to be more lenient and backwards compatible with Scala 2.13 + - gives some additional warnings where the semantics has changed between Scala 2.13 and 3.0 + - in conjunction with `-rewrite`, offer code rewrites from Scala 2.13 to 3.0. + +- [`3.0`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$3/0$.html), [`3.1`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$3/1$.html): the default set of features included in scala versions `3.0.0` to `3.1.3`. +- [`3.2`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$3/2$.html): the same as `3.0` and `3.1`, but in addition: + - [stricter pattern bindings](https://docs.scala-lang.org/scala3/reference/changed-features/pattern-bindings.html) are now enabled (part of `future` in earlier `3.x` releases), producing warnings for refutable patterns. These warnings can be silenced to achieve the same runtime behavior, but in `future` they become errors and refutable patterns will not compile. + - [Nonlocal returns](https://docs.scala-lang.org/scala3/reference/dropped-features/nonlocal-returns.html) now produce a warning upon usage (they are still an error under `future`). +- [`3.2-migration`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$3/2-migration$.html): the same as `3.2`, but in conjunction with `-rewrite`, offer code rewrites from Scala `3.0/3.1` to `3.2`. 
+- [`future`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$future$.html): A preview of changes that will be introduced in `3.x` versions after `3.2`. +Some Scala 2 specific idioms are dropped in this version. The feature set supported by this version may grow over time as features become stabilised for preview. + +- [`future-migration`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$future-migration$.html): Same as `future` but with additional helpers to migrate from `3.2`. Similarly to the helpers available under `3.0-migration`, these include migration warnings and optional rewrites. + +There are two ways to specify a language version: + +- with a `-source` command line setting, e.g. `-source 3.0-migration`. +- with a `scala.language` import at the top of a source file, e.g.: + +```scala +package p +import scala.language.`future-migration` + +class C { ... } +``` + +Language imports supersede command-line settings in the source files where they are specified. Only one language import specifying a source version is allowed in a source file, and it must come before any definitions in that file. + +**Note**: The [Scala 3 Migration Guide](https://docs.scala-lang.org/scala3/guides/migration/compatibility-intro.html) gives further information to help the Scala programmer moving from Scala 2.13 to Scala 3. 
diff --git a/docs/_spec/TODOreference/metaprogramming/compiletime-ops.md b/docs/_spec/TODOreference/metaprogramming/compiletime-ops.md new file mode 100644 index 000000000000..a43c941ae943 --- /dev/null +++ b/docs/_spec/TODOreference/metaprogramming/compiletime-ops.md @@ -0,0 +1,294 @@ +--- +layout: doc-page +title: "Compile-time operations" +nightlyOf: https://docs.scala-lang.org/scala3/reference/metaprogramming/compiletime-ops.html +--- + +## The `scala.compiletime` Package + +The [`scala.compiletime`](https://scala-lang.org/api/3.x/scala/compiletime.html) package contains helper definitions that provide support for compile-time operations over values. They are described in the following. + +### `constValue` and `constValueOpt` + +`constValue` is a function that produces the constant value represented by a +type. + +```scala +import scala.compiletime.constValue +import scala.compiletime.ops.int.S + +transparent inline def toIntC[N]: Int = + inline constValue[N] match + case 0 => 0 + case _: S[n1] => 1 + toIntC[n1] + +inline val ctwo = toIntC[2] +``` + +`constValueOpt` is the same as `constValue`, however returning an `Option[T]` +enabling us to handle situations where a value is not present. Note that `S` is +the type of the successor of some singleton type. For example the type `S[1]` is +the singleton type `2`. + +### `erasedValue` + +So far we have seen inline methods that take terms (tuples and integers) as +parameters. What if we want to base case distinctions on types instead? For +instance, one would like to be able to write a function `defaultValue`, that, +given a type `T`, returns optionally the default value of `T`, if it exists. +We can already express this using rewrite match expressions and a simple +helper function, `scala.compiletime.erasedValue`, which is defined as follows: + +```scala +def erasedValue[T]: T +``` + +The `erasedValue` function _pretends_ to return a value of its type argument `T`. 
+Calling this function will always result in a compile-time error unless the call +is removed from the code while inlining. + +Using `erasedValue`, we can then define `defaultValue` as follows: + +```scala +import scala.compiletime.erasedValue + +transparent inline def defaultValue[T] = + inline erasedValue[T] match + case _: Byte => Some(0: Byte) + case _: Char => Some(0: Char) + case _: Short => Some(0: Short) + case _: Int => Some(0) + case _: Long => Some(0L) + case _: Float => Some(0.0f) + case _: Double => Some(0.0d) + case _: Boolean => Some(false) + case _: Unit => Some(()) + case _ => None +``` + +Then: + +```scala +val dInt: Some[Int] = defaultValue[Int] +val dDouble: Some[Double] = defaultValue[Double] +val dBoolean: Some[Boolean] = defaultValue[Boolean] +val dAny: None.type = defaultValue[Any] +``` + +As another example, consider the type-level version of `toInt` below: +given a _type_ representing a Peano number, +return the integer _value_ corresponding to it. +Consider the definitions of numbers as in the _Inline +Match_ section above. Here is how `toIntT` can be defined: + +```scala +transparent inline def toIntT[N <: Nat]: Int = + inline scala.compiletime.erasedValue[N] match + case _: Zero.type => 0 + case _: Succ[n] => toIntT[n] + 1 + +inline val two = toIntT[Succ[Succ[Zero.type]]] +``` + +`erasedValue` is an `erased` method so it cannot be used and has no runtime +behavior. Since `toIntT` performs static checks over the static type of `N` we +can safely use it to scrutinize its return type (`S[S[Z]]` in this case). + +### `error` + +The `error` method is used to produce user-defined compile errors during inline expansion. +It has the following signature: + +```scala +inline def error(inline msg: String): Nothing +``` + +If an inline expansion results in a call `error(msgStr)` the compiler +produces an error message containing the given `msgStr`. 
+ +```scala +import scala.compiletime.{error, codeOf} + +inline def fail() = + error("failed for a reason") + +fail() // error: failed for a reason +``` + +or + +```scala +inline def fail(inline p1: Any) = + error("failed on: " + codeOf(p1)) + +fail(identity("foo")) // error: failed on: identity[String]("foo") +``` + +### The `scala.compiletime.ops` package + +The [`scala.compiletime.ops`](https://scala-lang.org/api/3.x/scala/compiletime/ops.html) package contains types that provide support for +primitive operations on singleton types. For example, +`scala.compiletime.ops.int.*` provides support for multiplying two singleton +`Int` types, and `scala.compiletime.ops.boolean.&&` for the conjunction of two +`Boolean` types. When all arguments to a type in `scala.compiletime.ops` are +singleton types, the compiler can evaluate the result of the operation. + +```scala +import scala.compiletime.ops.int.* +import scala.compiletime.ops.boolean.* + +val conjunction: true && true = true +val multiplication: 3 * 5 = 15 +``` + +Many of these singleton operation types are meant to be used infix (as in [SLS §3.2.10](https://www.scala-lang.org/files/archive/spec/2.13/03-types.html#infix-types)). + +Since type aliases have the same precedence rules as their term-level +equivalents, the operations compose with the expected precedence rules: + +```scala +import scala.compiletime.ops.int.* +val x: 1 + 2 * 3 = 7 +``` + +The operation types are located in packages named after the type of the +left-hand side parameter: for instance, `scala.compiletime.ops.int.+` represents +addition of two numbers, while `scala.compiletime.ops.string.+` represents string +concatenation. 
To use both and distinguish the two types from each other, a +match type can dispatch to the correct implementation: + +```scala +import scala.compiletime.ops.* + +import scala.annotation.infix + +type +[X <: Int | String, Y <: Int | String] = (X, Y) match + case (Int, Int) => int.+[X, Y] + case (String, String) => string.+[X, Y] + +val concat: "a" + "b" = "ab" +val addition: 1 + 1 = 2 +``` + +## Summoning Implicits Selectively + +It is foreseen that many areas of typelevel programming can be done with rewrite +methods instead of implicits. But sometimes implicits are unavoidable. The +problem so far was that the Prolog-like programming style of implicit search +becomes viral: Once some construct depends on implicit search it has to be +written as a logic program itself. Consider for instance the problem of creating +a `TreeSet[T]` or a `HashSet[T]` depending on whether `T` has an `Ordering` or +not. We can create a set of implicit definitions like this: + +```scala +trait SetFor[T, S <: Set[T]] + +class LowPriority: + implicit def hashSetFor[T]: SetFor[T, HashSet[T]] = ... + +object SetsFor extends LowPriority: + implicit def treeSetFor[T: Ordering]: SetFor[T, TreeSet[T]] = ... +``` + +Clearly, this is not pretty. Besides all the usual indirection of implicit +search, we face the problem of rule prioritization where we have to ensure that +`treeSetFor` takes priority over `hashSetFor` if the element type has an +ordering. This is solved (clumsily) by putting `hashSetFor` in a superclass +`LowPriority` of the object `SetsFor` where `treeSetFor` is defined. Maybe the +boilerplate would still be acceptable if the crufty code could be contained. +However, this is not the case. Every user of the abstraction has to be +parameterized itself with a `SetFor` implicit. Considering the simple task _"I +want a `TreeSet[T]` if `T` has an ordering and a `HashSet[T]` otherwise"_, this +seems like a lot of ceremony. 
+ +There are some proposals to improve the situation in specific areas, for +instance by allowing more elaborate schemes to specify priorities. But they all +keep the viral nature of implicit search programs based on logic programming. + +By contrast, the new `summonFrom` construct makes implicit search available +in a functional context. To solve the problem of creating the right set, one +would use it as follows: + +```scala +import scala.compiletime.summonFrom + +inline def setFor[T]: Set[T] = summonFrom { + case ord: Ordering[T] => new TreeSet[T]()(using ord) + case _ => new HashSet[T] +} +``` + +A `summonFrom` call takes a pattern matching closure as argument. All patterns +in the closure are type ascriptions of the form `identifier : Type`. + +Patterns are tried in sequence. The first case with a pattern `x: T` such that an implicit value of type `T` can be summoned is chosen. + +Alternatively, one can also use a pattern-bound given instance, which avoids the explicit using clause. For instance, `setFor` could also be formulated as follows: + +```scala +import scala.compiletime.summonFrom + +inline def setFor[T]: Set[T] = summonFrom { + case given Ordering[T] => new TreeSet[T] + case _ => new HashSet[T] +} +``` + +`summonFrom` applications must be reduced at compile time. + +Consequently, if we summon an `Ordering[String]` the code above will return a +new instance of `TreeSet[String]`. + +```scala +summon[Ordering[String]] + +println(setFor[String].getClass) // prints class scala.collection.immutable.TreeSet +``` + +**Note** `summonFrom` applications can raise ambiguity errors. Consider the following +code with two givens in scope of type `A`. The pattern match in `f` will raise +an ambiguity error if `f` is applied. + +```scala +class A +given a1: A = new A +given a2: A = new A + +inline def f: Any = summonFrom { + case given _: A => ??? 
// error: ambiguous givens +} +``` + +## `summonInline` + +The shorthand `summonInline` provides a simple way to write a `summon` that is delayed until the call is inlined. +Unlike `summonFrom`, `summonInline` also yields the implicit-not-found error, if a given instance of the summoned +type is not found. +```scala +import scala.compiletime.summonInline +import scala.annotation.implicitNotFound + +@implicitNotFound("Missing One") +trait Missing1 + +@implicitNotFound("Missing Two") +trait Missing2 + +trait NotMissing +given NotMissing = ??? + +transparent inline def summonInlineCheck[T <: Int](inline t : T) : Any = + inline t match + case 1 => summonInline[Missing1] + case 2 => summonInline[Missing2] + case _ => summonInline[NotMissing] + +val missing1 = summonInlineCheck(1) // error: Missing One +val missing2 = summonInlineCheck(2) // error: Missing Two +val notMissing : NotMissing = summonInlineCheck(3) +``` + +## Reference + +For more information about compile-time operations, see [PR #4768](https://github.com/lampepfl/dotty/pull/4768), +which explains how `summonFrom`'s predecessor (implicit matches) can be used for typelevel programming and code specialization and [PR #7201](https://github.com/lampepfl/dotty/pull/7201) which explains the new `summonFrom` syntax. diff --git a/docs/_spec/TODOreference/metaprogramming/inline.md b/docs/_spec/TODOreference/metaprogramming/inline.md new file mode 100644 index 000000000000..0c4800069bad --- /dev/null +++ b/docs/_spec/TODOreference/metaprogramming/inline.md @@ -0,0 +1,390 @@ +--- +layout: doc-page +title: Inline +nightlyOf: https://docs.scala-lang.org/scala3/reference/metaprogramming/inline.html +--- + +## Inline Definitions + +`inline` is a new [soft modifier](../soft-modifier.md) that guarantees that a +definition will be inlined at the point of use. 
Example: + +```scala +object Config: + inline val logging = false + +object Logger: + + private var indent = 0 + + inline def log[T](msg: String, indentMargin: =>Int)(op: => T): T = + if Config.logging then + println(s"${" " * indent}start $msg") + indent += indentMargin + val result = op + indent -= indentMargin + println(s"${" " * indent}$msg = $result") + result + else op +end Logger +``` + +The `Config` object contains a definition of the **inline value** `logging`. +This means that `logging` is treated as a _constant value_, equivalent to its +right-hand side `false`. The right-hand side of such an `inline val` must itself +be a [constant expression](https://scala-lang.org/files/archive/spec/2.13/06-expressions.html#constant-expressions). +Used in this way, `inline` is equivalent to Java and Scala 2's `final`. Note that `final`, meaning +_inlined constant_, is still supported in Scala 3, but will be phased out. + +The `Logger` object contains a definition of the **inline method** `log`. This +method will always be inlined at the point of call. + +In the inlined code, an `if-then-else` with a constant condition will be rewritten +to its `then`- or `else`-part. Consequently, in the `log` method above the +`if Config.logging` with `Config.logging == true` will get rewritten into its +`then`-part. + +Here's an example: + +```scala +var indentSetting = 2 + +def factorial(n: BigInt): BigInt = + log(s"factorial($n)", indentSetting) { + if n == 0 then 1 + else n * factorial(n - 1) + } +``` + +If `Config.logging == false`, this will be rewritten (simplified) to: + +```scala +def factorial(n: BigInt): BigInt = + if n == 0 then 1 + else n * factorial(n - 1) +``` + +As you notice, since neither `msg` or `indentMargin` were used, they do not +appear in the generated code for `factorial`. Also note the body of our `log` +method: the `else-` part reduces to just an `op`. 
In the generated code we do +not generate any closures because we only refer to a by-name parameter *once*. +Consequently, the code was inlined directly and the call was beta-reduced. + +In the `true` case the code will be rewritten to: + +```scala +def factorial(n: BigInt): BigInt = + val msg = s"factorial($n)" + println(s"${" " * indent}start $msg") + Logger.inline$indent_=(indent.+(indentSetting)) + val result = + if n == 0 then 1 + else n * factorial(n - 1) + Logger.inline$indent_=(indent.-(indentSetting)) + println(s"${" " * indent}$msg = $result") + result +``` + +Note that the by-value parameter `msg` is evaluated only once, per the usual Scala +semantics, by binding the value and reusing the `msg` through the body of +`factorial`. Also, note the special handling of the assignment to the private var +`indent`. It is achieved by generating a setter method `def inline$indent_=` and calling it instead. + +Inline methods always have to be fully applied. For instance, a call to +```scala +Logger.log[String]("some op", indentSetting) +``` +would be ill-formed and the compiler would complain that arguments are missing. +However, it is possible to pass wildcard arguments instead. For instance, +```scala +Logger.log[String]("some op", indentSetting)(_) +``` +would typecheck. + +### Recursive Inline Methods + +Inline methods can be recursive. For instance, when called with a constant +exponent `n`, the following method for `power` will be implemented by +straight inline code without any loop or recursion. + +```scala +inline def power(x: Double, n: Int): Double = + if n == 0 then 1.0 + else if n == 1 then x + else + val y = power(x, n / 2) + if n % 2 == 0 then y * y else y * y * x + +power(expr, 10) +// translates to +// +// val x = expr +// val y1 = x * x // ^2 +// val y2 = y1 * y1 // ^4 +// val y3 = y2 * x // ^5 +// y3 * y3 // ^10 +``` + +Parameters of inline methods can have an `inline` modifier as well. 
This means +that actual arguments to these parameters will be inlined in the body of the +`inline def`. `inline` parameters have call semantics equivalent to by-name parameters +but allow for duplication of the code in the argument. It is usually useful when constant +values need to be propagated to allow further optimizations/reductions. + +The following example shows the difference in translation between by-value, by-name and `inline` +parameters: + +```scala +inline def funkyAssertEquals(actual: Double, expected: =>Double, inline delta: Double): Unit = + if (actual - expected).abs > delta then + throw new AssertionError(s"difference between ${expected} and ${actual} was larger than ${delta}") + +funkyAssertEquals(computeActual(), computeExpected(), computeDelta()) +// translates to +// +// val actual = computeActual() +// def expected = computeExpected() +// if (actual - expected).abs > computeDelta() then +// throw new AssertionError(s"difference between ${expected} and ${actual} was larger than ${computeDelta()}") +``` + +### Rules for Overriding + +Inline methods can override other non-inline methods. The rules are as follows: + +1. If an inline method `f` implements or overrides another, non-inline method, the inline method can also be invoked at runtime. For instance, consider the scenario: + + ```scala + abstract class A: + def f: Int + def g: Int = f + + class B extends A: + inline def f = 22 + override inline def g = f + 11 + + val b = new B + val a: A = b + // inlined invocatons + assert(b.f == 22) + assert(b.g == 33) + // dynamic invocations + assert(a.f == 22) + assert(a.g == 33) + ``` + + The inlined invocations and the dynamically dispatched invocations give the same results. + +2. Inline methods are effectively final. + +3. Inline methods can also be abstract. An abstract inline method can be implemented only by other inline methods. 
It cannot be invoked directly: + + ```scala + abstract class A: + inline def f: Int + + object B extends A: + inline def f: Int = 22 + + B.f // OK + val a: A = B + a.f // error: cannot inline f in A. + ``` + +### Relationship to `@inline` + +Scala 2 also defines a `@inline` annotation which is used as a hint for the +backend to inline code. The `inline` modifier is a more powerful option: + +- expansion is guaranteed instead of best effort, +- expansion happens in the frontend instead of in the backend and +- expansion also applies to recursive methods. + + + +### The definition of constant expression + +Right-hand sides of inline values and of arguments for inline parameters must be +constant expressions in the sense defined by the [SLS §6.24](https://www.scala-lang.org/files/archive/spec/2.13/06-expressions.html#constant-expressions), +including _platform-specific_ extensions such as constant folding of pure +numeric computations. + +An inline value must have a literal type such as `1` or `true`. + +```scala +inline val four = 4 +// equivalent to +inline val four: 4 = 4 +``` + +It is also possible to have inline vals of types that do not have a syntax, such as `Short(4)`. + +```scala +trait InlineConstants: + inline val myShort: Short + +object Constants extends InlineConstants: + inline val myShort/*: Short(4)*/ = 4 +``` + +## Transparent Inline Methods + +Inline methods can additionally be declared `transparent`. +This means that the return type of the inline method can be +specialized to a more precise type upon expansion. Example: + +```scala +class A +class B extends A: + def m = true + +transparent inline def choose(b: Boolean): A = + if b then new A else new B + +val obj1 = choose(true) // static type is A +val obj2 = choose(false) // static type is B + +// obj1.m // compile-time error: `m` is not defined on `A` +obj2.m // OK +``` + +Here, the inline method `choose` returns an instance of either of the two types `A` or `B`. 
+If `choose` had not been declared to be `transparent`, the result +of its expansion would always be of type `A`, even though the computed value might be of the subtype `B`. +The inline method is a "blackbox" in the sense that details of its implementation do not leak out. +But if a `transparent` modifier is given, the expansion is the type of the expanded body. If the argument `b` +is `true`, that type is `A`, otherwise it is `B`. Consequently, calling `m` on `obj2` +type-checks since `obj2` has the same type as the expansion of `choose(false)`, which is `B`. +Transparent inline methods are "whitebox" in the sense that the type +of an application of such a method can be more specialized than its declared +return type, depending on how the method expands. + +In the following example, we see how the return type of `zero` is specialized to +the singleton type `0` permitting the addition to be ascribed with the correct +type `1`. + +```scala +transparent inline def zero: Int = 0 + +val one: 1 = zero + 1 +``` + +### Transparent vs. non-transparent inline + +As we already discussed, transparent inline methods may influence type checking at call site. +Technically this implies that transparent inline methods must be expanded during type checking of the program. +Other inline methods are inlined later after the program is fully typed. + +For example, the following two functions will be typed the same way but will be inlined at different times. + +```scala +inline def f1: T = ... +transparent inline def f2: T = (...): T +``` + +A noteworthy difference is the behavior of `transparent inline given`. +If there is an error reported when inlining that definition, it will be considered as an implicit search mismatch and the search will continue. +A `transparent inline given` can add a type ascription in its RHS (as in `f2` from the previous example) to avoid the precise type but keep the search behavior. 
+On the other hand, an `inline given` is taken as an implicit and then inlined after typing. +Any error will be emitted as usual. + +## Inline Conditionals + +An if-then-else expression whose condition is a constant expression can be simplified to +the selected branch. Prefixing an if-then-else expression with `inline` enforces that +the condition has to be a constant expression, and thus guarantees that the conditional will always +simplify. + +Example: + +```scala +inline def update(delta: Int) = + inline if delta >= 0 then increaseBy(delta) + else decreaseBy(-delta) +``` + +A call `update(22)` would rewrite to `increaseBy(22)`. But if `update` was called with +a value that was not a compile-time constant, we would get a compile time error like the one +below: + +```scala + | inline if delta >= 0 then ??? + | ^ + | cannot reduce inline if + | its condition + | delta >= 0 + | is not a constant value + | This location is in code that was inlined at ... +``` + +In a transparent inline, an `inline if` will force the inlining of any inline definition in its condition during type checking. + +## Inline Matches + +A `match` expression in the body of an `inline` method definition may be +prefixed by the `inline` modifier. If there is enough type information +at compile time to select a branch, the expression is reduced to that branch and the +type of the expression is the type of the right-hand side of that result. +If not, a compile-time error is raised that reports that the match cannot be reduced. 
+ +The example below defines an inline method with a +single inline match expression that picks a case based on its static type: + +```scala +transparent inline def g(x: Any): Any = + inline x match + case x: String => (x, x) // Tuple2[String, String](x, x) + case x: Double => x + +g(1.0d) // Has type 1.0d which is a subtype of Double +g("test") // Has type (String, String) +``` + +The scrutinee `x` is examined statically and the inline match is reduced +accordingly returning the corresponding value (with the type specialized because `g` is declared `transparent`). +This example performs a simple type test over the scrutinee. +The type can have a richer structure like the simple ADT below. +`toInt` matches the structure of a number in [Church-encoding](https://en.wikipedia.org/wiki/Church_encoding) +and _computes_ the corresponding integer. + +```scala +trait Nat +case object Zero extends Nat +case class Succ[N <: Nat](n: N) extends Nat + +transparent inline def toInt(n: Nat): Int = + inline n match + case Zero => 0 + case Succ(n1) => toInt(n1) + 1 + +inline val natTwo = toInt(Succ(Succ(Zero))) +val intTwo: 2 = natTwo +``` + +`natTwo` is inferred to have the singleton type 2. + +## Reference + +For more information about the semantics of `inline`, see the [Scala 2020: Semantics-preserving inlining for metaprogramming](https://dl.acm.org/doi/10.1145/3426426.3428486) paper. diff --git a/docs/_spec/TODOreference/metaprogramming/macros-spec.md b/docs/_spec/TODOreference/metaprogramming/macros-spec.md new file mode 100644 index 000000000000..6045354fdbbc --- /dev/null +++ b/docs/_spec/TODOreference/metaprogramming/macros-spec.md @@ -0,0 +1,714 @@ +--- +layout: doc-page +title: "Macros Spec" +nightlyOf: https://docs.scala-lang.org/scala3/reference/metaprogramming/macros-spec.html +--- + +## Formalization + +* Multi-stage programming with generative and analytical macros[^2] +* Multi-Stage Macro Calculus, Chapter 4 of Scalable Metaprogramming in Scala 3[^1]. 
+ + Contains and extends the calculus of _Multi-stage programming with generative and analytical macros_ with type polymorphism. + +## Syntax + +The quotation syntax using `'` and `$` was chosen to mimic the string interpolation syntax of Scala. +Like a string double-quotation, a single-quote block can contain splices. +However, unlike strings, splices can contain quotes using the same rules. + +```scala +s" Hello $name" s" Hello ${name}" +'{ hello($name) } '{ hello(${name}) } +${ hello('name) } ${ hello('{name}) } +``` + +### Quotes +Quotes come in four flavors: quoted identifiers, quoted blocks, quoted block patterns and quoted type patterns. +Scala 2 used quoted identifiers to represent `Symbol` literals. They were deprecated in Scala 3, allowing to use them for quotation. +```scala +SimpleExpr ::= ... + | `'` alphaid // quoted identifier + | `'` `{` Block `}` // quoted block +Pattern ::= ... + | `'` `{` Block `}` // quoted block pattern + | `'` `[` Type `]` // quoted type pattern +``` + +Quoted blocks and quoted block patterns contain an expression equivalent to a normal block of code. +When entering either of those we track the fact that we are in a quoted block (`inQuoteBlock`) which is used for spliced identifiers. +When entering a quoted block pattern we additionally track the fact that we are in a quoted pattern (`inQuotePattern`) which is used to distinguish spliced blocks and splice patterns. +Lastly, the quoted type pattern simply contains a type. + +### Splices +Splices come in three flavors: spliced identifiers, spliced blocks and splice patterns. +Scala specifies identifiers containing `$` as valid identifiers but reserves them for compiler and standard library use only. +Unfortunately, many libraries have used such identifiers in Scala 2. Therefore to mitigate the cost of migration, we still support them. +We work around this by only allowing spliced identifiers[^3] within quoted blocks or quoted block patterns (`inQuoteBlock`). 
+Splice blocks and splice patterns can contain an arbitrary block or pattern respectively. +They are distinguished based on their surrounding quote (`inQuotePattern`), a quote block will contain spliced blocks, and a quote block pattern will contain splice patterns. + +```scala +SimpleExpr ::= ... + | `$` alphaid if inQuoteBlock // spliced identifier + | `$` `{` Block `}` if !inQuotePattern // spliced block + | `$` `{` Pattern `}` if inQuotePattern // splice pattern +``` + +### Quoted Pattern Type Variables +Quoted pattern type variables in quoted patterns and quoted type patterns do not require additional syntax. +Any type definition or reference with a name composed of lower cases is assumed to be a pattern type variable definition while typing. +A backticked type name with lower cases is interpreted as a reference to the type with that name. + + +## Implementation + +### Run-Time Representation + +The standard library defines the `Quotes` interface which contains all the logic and the abstract classes `Expr` and `Type`. +The compiler implements the `Quotes` interface and provides the implementation of `Expr` and `Type`. + +##### `class Expr` +Expressions of type `Expr[T]` are represented by the following abstract class: +```scala +abstract class Expr[+T] private[scala] +``` +The only implementation of `Expr` is in the compiler along with the implementation of `Quotes`. +It is a class that wraps a typed AST and a `Scope` object with no methods of its own. +The `Scope` object is used to track the current splice scope and detect scope extrusions. + +##### `object Expr` +The companion object of `Expr` contains a few useful static methods; +the `apply`/`unapply` methods to use `ToExpr`/`FromExpr` with ease; +the `betaReduce` and `summon` methods. +It also contains methods to create expressions out of lists or sequences of expressions: `block`, `ofSeq`, `ofList`, `ofTupleFromSeq` and `ofTuple`. 
+ +```scala +object Expr: + def apply[T](x: T)(using ToExpr[T])(using Quotes): Expr[T] = ... + def unapply[T](x: Expr[T])(using FromExpr[T])(using Quotes): Option[T] = ... + def betaReduce[T](e: Expr[T])(using Quotes): Expr[T] = ... + def summon[T: Type](using Quotes): Option[Expr[T]] = ... + def block[T](stats: List[Expr[Any]], e: Expr[T])(using Quotes): Expr[T] = ... + def ofSeq[T: Type](xs: Seq[Expr[T]])(using Quotes): Expr[Seq[T]] = ... + def ofList[T: Type](xs: Seq[Expr[T]])(using Quotes): Expr[List[T]] = ... + def ofTupleFromSeq(xs: Seq[Expr[Any]])(using Quotes): Expr[Tuple] = ... + def ofTuple[T <: Tuple: Tuple.IsMappedBy[Expr]: Type](tup: T)(using Quotes): + Expr[Tuple.InverseMap[T, Expr]] = ... +``` + +##### `class Type` +Types of type `Type[T]` are represented by the following abstract class: +```scala +abstract class Type[T <: AnyKind] private[scala]: + type Underlying = T +``` + +The only implementation of `Type` is in the compiler along with the implementation of `Quotes`. +It is a class that wraps the AST of a type and a `Scope` object with no methods of its own. +The upper bound of `T` is `AnyKind` which implies that `T` may be a higher-kinded type. +The `Underlying` alias is used to select the type from an instance of `Type`. +Users never need to use this alias as they can always use `T` directly. +`Underlying` is used for internal encoding while compiling the code (see _Type Healing_). + +##### `object Type` +The companion object of `Type` contains a few useful static methods. +The first and most important one is the `Type.of` given definition. +This instance of `Type[T]` is summoned by default when no other instance is available. +The `of` operation is an intrinsic operation that the compiler will transform into code that will generate the `Type[T]` at run-time. +Secondly, the `Type.show[T]` operation will show a string representation of the type, which is often useful when debugging. 
+Finally, the object defines `valueOfConstant` (and `valueOfTuple`) which can transform singleton types (or tuples of singleton types) into their value.
+
+
+```scala
+object Type:
+  given of[T <: AnyKind](using Quotes): Type[T] = ...
+  def show[T <: AnyKind](using Type[T])(using Quotes): String = ...
+  def valueOfConstant[T](using Type[T])(using Quotes): Option[T] = ...
+  def valueOfTuple[T <: Tuple](using Type[T])(using Quotes): Option[T] = ...
+```
+
+##### `Quotes`
+The `Quotes` interface is where most of the primitive operations of the quotation system are defined.
+
+Quotes define all the `Expr[T]` methods as extension methods.
+`Type[T]` does not have methods and therefore does not appear here.
+These methods are available as long as `Quotes` is implicitly given in the current scope.
+
+The `Quotes` instance is also the entry point to the [reflection API](./reflection.md) through the `reflect` object.
+
+Finally, `Quotes` provides the internal logic used in quote un-pickling (`QuoteUnpickler`) and in quote pattern matching (`QuoteMatching`).
+These interfaces are added to the self-type of the trait to make sure they are implemented on this object but not visible to users of `Quotes`.
+
+Internally, the implementation of `Quotes` will also track its current splicing scope `Scope`.
+This scope will be attached to any expression that is created using this `Quotes` instance.
+
+```scala
+trait Quotes:
+  this: runtime.QuoteUnpickler & runtime.QuoteMatching =>
+
+  extension [T](self: Expr[T])
+    def show: String
+    def matches(that: Expr[Any]): Boolean
+    def value(using FromExpr[T]): Option[T]
+    def valueOrAbort(using FromExpr[T]): T
+  end extension
+
+  extension (self: Expr[Any])
+    def isExprOf[X](using Type[X]): Boolean
+    def asExprOf[X](using Type[X]): Expr[X]
+  end extension
+
+  // abstract object reflect ...
+```
+
+
+##### `Scope`
+The splice context is represented as a stack (immutable list) of `Scope` objects.
+Each `Scope` contains the position of the splice (used for error reporting) and a reference to the enclosing splice scope `Scope`. +A scope is a sub-scope of another if the other is contained in its parents. +This check is performed when an expression is spliced into another using the `Scope` provided in the current scope in `Quotes` and the one in the `Expr` or `Type`. + +### Entry Points +The two entry points for multi-stage programming are macros and the `run` operation. + +#### Macros +Inline macro definitions will inline a top-level splice (a splice not nested in a quote). +This splice needs to be evaluated at compile-time. +In _Avoiding a complete interpreter_[^1], we stated the following restrictions: + + * The top-level splice must contain a single call to a compiled static method. + * Arguments to the function are either literal constants, quoted expressions (parameters), `Type.of` for type parameters and a reference to `Quotes`. + +These restrictions make the implementation of the interpreter quite simple. +Java Reflection is used to call the single function call in the top-level splice. +The execution of that function is entirely done on compiled bytecode. +These are Scala static methods and may not always become Java static methods, they might be inside module objects. +As modules are encoded as class instances, we need to interpret the prefix of the method to instantiate it before we can invoke the method. + +The code of the arguments has not been compiled and therefore needs to be interpreted by the compiler. +Interpreting literal constants is as simple as extracting the constant from the AST that represents literals. +When interpreting a quoted expression, the contents of the quote is kept as an AST which is wrapped inside the implementation of `Expr`. +Calls to `Type.of[T]` also wrap the AST of the type inside the implementation of `Type`. +Finally, the reference to `Quotes` is supposed to be the reference to the quotes provided by the splice. 
+This reference is interpreted as a new instance of `Quotes` that contains a fresh initial `Scope` with no parents.
+
+The result of calling the method via Java Reflection will return an `Expr` containing a new AST that was generated by the implementation of that macro.
+The scope of this `Expr` is checked to make sure it did not extrude from some splice or `run` operation.
+Then the AST is extracted from the `Expr` and it is inserted as replacement for the AST that contained the top-level splice.
+
+
+#### Run-time Multi-Stage Programming
+
+To be able to compile the code, the `scala.quoted.staging` library defines the `Compiler` trait.
+An instance of `staging.Compiler` is a wrapper over the normal Scala 3 compiler.
+To be instantiated it requires an instance of the JVM _classloader_ of the application.
+
+```scala
+import scala.quoted.staging.*
+given Compiler = Compiler.make(getClass.getClassLoader)
+```
+
+The classloader is needed for the compiler to know which dependencies have been loaded and to load the generated code using the same classloader.
+
+```scala
+def mkPower2()(using Quotes): Expr[Double => Double] = ...
+
+run(mkPower2())
+```
+To run the previous example, the compiler will create code equivalent to the following class and compile it using a new `Scope` without parents.
+
+```scala
+class RunInstance:
+  def exec(): Double => Double = ${ mkPower2() }
+```
+Finally, `run` will interpret `(new RunInstance).exec()` to evaluate the contents of the quote.
+To do this, the resulting `RunInstance` class is loaded in the JVM using Java Reflection, instantiated and then the `exec` method is invoked.
+
+
+### Compilation
+
+Quotes and splices are primitive forms in the generated typed abstract syntax trees.
+These need to be type-checked with some extra rules, e.g., staging levels need to be checked and the references to generic types need to be adapted.
+Finally, quoted expressions that will be generated at run-time need to be encoded (serialized) and decoded (deserialized). + +#### Typing Quoted Expressions + +The typing process for quoted expressions and splices with `Expr` is relatively straightforward. +At its core, quotes are desugared into calls to `quote`, splices are desugared into calls to `splice`. +We track the quotation level when desugaring into these methods. + + +```scala +def quote[T](x: T): Quotes ?=> Expr[T] + +def splice[T](x: Quotes ?=> Expr[T]): T +``` + +It would be impossible to track the quotation levels if users wrote calls to these methods directly. +To know if it is a call to one of those methods we would need to type it first, but to type it we would need to know if it is one of these methods to update the quotation level. +Therefore these methods can only be used by the compiler. + +At run-time, the splice needs to have a reference to the `Quotes` that created its surrounding quote. +To simplify this for later phases, we track the current `Quotes` and encode a reference directly in the splice using `nestedSplice` instead of `splice`. + +```scala +def nestedSplice[T](q: Quotes)(x: q.Nested ?=> Expr[T]): T +``` +With this addition, the original `splice` is only used for top-level splices. + +The levels are mostly used to identify top-level splices that need to be evaluated while typing. +We do not use the quotation level to influence the typing process. +Level checking is performed at a later phase. +This ensures that a source expression in a quote will have the same elaboration as a source expression outside the quote. + + + +#### Quote Pattern Matching + +Pattern matching is defined in the trait `QuoteMatching`, which is part of the self type of `Quotes`. +It is implemented by `Quotes` but not available to users of `Quotes`. +To access it, the compiler generates a cast from `Quotes` to `QuoteMatching` and then selects one of its two members: `ExprMatch` or `TypeMatch`. 
+`ExprMatch` defines an `unapply` extractor method that is used to encode quote patterns and `TypeMatch` defines an `unapply` method for quoted type patterns. + +```scala +trait Quotes: + self: runtime.QuoteMatching & ... => + ... + +trait QuoteMatching: + object ExprMatch: + def unapply[TypeBindings <: Tuple, Tup <: Tuple] + (scrutinee: Expr[Any]) + (using pattern: Expr[Any]): Option[Tup] = ... + object TypeMatch: + ... +``` + +These extractor methods are only meant to be used in code generated by the compiler. +The call to the extractor that is generated has an already elaborated form that cannot be written in source, namely explicit type parameters and explicit contextual parameters. + +This extractor returns a tuple type `Tup` which cannot be inferred from the types in the method signature. +This type will be computed when typing the quote pattern and will be explicitly added to the extractor call. +To refer to type variables in arbitrary places of `Tup`, we need to define them all before their use, hence we have `TypeBindings`, which will contain all pattern type variable definitions. +The extractor also receives a given parameter of type `Expr[Any]` that will contain an expression that represents the pattern. +The compiler will explicitly add this pattern expression. +We use a given parameter because these are the only parameters we are allowed to add to the extractor call in a pattern position. + +This extractor is a bit convoluted, but it encodes away all the quotation-specific features. +It compiles the pattern down into a representation that the pattern matcher compiler phase understands. + +The quote patterns are encoded into two parts: a tuple pattern that is tasked with extracting the result of the match and a quoted expression representing the pattern. +For example, if the pattern has no `$` we will have an `EmptyTuple` as the pattern and `'{1}` to represent the pattern. 
+ +```scala + case '{ 1 } => +// is elaborated to + case ExprMatch(EmptyTuple)(using '{1}) => +// ^^^^^^^^^^ ^^^^^^^^^^ +// pattern expression +``` +When extracting expressions, each pattern that is contained in a splice `${..}` will be placed in order in the tuple pattern. +In the following case, the `f` and `x` are placed in a tuple pattern `(f, x)`. +The type of the tuple is encoded in the `Tup` and not only in the tuple itself. +Otherwise, the extractor would return a tuple `Tuple` for which the types need to be tested which is in turn not possible due to type erasure. + +```scala + case '{ ((y: Int) => $f(y)).apply($x) } => +// is elaborated to + case ExprMatch[.., (Expr[Int => Int], Expr[Int])]((f, x))(using pattern) => +// pattern = '{ ((y: Int) => pat[Int](y)).apply(pat[Int]()) } +``` +The contents of the quote are transformed into a valid quote expression by replacing the splice with a marker expression `pat[T](..)`. +The type `T` is taken from the type of the splice and the arguments are the HOAS arguments. +This implies that a `pat[T]()` is a closed pattern and `pat[T](y)` is an HOAS pattern that can refer to `y`. + + +Type variables in quoted patterns are first normalized to have all definitions at the start of the pattern. +For each definition of a type variable `t` in the pattern we will add a type variable definition in `TypeBindings`. +Each one will have a corresponding `Type[t]` that will get extracted if the pattern matches. +These `Type[t]` are also listed in the `Tup` and added in the tuple pattern. +It is additionally marked as `using` in the pattern to make it implicitly available in this case branch. 
+ + +```scala + case '{ type t; ($xs: List[t]).map[t](identity[t]) } => +// is elaborated to + case ExprMatch[(t), (Type[t], Expr[List[t]])]((using t, xs))(using p) => +// ^^^ ^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^ ^^^^^^^ +// type bindings result type pattern expression +// p = '{ @patternType type u; pat[List[u]]().map[u](identity[u]) } +``` + +The contents of the quote are transformed into a valid quote expression by replacing type variables with fresh ones that do not escape the quote scope. +These are also annotated to be easily identifiable as pattern variables. + +#### Level Consistency Checking +Level consistency checking is performed after typing the program as a static check. +To check level consistency we traverse the tree top-down remembering the context staging level. +Each local definition in scope is recorded with its level and each term reference to a definition is checked against the current staging level. +```scala +// level 0 +'{ // level 1 + val x = ... // level 1 with (x -> 1) + ${ // level 0 (x -> 1) + val y = ... // level 0 with (x -> 1, y -> 0) + x // error: defined at level 1 but used in level 0 + } + // level 1 (x -> 1) + x // x is ok +} +``` + +#### Type Healing + +When using a generic type `T` in a future stage, it is necessary to have a given `Type[T]` in scope. +The compiler needs to identify those references and link them with the instance of `Type[T]`. +For instance consider the following example: + +```scala +def emptyList[T](using t: Type[T])(using Quotes): Expr[List[T]] = + '{ List.empty[T] } +``` + +For each reference to a generic type `T` that is defined at level 0 and used at level 1 or greater, the compiler will summon a `Type[T]`. +This is usually the given type that is provided as parameter, `t` in this case. +We can use the type `t.Underlying` to replace `T` as it is an alias of that type. +But `t.Underlying` contains the extra information that it is `t` that will be used in the evaluation of the quote. 
+In a sense, `Underlying` acts like a splice for types. + +```scala +def emptyList[T](using t: Type[T])(using Quotes): Expr[List[T]] = + '{ List.empty[t.Underlying] } +``` + +Due to some technical limitations, it is not always possible to replace the type reference with the AST containing `t.Underlying`. +To overcome this limitation, we can simply define a list of type aliases at the start of the quote and insert the `t.Underlying` there. +This has the added advantage that we do not have to repeatedly insert the `t.Underlying` in the quote. + +```scala +def emptyList[T](using t: Type[T])(using Quotes): Expr[List[T]] = + '{ type U = t.Underlying; List.empty[U] } +``` +These aliases can be used at any level within the quote and this transformation is only performed on quotes that are at level 0. + +```scala + '{ List.empty[T] ... '{ List.empty[T] } ... } +// becomes + '{ type U = t.Underlying; List.empty[U] ... '{ List.empty[U] } ... } +``` +If we define a generic type at level 1 or greater, it will not be subject to this transformation. +In some future compilation stage, when the definition of the generic type is at level 0, it will be subject to this transformation. +This simplifies the transformation logic and avoids leaking the encoding into code that a macro could inspect. + +```scala +'{ + def emptyList[T: Type](using Quotes): Expr[List[T]] = '{ List.empty[T] } + ... +} +``` +A similar transformation is performed on `Type.of[T]`. +Any generic type in `T` needs to have an implicitly given `Type[T]` in scope, which will also be used as a path. +The example: + +```scala +def empty[T](using t: Type[T])(using Quotes): Expr[T] = + Type.of[T] match ... +// becomes +def empty[T](using t: Type[T])(using Quotes): Expr[T] = + Type.of[t.Underlying] match ... +// then becomes +def empty[T](using t: Type[T])(using Quotes): Expr[T] = + t match ... +``` + +The operation `Type.of[t.Underlying]` can be optimized to just `t`. +But this is not always the case. 
+If the generic reference is nested in the type, we will need to keep the `Type.of`. + +```scala +def matchOnList[T](using t: Type[T])(using Quotes): Expr[List[T]] = + Type.of[List[T]] match ... +// becomes +def matchOnList[T](using t: Type[T])(using Quotes): Expr[List[T]] = + Type.of[List[t.Underlying]] match ... +``` + +By doing this transformation, we ensure that each abstract type `U` used in `Type.of` has an implicit `Type[U]` in scope. +This representation makes it simpler to identify parts of the type that are statically known from those that are known dynamically. +Type aliases are also added within the type of the `Type.of` though these are not valid source code. +These would look like `Type.of[{type U = t.Underlying; Map[U, U]}]` if written in source code. + + +#### Splice Normalization + +The contents of a splice may refer to variables defined in the enclosing quote. +This complicates the process of serialization of the contents of the quotes. +To make serialization simple, we first transform the contents of each level 1 splice. +Consider the following example: + +```scala +def power5to(n: Expr[Int]): Expr[Double] = '{ + val x: Int = 5 + ${ powerCode('{x}, n) } +} +``` + +The variable `x` is defined in the quote and used in the splice. +The normal form will extract all references to `x` and replace them with a staged version of `x`. +We will replace the reference to `x` of type `T` with a `$y` where `y` is of type `Expr[T]`. +Then we wrap the new contents of the splice in a lambda that defines `y` and apply it to the quoted version of `x`. +After this transformation we have 2 parts, a lambda without references to the quote, which knows how to compute the contents of the splice, and a sequence of quoted arguments that refer to variables defined in the lambda. 
+
+```scala
+def power5to(n: Expr[Int]): Expr[Double] = '{
+  val x: Int = 5
+  ${ ((y: Expr[Int]) => powerCode('{$y}, n)).apply('x) }
+}
+```
+
+In general, the splice normal form has the shape `${ <lambda>.apply(<args>*) }` and the following constraints:
+ * `<lambda>` a lambda expression that does not refer to variables defined in the outer quote
+ * `<args>` sequence of quoted expressions or `Type.of` containing references to variables defined in the enclosing quote and no references to local variables defined outside the enclosing quote
+
+
+##### Function references normalization
+A reference to a function `f` that receives parameters is not a valid value in Scala.
+Such a function reference `f` can be eta-expanded as `x => f(x)` to be used as a lambda value.
+Therefore function references cannot be transformed by the normalization as directly as other expressions as we cannot represent `'{f}` with a method reference type.
+We can use the eta-expanded form of `f` in the normalized form.
+For example, consider the reference to `f` below.
+
+```scala
+'{
+  def f(a: Int)(b: Int, c: Int): Int = 2 + a + b + c
+  ${ '{ f(3)(4, 5) } }
+}
+```
+
+To normalize this code, we can eta-expand the reference to `f` and place it in a quote containing a proper expression.
+Therefore the normalized form of the argument `'{f}` becomes the quoted lambda `'{ (a: Int) => (b: Int, c: Int) => f(a)(b, c) }` and is an expression of type `Expr[Int => (Int, Int) => Int]`.
+The eta-expansion produces one curried lambda per parameter list.
+The application `f(3)(4, 5)` does not become `$g(3)(4, 5)` but `$g.apply(3).apply(4, 5)`.
+We add the `apply` because `g` is not a quoted reference to a function but a curried lambda.
+
+```scala
+'{
+  def f(a: Int)(b: Int, c: Int): Int = 2 + a + b + c
+  ${
+    (
+      (g: Expr[Int => (Int, Int) => Int]) => '{$g.apply(3).apply(4, 5)}
+    ).apply('{ (a: Int) => (b: Int, c: Int) => f(a)(b, c) })
+  }
+}
+```
+
+Then we can apply it and beta-reduce the application when generating the code.
+
+```scala
+  (g: Expr[Int => Int => Int]) => betaReduce('{$g.apply(3).apply(4)})
+```
+
+
+##### Variable assignment normalization
+A reference to a mutable variable in the left-hand side of an assignment cannot be transformed directly as it is not in an expression position.
+```scala
+'{
+  var x: Int = 5
+  ${ g('{x = 2}) }
+}
+```
+
+We can use the same strategy used for function references by eta-expanding the assignment operation `x = _` into `y => x = y`.
+
+```scala
+'{
+  var x: Int = 5
+  ${
+    g(
+      (
+        (f: Expr[Int => Unit]) => betaReduce('{$f(2)})
+      ).apply('{ (y: Int) => x = $y })
+    )
+  }
+}
+```
+
+
+##### Type normalization
+Types defined in the quote are subject to a similar transformation.
+In this example, `T` is defined within the quote at level 1 and used in the splice again at level 1.
+
+```scala
+'{ def f[T] = ${ '{g[T]} } }
+```
+
+The normalization will add a `Type[T]` to the lambda, and we will insert this reference.
+The difference is that it will add an alias similar to the one used in type healing.
+In this example, we create a `type U` that aliases the staged type.
+
+```scala
+'{
+  def f[T] = ${
+    (
+      (t: Type[T]) => '{type U = t.Underlying; g[U]}
+    ).apply(Type.of[T])
+  }
+}
+```
+
+#### Serialization
+
+Quoted code needs to be pickled to make it available at run-time in the next compilation phase.
+We implement this by pickling the AST as a TASTy binary.
+
+##### TASTy
+The TASTy format is the typed abstract syntax tree serialization format of Scala 3.
+It usually pickles the fully elaborated code after type-checking and is kept along the generated Java classfiles.
+
+
+##### Pickling
+We use TASTy as a serialization format for the contents of the quotes.
+To show how serialization is performed, we will use the following example.
+```scala
+'{
+  val (x, n): (Double, Int) = (5, 2)
+  ${ powerCode('{x}, '{n}) } * ${ powerCode('{2}, '{n}) }
+}
+```
+
+This quote is transformed into the following code when normalizing the splices.
+ +```scala +'{ + val (x, n): (Double, Int) = (5, 2) + ${ + ((y: Expr[Double], m: Expr[Int]) => powerCode(y, m)).apply('x, 'n) + } * ${ + ((m: Expr[Int]) => powerCode('{2}, m)).apply('n) + } +} +``` + +Splice normalization is a key part of the serialization process as it only allows references to variables defined in the quote in the arguments of the lambda in the splice. +This makes it possible to create a closed representation of the quote without much effort. +The first step is to remove all the splices and replace them with holes. +A hole is like a splice but it lacks the knowledge of how to compute the contents of the splice. +Instead, it knows the index of the hole and the contents of the arguments of the splice. +We can see this transformation in the following example where a hole is represented by `<< idx; holeType; args* >>`. + +```scala + ${ ((y: Expr[Double], m: Expr[Int]) => powerCode(y, m)).apply('x, 'n) } +// becomes + << 0; Double; x, n >> +``` + +As this was the first hole it has index 0. +The hole type is `Double`, which needs to be remembered now that we cannot infer it from the contents of the splice. +The arguments of the splice are `x` and `n`; note that they do not require quoting because they were moved out of the splice. + +References to healed types are handled in a similar way. +Consider the `emptyList` example, which shows the type aliases that are inserted into the quote. +```scala +'{ List.empty[T] } +// type healed to +'{ type U = t.Underlying; List.empty[U] } +``` +Instead of replacing a splice, we replace the `t.Underlying` type with a type hole. +The type hole is represented by `<< idx; bounds >>`. +```scala +'{ type U = << 0; Nothing..Any >>; List.empty[U] } +``` +Here, the bounds of `Nothing..Any` are the bounds of the original `T` type. +The types of a `Type.of` are transformed in the same way. + + +With these transformations, the contents of the quote or `Type.of` are guaranteed to be closed and therefore can be pickled. 
+The AST is pickled into TASTy, which is a sequence of bytes. +This sequence of bytes needs to be instantiated in the bytecode, but unfortunately it cannot be dumped into the classfile as bytes. +To reify it we encode the bytes into a Java `String`. +In the following examples we display this encoding in human readable form with the fictitious |tasty"..."| string literal. + +```scala +// pickled AST bytes encoded in a base64 string +tasty""" + val (x, n): (Double, Int) = (5, 2) + << 0; Double; x, n >> * << 1; Double; n >> +""" +// or +tasty""" + type U = << 0; Nothing..Any; >> + List.empty[U] +""" +``` +The contents of a quote or `Type.of` are not always pickled. +In some cases it is better to generate equivalent (smaller and/or faster) code that will compute the expression. +Literal values are compiled into a call to `Expr()` using the implementation of `ToExpr` to create the quoted expression. +This is currently performed only on literal values, but can be extended to any value for which we have a `ToExpr` defined in the standard library. +Similarly, for non-generic types we can use their respective `java.lang.Class` and convert them into a `Type` using a primitive operation `typeConstructorOf` defined in the reflection API. + +##### Unpickling + +Now that we have seen how a quote is pickled, we can look at how to unpickle it. +We will continue with the previous example. + +Holes were used to replace the splices in the quote. +When we perform this transformation we also need to remember the lambdas from the splices and their hole index. +When unpickling a hole, the corresponding splice lambda will be used to compute the contents of the hole. +The lambda will receive as parameters quoted versions of the arguments of the hole. 
+For example to compute the contents of `<< 0; Double; x, n >>` we will evaluate the following code
+
+```scala
+  ((y: Expr[Double], m: Expr[Int]) => powerCode(y, m)).apply('x, 'n)
+```
+
+The evaluation is not as trivial as it looks, because the lambda comes from compiled code and the rest is code that must be interpreted.
+We put the AST of `x` and `n` into `Expr` objects to simulate the quotes and then we use Java Reflection to call the `apply` method.
+
+We may have many holes in a quote and therefore as many lambdas.
+To avoid the instantiation of many lambdas, we can join them together into a single lambda.
+Apart from the list of arguments, this lambda will also take the index of the hole that is being evaluated.
+It will perform a switch match on the index and call the corresponding lambda in each branch.
+Each branch will also extract the arguments depending on the definition of the lambda.
+The application of the original lambdas are beta-reduced to avoid extra overhead.
+
+```scala
+(idx: Int, args: Seq[Any]) =>
+  idx match
+    case 0 => // for << 0; Double; x, n >>
+      val x = args(0).asInstanceOf[Expr[Double]]
+      val n = args(1).asInstanceOf[Expr[Int]]
+      powerCode(x, n)
+    case 1 => // for << 1; Double; n >>
+      val n = args(0).asInstanceOf[Expr[Int]]
+      powerCode('{2}, n)
+```
+
+This is similar to what we do for splices: when we replace the type aliases with holes, we keep track of the index of the hole.
+Instead of lambdas, we will have a list of references to instances of `Type`.
+From the following example we would extract `t`, `u`, ... .
+
+```scala
+  '{ type T1 = t1.Underlying; type Tn = tn.Underlying; ... }
+// with holes
+  '{ type T1 = << 0; ... >>; type Tn = << n-1; ... >>; ... }
+```
+
+As the type holes are at the start of the quote, they will have the first `N` indices.
+This implies that we can place the references in a sequence `Seq(t, u, ...)` where the index in the sequence is the same as the hole index.
+
+Lastly, the quote itself is replaced by a call to `QuoteUnpickler.unpickleExpr` which will unpickle the AST, evaluate the holes, i.e., splices, and wrap the resulting AST in an `Expr[Int]`.
+This method takes the pickled |tasty"..."|, the types and the hole lambda.
+Similarly, `Type.of` is replaced with a call to `QuoteUnpickler.unpickleType` but only receives the pickled |tasty"..."| and the types.
+Because `QuoteUnpickler` is part of the self-type of the `Quotes` class, we have to cast the instance but know that this cast will always succeed.
+
+```scala
+quotes.asInstanceOf[runtime.QuoteUnpickler].unpickleExpr[T](
+  pickled = tasty"...",
+  types = Seq(...),
+  holes = (idx: Int, args: Seq[Any]) => idx match ...
+)
+```
+
+[^1]: [Scalable Metaprogramming in Scala 3](https://infoscience.epfl.ch/record/299370)
+[^2]: [Multi-stage programming with generative and analytical macros](https://dl.acm.org/doi/10.1145/3486609.3487203).
+[^3]: In quotes, identifiers starting with `$` must be surrounded by backticks (`` `$` ``). For example `$conforms` from `scala.Predef`.
diff --git a/docs/_spec/TODOreference/metaprogramming/macros.md b/docs/_spec/TODOreference/metaprogramming/macros.md
new file mode 100644
index 000000000000..e39f6f1022b8
--- /dev/null
+++ b/docs/_spec/TODOreference/metaprogramming/macros.md
@@ -0,0 +1,621 @@
+---
+layout: doc-page
+title: "Macros"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/metaprogramming/macros.html
+---
+
+> When developing macros enable `-Xcheck-macros` scalac option flag to have extra runtime checks.
+
+## Multi-Staging
+
+#### Quoted expressions
+Multi-stage programming in Scala 3 uses quotes `'{..}` to delay, i.e., stage, execution of code and splices `${..}` to evaluate and insert code into quotes.
+Quoted expressions are typed as `Expr[T]` with a covariant type parameter `T`.
+It is easy to write statically safe code generators with these two concepts.
+The following example shows a naive implementation of the $x^n$ mathematical operation.
+
+```scala
+import scala.quoted.*
+def unrolledPowerCode(x: Expr[Double], n: Int)(using Quotes): Expr[Double] =
+  if n == 0 then '{ 1.0 }
+  else if n == 1 then x
+  else '{ $x * ${ unrolledPowerCode(x, n-1) } }
+```

+```scala
+'{
+  val x = ...
+  ${ unrolledPowerCode('{x}, 3) } // evaluates to: x * x * x
+}
+```
+
+Quotes and splices are duals of each other.
+For an arbitrary expression `x` of type `T` we have `${'{x}} = x` and for an arbitrary expression `e` of type `Expr[T]` we have `'{${e}} = e`.
+
+#### Abstract types
+Quotes can handle generic and abstract types using the type class `Type[T]`.
+A quote that refers to a generic or abstract type `T` requires a given `Type[T]` to be provided in the implicit scope.
+The following examples show how `T` is annotated with a context bound (`: Type`) to provide an implicit `Type[T]`, or the equivalent `using Type[T]` parameter.
+
+```scala
+import scala.quoted.*
+def singletonListExpr[T: Type](x: Expr[T])(using Quotes): Expr[List[T]] =
+  '{ List[T]($x) } // generic T used within a quote
+
+def emptyListExpr[T](using Type[T], Quotes): Expr[List[T]] =
+  '{ List.empty[T] } // generic T used within a quote
+```
+
+If no other instance is found, the default `Type.of[T]` is used.
+The following example implicitly uses `Type.of[String]` and `Type.of[Option[U]]`.
+```scala
+val list1: Expr[List[String]] =
+  singletonListExpr('{"hello"}) // requires a given `Type[String]`
+val list0: Expr[List[Option[U]]] =
+  emptyListExpr[Option[U]] // requires a given `Type[Option[U]]`
+```
+
+
+The `Type.of[T]` method is a primitive operation that the compiler will handle specially.
+It will provide the implicit if the type `T` is statically known, or if `T` contains some other types `Ui` for which we have an implicit `Type[Ui]`.
+In the example, `Type.of[String]` has a statically known type and `Type.of[Option[U]]` requires an implicit `Type[U]` in scope.
+
+#### Quote context
+We also track the current quotation context using a given `Quotes` instance.
+To create a quote `'{..}` we require a given `Quotes` context, which should be passed as a contextual parameter `(using Quotes)` to the function.
+Each splice will provide a new `Quotes` context within the scope of the splice.
+Therefore quotes and splices can be seen as methods with the following signatures, but with special semantics.
+```scala
+def '[T](x: T): Quotes ?=> Expr[T] // def '[T](x: T)(using Quotes): Expr[T]
+
+def $[T](x: Quotes ?=> Expr[T]): T
+```
+
+The lambda with a question mark `?=>` is a contextual function; it is a lambda that takes its argument implicitly and provides it implicitly in the implementation of the lambda.
+`Quotes` are used for a variety of purposes that will be mentioned when covering those topics.
+
+## Quoted Values
+
+#### Lifting
+While it is not possible to use cross-stage persistence of local variables, it is possible to lift them to the next stage.
+To this end, we provide the `Expr.apply` method, which can take a value and lift it into a quoted representation of the value.
+
+```scala
+val expr1plus1: Expr[Int] = '{ 1 + 1 }
+
+val expr2: Expr[Int] = Expr(1 + 1) // lift 2 into '{ 2 }
+```
+
+While it looks type wise similar to `'{ 1 + 1 }`, the semantics of `Expr(1 + 1)` are quite different.
+`Expr(1 + 1)` will not stage or delay any computation; the argument is evaluated to a value and then lifted into a quote.
+The quote will contain code that will create a copy of this value in the next stage.
+`Expr` is polymorphic and user-extensible via the `ToExpr` type class.
+
+```scala
+trait ToExpr[T]:
+  def apply(x: T)(using Quotes): Expr[T]
+```
+
+We can implement a `ToExpr` using a `given` definition that will add the definition to the implicits in scope.
+In the following example we show how to implement a `ToExpr[Option[T]]` for any liftable type `T`.
+
+```scala
+given OptionToExpr[T: Type: ToExpr]: ToExpr[Option[T]] with
+  def apply(opt: Option[T])(using Quotes): Expr[Option[T]] =
+    opt match
+      case Some(x) => '{ Some[T]( ${Expr(x)} ) }
+      case None => '{ None }
+```
+
+The `ToExpr` for primitive types must be implemented as primitive operations in the system.
+In our case, we use the reflection API to implement them.
+
+#### Extracting values from quotes
+To be able to generate optimized code using the method `unrolledPowerCode`, the macro implementation `powerCode` needs to first
+determine whether the argument passed as parameter `n` is a known constant value.
+This can be achieved via _unlifting_ using the `Expr.unapply` extractor from our library implementation, which will only match if `n` is a quoted constant and extracts its value.
+
+```scala
+def powerCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] =
+  n match
+    case Expr(m) => // it is a constant: unlift code n='{m} into number m
+      unrolledPowerCode(x, m)
+    case _ => // not known: call power at run-time
+      '{ power($x, $n) }
+```
+
+Alternatively, the `n.value` method can be used to get an `Option[Int]` with the value or `n.valueOrAbort` to get the value directly.
+```scala
+def powerCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] =
+  // emits an error message if `n` is not a constant
+  unrolledPowerCode(x, n.valueOrAbort)
+```
+
+`Expr.unapply` and all variants of `value` are polymorphic and user-extensible via a given `FromExpr` type class.
+
+```scala
+trait FromExpr[T]:
+  def unapply(x: Expr[T])(using Quotes): Option[T]
+```
+
+We can use `given` definitions to implement the `FromExpr` as we did for `ToExpr`.
+The `FromExpr` for primitive types must be implemented as primitive operations in the system.
+In our case, we use the reflection API to implement them.
+To implement `FromExpr` for non-primitive types we use quote pattern matching (for example `OptionFromExpr`).
+
+
+## Macros and Multi-Stage Programming
+
+The system supports multi-stage macros and run-time multi-stage programming using the same quotation abstractions.
+
+### Multi-Stage Macros
+
+#### Macros
+We can generalize the splicing abstraction to express macros.
+A macro consists of a top-level splice that is not nested in any quote.
+Conceptually, the contents of the splice are evaluated one stage earlier than the program.
+In other words, the contents are evaluated while compiling the program. The generated code resulting from the macro replaces the splice in the program.
+
+```scala
+def power2(x: Double): Double =
+  ${ unrolledPowerCode('x, 2) } // x * x
+```
+
+#### Inline macros
+Since using the splices in the middle of a program is not as ergonomic as calling a function, we hide the staging mechanism from end-users of macros. We have a uniform way of calling macros and normal functions.
+For this, _we restrict the use of top-level splices to only appear in inline methods_[^1][^2].
+
+```scala
+// inline macro definition
+inline def powerMacro(x: Double, inline n: Int): Double =
+  ${ powerCode('x, 'n) }
+
+// user code
+def power2(x: Double): Double =
+  powerMacro(x, 2) // x * x
+```
+
+The evaluation of the macro will only happen when the code is inlined into `power2`.
+When inlined, the code is equivalent to the previous definition of `power2`.
+A consequence of using inline methods is that none of the arguments nor the return type of the macro will have to mention the `Expr` types; this hides all aspects of metaprogramming from the end-users.
+
+#### Avoiding a complete interpreter
+When evaluating a top-level splice, the compiler needs to interpret the code that is within the splice.
+Providing an interpreter for the entire language is quite tricky, and it is even more challenging to make that interpreter run efficiently.
+To avoid needing a complete interpreter, we can impose the following restrictions on splices to simplify the evaluation of the code in top-level splices. + * The top-level splice must contain a single call to a compiled static method. + * Arguments to the function are literal constants, quoted expressions (parameters), calls to `Type.of` for type parameters and a reference to `Quotes`. + +In particular, these restrictions disallow the use of splices in top-level splices. +Such a splice would require several stages of interpretation which would be unnecessarily inefficient. + +#### Compilation stages +The macro implementation (i.e., the method called in the top-level splice) can come from any pre-compiled library. +This provides a clear difference between the stages of the compilation process. +Consider the following 3 source files defined in distinct libraries. +```scala +// Macro.scala +def powerCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] = ... +inline def powerMacro(x: Double, inline n: Int): Double = + ${ powerCode('x, 'n) } +``` + +```scala +// Lib.scala (depends on Macro.scala) +def power2(x: Double) = + ${ powerCode('x, '{2}) } // inlined from a call to: powerMacro(x, 2) +``` + +```scala +// App.scala (depends on Lib.scala) +@main def app() = power2(3.14) +``` +One way to syntactically visualize this is to put the application in a quote that delays the compilation of the application. +Then the application dependencies can be placed in an outer quote that contains the quoted application, and we repeat this recursively for dependencies of dependencies. + +```scala +'{ // macro library (compilation stage 1) + def powerCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] = + ... 
+ inline def powerMacro(x: Double, inline n: Int): Double = + ${ powerCode('x, 'n) } + '{ // library using macros (compilation stage 2) + def power2(x: Double) = + ${ powerCode('x, '{2}) } // inlined from a call to: powerMacro(x, 2) + '{ power2(3.14) /* app (compilation stage 3) */ } + } +} +``` + +To make the system more versatile, we allow calling macros in the project where it is defined, with some restrictions. +For example, to compile `Macro.scala` and `Lib.scala` together in the same library. +To this end, we do not follow the simpler syntactic model and rely on semantic information from the source files. +When compiling a source, if we detect a call to a macro that is not compiled yet, we delay the compilation of this source to the following compilation stage. +In the example, we would delay the compilation of `Lib.scala` because it contains a compile-time call to `powerCode`. +Compilation stages are repeated until all sources are compiled, or no progress can be made. +If no progress is made, there was a cyclic dependency between the definition and the use of the macro. +We also need to detect if at runtime the macro depends on sources that have not been compiled yet. +These are detected by executing the macro and checking for JVM linking errors to classes that have not been compiled yet. + +### Run-Time Multi-Stage Programming + +See [Run-Time Multi-Stage Programming](./staging.md) + +## Safety + +Multi-stage programming is by design statically safe and cross-stage safe. + +### Static Safety + +#### Hygiene +All identifier names are interpreted as symbolic references to the corresponding variable in the context of the quote. +Therefore, while evaluating the quote, it is not possible to accidentally rebind a reference to a new variable with the same textual name. + +#### Well-typed +If a quote is well typed, then the generated code is well typed. +This is a simple consequence of tracking the type of each expression. 
+An `Expr[T]` can only be created from a quote that contains an expression of type `T`.
+Conversely, an `Expr[T]` can only be spliced in a location that expects a type `T`.
+As mentioned before, `Expr` is covariant in its type parameter.
+This means that an `Expr[T]` can contain an expression of a subtype of `T`.
+When spliced in a location that expects a type `T`, these expressions also have a valid type.
+
+### Cross-Stage Safety
+
+#### Level consistency
+We define the _staging level_ of some code as the number of quotes minus the number of splices surrounding said code.
+Local variables must be defined and used in the same staging level.
+
+It is never possible to access a local variable from a lower staging level as it does not yet exist.
+
+```scala
+def badPower(x: Double, n: Int): Double =
+  ${ unrolledPowerCode('x, n) } // error: value of `n` not known yet
+```
+
+
+In the context of macros and _cross-platform portability_, that is,
+macros compiled on one machine but potentially executed on another,
+we cannot support cross-stage persistence of local variables.
+Therefore, local variables can only be accessed at precisely the same staging level in our system.
+
+```scala
+def badPowerCode(x: Expr[Double], n: Int)(using Quotes): Expr[Double] =
+  // error: `n` potentially not available in the next execution environment
+  '{ power($x, n) }
+```
+
+
+The rules are slightly different for global definitions, such as `unrolledPowerCode`.
+It is possible to generate code that contains a reference to a _global_ definition such as in `'{ power(2, 4) }`.
+This is a limited form of cross-stage persistence that does not impede cross-platform portability, where we refer to the already compiled code for `power`.
+Each compilation step will lower the staging level by one while keeping global definitions.
+In consequence, we can refer to compiled definitions in macros such as `unrolledPowerCode` in `${ unrolledPowerCode('x, 2) }`.
+
+We can summarize level consistency in two rules:
+ * Local variables can be used only at the same staging level as their definition
+ * Global variables can be used at any staging level
+
+
+#### Type consistency
+As Scala uses type erasure, generic types will be erased at run-time and hence in any following stage.
+To ensure any quoted expression that refers to a generic type `T` does not lose the information it needs, we require a given `Type[T]` in scope.
+The `Type[T]` will carry over the non-erased representation of the type into the next phase.
+Therefore any generic type used at a higher staging level than its definition will require its `Type`.
+
+#### Scope extrusion
+Within the contents of a splice, it is possible to have a quote that refers to a local variable defined in the outer quote.
+If this quote is used within the splice, the variable will be in scope.
+However, if the quote is somehow _extruded_ outside the splice, then variables might not be in scope anymore.
+Quoted expressions can be extruded using side effects such as mutable state and exceptions.
+The following example shows how a quote can be extruded using mutable state.
+```scala
+var x: Expr[T] = null
+'{ (y: T) => ${ x = 'y; 1 } }
+x // has value '{y} but y is not in scope
+```
+
+A second way a variable can be extruded is through the `run` method.
+If `run` consumes a quoted variable reference, it will not be in scope anymore.
+The result will reference a variable that is defined in the next stage.
+
+```scala
+'{ (x: Int) => ${ run('x); ... } }
+// evaluates to: '{ (x: Int) => ${ x; ... } }
+```
+
+To catch both scope extrusion scenarios, our system restricts the use of quotes by only allowing a quote to be spliced if it was not extruded from a splice scope.
+Unlike level consistency, this is checked at run-time[^4] rather than compile-time to avoid making the static type system too complicated.
+ +Each `Quotes` instance contains a unique scope identifier and refers to its parent scope, forming a stack of identifiers. +The parent of the scope of a `Quotes` is the scope of the `Quotes` used to create the enclosing quote. +Top-level splices and `run` create new scope stacks. +Every `Expr` knows in which scope it was created. +When it is spliced, we check that the quote scope is either the same as the splice scope, or a parent scope thereof. + + +## Staged Lambdas + +When staging programs in a functional language there are two fundamental abstractions: a staged lambda `Expr[T => U]` and a staging lambda `Expr[T] => Expr[U]`. +The first is a function that will exist in the next stage, whereas the second is a function that exists in the current stage. +It is often convenient to have a mechanism to go from `Expr[T => U]` to `Expr[T] => Expr[U]` and vice versa. + +```scala +def later[T: Type, U: Type](f: Expr[T] => Expr[U]): Expr[T => U] = + '{ (x: T) => ${ f('x) } } + +def now[T: Type, U: Type](f: Expr[T => U]): Expr[T] => Expr[U] = + (x: Expr[T]) => '{ $f($x) } +``` + +Both conversions can be performed out of the box with quotes and splices. +But if `f` is a known lambda function, `'{ $f($x) }` will not beta-reduce the lambda in place. +This optimization is performed in a later phase of the compiler. +Not reducing the application immediately can simplify analysis of generated code. +Nevertheless, it is possible to beta-reduce the lambda in place using the `Expr.betaReduce` method. + +```scala +def now[T: Type, U: Type](f: Expr[T => U]): Expr[T] => Expr[U] = + (x: Expr[T]) => Expr.betaReduce('{ $f($x) }) +``` + +The `betaReduce` method will beta-reduce the outermost application of the expression if possible (regardless of arity). +If it is not possible to beta-reduce the expression, then it will return the original expression. 
+ +## Staged Constructors +To create new class instances in a later stage, we can create them using factory methods (usually `apply` methods of an `object`), or we can instantiate them with a `new`. +For example, we can write `Some(1)` or `new Some(1)`, creating the same value. +In Scala 3, using the factory method call notation will fall back to a `new` if no `apply` method is found. +We follow the usual staging rules when calling a factory method. +Similarly, when we use a `new C`, the constructor of `C` is implicitly called, which also follows the usual staging rules. +Therefore for an arbitrary known class `C`, we can use both `'{ C(...) }` or `'{ new C(...) }` as constructors. + +## Staged Classes +Quoted code can contain any valid expression including local class definitions. +This allows the creation of new classes with specialized implementations. +For example, we can implement a new version of `Runnable` that will perform some optimized operation. +```scala +def mkRunnable(x: Int)(using Quotes): Expr[Runnable] = '{ + class MyRunnable extends Runnable: + def run(): Unit = ... // generate some custom code that uses `x` + new MyRunnable +} +``` + +The quoted class is a local class and its type cannot escape the enclosing quote. +The class must be used inside the quote or an instance of it can be returned using a known interface (`Runnable` in this case). + +## Quote Pattern Matching + +It is sometimes necessary to analyze the structure of the code or decompose the code into its sub-expressions. +A classic example is an embedded DSL, where a macro knows a set of definitions that it can reinterpret while compiling the code (for instance, to perform optimizations). +In the following example, we extend our previous implementation of `powCode` to look into `x` to perform further optimizations. 
+ +```scala +def fusedPowCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] = + x match + case '{ power($y, $m) } => // we have (y^m)^n + fusedPowCode(y, '{ $n * $m }) // generate code for y^(n*m) + case _ => + '{ power($x, $n) } +``` + + +#### Sub-patterns + +In quoted patterns, the `$` binds the sub-expression to an expression `Expr` that can be used in that `case` branch. +The contents of `${..}` in a quote pattern are regular Scala patterns. +For example, we can use the `Expr(_)` pattern within the `${..}` to only match if it is a known value and extract it. + +```scala +def fusedUnrolledPowCode(x: Expr[Double], n: Int)(using Quotes): Expr[Double] = + x match + case '{ power($y, ${Expr(m)}) } => // we have (y^m)^n + fusedUnrolledPowCode(y, n * m) // generate code for y * ... * y + case _ => // ( n*m times ) + unrolledPowerCode(x, n) +``` + +These value extraction sub-patterns can be polymorphic using an instance of `FromExpr`. +In the following example, we show the implementation of `OptionFromExpr` which internally uses the `FromExpr[T]` to extract the value using the `Expr(x)` pattern. + +```scala +given OptionFromExpr[T](using Type[T], FromExpr[T]): FromExpr[Option[T]] with + def unapply(x: Expr[Option[T]])(using Quotes): Option[Option[T]] = + x match + case '{ Some( ${Expr(x)} ) } => Some(Some(x)) + case '{ None } => Some(None) + case _ => None +``` + + + +#### Closed patterns +Patterns may contain two kinds of references: global references such as the call to the `power` method in `'{ power(...) }`, or references to bindings defined in the pattern such as `x` in `case '{ (x: Int) => x }`. +When extracting an expression from a quote, we need to ensure that we do not extrude any variable from the scope where it is defined. + +```scala +'{ (x: Int) => x + 1 } match + case '{ (y: Int) => $z } => + // should not match, otherwise: z = '{ x + 1 } +``` + +In this example, we see that the pattern should not match. 
+Otherwise, any use of the expression `z` would contain an unbound reference to `x`. +To avoid any such extrusion, we only match on a `${..}` if its expression is closed under the definitions within the pattern. +Therefore, the pattern will not match if the expression is not closed. + +#### HOAS patterns +To allow extracting expressions that may contain extruded references we offer a _higher-order abstract syntax_ (HOAS) pattern `$f(y)` (or `$f(y1,...,yn)`). +This pattern will eta-expand the sub-expression with respect to `y` and bind it to `f`. +The lambda arguments will replace the variables that might have been extruded. + +```scala +'{ ((x: Int) => x + 1).apply(2) } match + case '{ ((y: Int) => $f(y)).apply($z: Int) } => + // f may contain references to `x` (replaced by `$y`) + // f = (y: Expr[Int]) => '{ $y + 1 } + f(z) // generates '{ 2 + 1 } +``` + + +A HOAS pattern `$x(y1,...,yn)` will only match the expression if it does not contain references to variables defined in the pattern that are not in the set `y1,...,yn`. +In other words, the pattern will match if the expression only contains references to variables defined in the pattern that are in `y1,...,yn`. +Note that the HOAS patterns `$x()` are semantically equivalent to closed patterns `$x`. + + +#### Type variables + +Expressions may contain types that are not statically known. +For example, an `Expr[List[Int]]` may contain `list.map(_.toInt)` where `list` is a `List` of some type. +To cover all the possible cases we would need to explicitly match `list` on all possible types (`List[Int]`, `List[Int => Int]`, ...). +This is an infinite set of types and therefore pattern cases. +Even if we would know all possible types that a specific program could use, we may still end up with an unmanageable number of cases. +To overcome this, we introduce type variables in quoted patterns, which will match any type. 
+
+In the following example, we show how type variables `t` and `u` match all possible pairs of consecutive calls to `map` on lists.
+In the quoted patterns, types named with lower cases are identified as type variables.
+This follows the same notation as type variables used in normal patterns.
+```scala
+def fuseMapCode(x: Expr[List[Int]]): Expr[List[Int]] =
+  x match
+    case '{ ($ls: List[t]).map[u]($f).map[Int]($g) } =>
+      '{ $ls.map($g.compose($f)) }
+    ...
+
+fuseMapCode('{ List(1.2).map(f).map(g) }) // '{ List(1.2).map(g.compose(f)) }
+fuseMapCode('{ List('a').map(h).map(i) }) // '{ List('a').map(i.compose(h)) }
+```
+Variables `f` and `g` are inferred to be of type `Expr[t => u]` and `Expr[u => Int]` respectively.
+Subsequently, we can infer `$g.compose($f)` to be of type `Expr[t => Int]` which is the type of the argument of `$ls.map(..)`.
+
+Type variables are abstract types that will be erased; this implies that to reference them in the second quote we need a given `Type[t]` and `Type[u]`.
+The quoted pattern will implicitly provide those given types.
+At run-time, when the pattern matches, the type of `t` and `u` will be known, and the `Type[t]` and `Type[u]` will contain the precise types in the expression.
+
+As `Expr` is covariant, the statically known type of the expression might not be the actual type.
+Type variables can also be used to recover the precise type of the expression.
+```scala
+def let(x: Expr[Any])(using Quotes): Expr[Any] =
+  x match
+    case '{ $x: t } =>
+      '{ val y: t = $x; y }
+
+let('{1}) // will return a `Expr[Any]` that contains an `Expr[Int]`
+```
+
+While we can define the type variable in the middle of the pattern, their normal form is to define them as a `type` with a lower case name at the start of the pattern.
+We use the Scala backquote `` `t` `` naming convention which interprets the string within the backquote as a literal name identifier.
+This is typically used when we have names that contain special characters that are not allowed for normal Scala identifiers. +But we use it to explicitly state that this is a reference to that name and not the introduction of a new variable. +```scala + case '{ type t; $x: `t` } => +``` +This is a bit more verbose but has some expressivity advantages such as allowing to define bounds on the variables and be able to refer to them several times in any scope of the pattern. + +```scala + case '{ type t >: List[Int] <: Seq[Int]; $x: `t` } => + case '{ type t; $x: (`t`, `t`) } => +``` + + +#### Type patterns +It is possible to only have a type and no expression of that type. +To be able to inspect a type, we introduce quoted type pattern `case '[..] =>`. +It works the same way as a quoted pattern but is restricted to contain a type. +Type variables can be used in quoted type patterns to extract a type. + +```scala +def empty[T: Type]: Expr[T] = + Type.of[T] match + case '[String] => '{ "" } + case '[List[t]] => '{ List.empty[t] } + ... +``` + +`Type.of[T]` is used to summon the given instance of `Type[T]` in scope, it is equivalent to `summon[Type[T]]`. + +#### Type testing and casting +It is important to note that instance checks and casts on `Expr`, such as `isInstanceOf[Expr[T]]` and `asInstanceOf[Expr[T]]`, will only check if the instance is of the class `Expr` but will not be able to check the `T` argument. +These cases will issue a warning at compile-time, but if they are ignored, they can result in unexpected behavior. + +These operations can be supported correctly in the system. +For a simple type test it is possible to use the `isExprOf[T]` method of `Expr` to check if it is an instance of that type. +Similarly, it is possible to use `asExprOf[T]` to cast an expression to a given type. +These operations use a given `Type[T]` to work around type erasure. 
+ + +## Sub-Expression Transformation + +The system provides a mechanism to transform all sub-expressions of an expression. +This is useful when the sub-expressions we want to transform are deep in the expression. +It is also necessary if the expression contains sub-expressions that cannot be matched using quoted patterns (such as local class definitions). + +```scala +trait ExprMap: + def transform[T](e: Expr[T])(using Type[T])(using Quotes): Expr[T] + def transformChildren[T](e: Expr[T])(using Type[T])(using Quotes): Expr[T] = + ... +``` + +Users can extend the `ExprMap` trait and implement the `transform` method. +This interface is flexible and can implement top-down, bottom-up, or other transformations. + +```scala +object OptimizeIdentity extends ExprMap: + def transform[T](e: Expr[T])(using Type[T])(using Quotes): Expr[T] = + transformChildren(e) match // bottom-up transformation + case '{ identity($x) } => x + case _ => e +``` + +The `transformChildren` method is implemented as a primitive that knows how to reach all the direct sub-expressions and calls `transform` on each one. +The type passed to `transform` is the expected type of this sub-expression in its expression. +For example while transforming `Some(1)` in `'{ val x: Option[Int] = Some(1); ...}` the type will be `Option[Int]` and not `Some[Int]`. +This implies that we can safely transform `Some(1)` into `None`. + +## Staged Implicit Summoning +When summoning implicit arguments using `summon`, we will find the given instances in the current scope. +It is possible to use `summon` to get staged implicit arguments by explicitly staging them first. +In the following example, we can pass an implicit `Ordering[T]` in a macro as an `Expr[Ordering[T]]` to its implementation. +Then we can splice it and give it implicitly in the next stage. 
+ +```scala +inline def treeSetFor[T](using ord: Ordering[T]): Set[T] = + ${ setExpr[T](using 'ord) } + +def setExpr[T:Type](using ord: Expr[Ordering[T]])(using Quotes): Expr[Set[T]] = + '{ given Ordering[T] = $ord; new TreeSet[T]() } +``` + +We pass it as an implicit `Expr[Ordering[T]]` because there might be intermediate methods that can pass it along implicitly. + +An alternative is to summon implicit values in the scope where the macro is invoked. +Using the `Expr.summon` method we get an optional expression containing the implicit instance. +This provides the ability to search for implicit instances conditionally. + +```scala +def summon[T: Type](using Quotes): Option[Expr[T]] +``` + +```scala +inline def setFor[T]: Set[T] = + ${ setForExpr[T] } + +def setForExpr[T: Type]()(using Quotes): Expr[Set[T]] = + Expr.summon[Ordering[T]] match + case Some(ord) => + '{ new TreeSet[T]()($ord) } + case _ => + '{ new HashSet[T] } +``` + +## More details + +[More details](./macros-spec.md) + + +[^1]: [Scalable Metaprogramming in Scala 3](https://infoscience.epfl.ch/record/299370) +[^2]: [Semantics-preserving inlining for metaprogramming](https://dl.acm.org/doi/10.1145/3426426.3428486) +[^3]: Implemented in the Scala 3 Dotty project https://github.com/lampepfl/dotty. sbt library dependency `"org.scala-lang" %% "scala3-staging" % scalaVersion.value` +[^4]: Using the `-Xcheck-macros` compiler flag diff --git a/docs/_spec/TODOreference/metaprogramming/metaprogramming.md b/docs/_spec/TODOreference/metaprogramming/metaprogramming.md new file mode 100644 index 000000000000..3bce2d7c922e --- /dev/null +++ b/docs/_spec/TODOreference/metaprogramming/metaprogramming.md @@ -0,0 +1,47 @@ +--- +layout: index +title: "Metaprogramming" +nightlyOf: https://docs.scala-lang.org/scala3/reference/metaprogramming.html +--- + +The following pages introduce the redesign of metaprogramming in Scala. They +introduce the following fundamental facilities: + +1. 
[`inline`](./inline.md) is a new modifier that guarantees that + a definition will be inlined at the point of use. The primary motivation + behind inline is to reduce the overhead behind function calls and access to + values. The expansion will be performed by the Scala compiler during the + `Typer` compiler phase. As opposed to inlining in some other ecosystems, + inlining in Scala is not merely a request to the compiler but is a + _command_. The reason is that inlining in Scala can drive other compile-time + operations, like inline pattern matching (enabling type-level + programming), macros (enabling compile-time, generative, metaprogramming) and + runtime code generation (multi-stage programming). + +2. [Compile-time ops](./compiletime-ops.md) are helper definitions in the + standard library that provide support for compile-time operations over values and types. + +3. [Macros](./macros.md) are built on two well-known fundamental + operations: quotation and splicing. Quotation converts program code to + data, specifically, a (tree-like) representation of this code. It is + expressed as `'{...}` for expressions and as `'[...]` for types. Splicing, + expressed as `${ ... }`, goes the other way: it converts a program's representation + to program code. Together with `inline`, these two abstractions allow + to construct program code programmatically. + +4. [Runtime Staging](./staging.md) Where macros construct code at _compile-time_, + staging lets programs construct new code at _runtime_. That way, + code generation can depend not only on static data but also on data available at runtime. This splits the evaluation of the program in two or more phases or ... + stages. Consequently, this method of generative programming is called "Multi-Stage Programming". Staging is built on the same foundations as macros. It uses + quotes and splices, but leaves out `inline`. + +5. [Reflection](./reflection.md) Quotations are a "black-box" + representation of code. 
They can be parameterized and composed using + splices, but their structure cannot be analyzed from the outside. TASTy + reflection gives a way to analyze code structure by partly revealing the representation type of a piece of code in a standard API. The representation + type is a form of typed abstract syntax tree, which gives rise to the `TASTy` + moniker. + +6. [TASTy Inspection](./tasty-inspect.md) Typed abstract syntax trees are serialized + in a custom compressed binary format stored in `.tasty` files. TASTy inspection allows + to load these files and analyze their content's tree structure. diff --git a/docs/_spec/TODOreference/metaprogramming/reflection.md b/docs/_spec/TODOreference/metaprogramming/reflection.md new file mode 100644 index 000000000000..b2d492657a4e --- /dev/null +++ b/docs/_spec/TODOreference/metaprogramming/reflection.md @@ -0,0 +1,131 @@ +--- +layout: doc-page +title: "Reflection" +nightlyOf: https://docs.scala-lang.org/scala3/reference/metaprogramming/reflection.html +--- + +Reflection enables inspection and construction of Typed Abstract Syntax Trees +(Typed-AST). It may be used on quoted expressions (`quoted.Expr`) and quoted +types (`quoted.Type`) from [Macros](./macros.md) or on full TASTy files. + +If you are writing macros, please first read [Macros](./macros.md). +You may find all you need without using quote reflection. + +## API: From quotes and splices to TASTy reflect trees and back + +With `quoted.Expr` and `quoted.Type` we can compute code but also analyze code +by inspecting the ASTs. [Macros](./macros.md) provide the guarantee that the +generation of code will be type-correct. Using quote reflection will break these +guarantees and may fail at macro expansion time, hence additional explicit +checks must be done. + +To provide reflection capabilities in macros we need to add an implicit parameter +of type `scala.quoted.Quotes` and import `quotes.reflect.*` from it in the scope +where it is used. 
+
+```scala
+import scala.quoted.*
+
+inline def natConst(inline x: Int): Int = ${natConstImpl('{x})}
+
+def natConstImpl(x: Expr[Int])(using Quotes): Expr[Int] =
+  import quotes.reflect.*
+  ...
+```
+
+### Extractors
+
+`import quotes.reflect.*` will provide all extractors and methods on `quotes.reflect.Tree`s.
+For example the `Literal(_)` extractor used below.
+
+```scala
+def natConstImpl(x: Expr[Int])(using Quotes): Expr[Int] =
+  import quotes.reflect.*
+  val tree: Term = x.asTerm
+  tree match
+    case Inlined(_, _, Literal(IntConstant(n))) =>
+      if n <= 0 then
+        report.error("Parameter must be natural number")
+        '{0}
+      else
+        tree.asExprOf[Int]
+    case _ =>
+      report.error("Parameter must be a known constant")
+      '{0}
+```
+
+We can easily know which extractors are needed using `Printer.TreeStructure.show`,
+which returns the string representation of the structure of the tree. Other printers
+can also be found in the `Printer` module.
+
+```scala
+tree.show(using Printer.TreeStructure)
+// or
+Printer.TreeStructure.show(tree)
+```
+
+The methods `quotes.reflect.Term.{asExpr, asExprOf}` provide a way to go back to
+a `quoted.Expr`. Note that `asExpr` returns a `Expr[Any]`. On the other hand
+`asExprOf[T]` returns a `Expr[T]`, if the type does not conform to it an exception
+will be thrown at runtime.
+
+### Positions
+
+The `Position` in the context provides an `ofMacroExpansion` value. It corresponds
+to the expansion site for macros. The macro authors can obtain various information
+about that expansion site. The example below shows how we can obtain position
+information such as the start line, the end line or even the source code at the
+expansion point.
+ +```scala +def macroImpl()(quotes: Quotes): Expr[Unit] = + import quotes.reflect.* + val pos = Position.ofMacroExpansion + + val path = pos.sourceFile.jpath.toString + val start = pos.start + val end = pos.end + val startLine = pos.startLine + val endLine = pos.endLine + val startColumn = pos.startColumn + val endColumn = pos.endColumn + val sourceCode = pos.sourceCode + ... +``` + +### Tree Utilities + +`quotes.reflect` contains three facilities for tree traversal and +transformation. + +`TreeAccumulator` ties the knot of a traversal. By calling `foldOver(x, tree)(owner)` +we can dive into the `tree` node and start accumulating values of type `X` (e.g., +of type `List[Symbol]` if we want to collect symbols). The code below, for +example, collects the `val` definitions in the tree. + +```scala +def collectPatternVariables(tree: Tree)(using ctx: Context): List[Symbol] = + val acc = new TreeAccumulator[List[Symbol]]: + def foldTree(syms: List[Symbol], tree: Tree)(owner: Symbol): List[Symbol] = tree match + case ValDef(_, _, rhs) => + val newSyms = tree.symbol :: syms + foldTree(newSyms, body)(tree.symbol) + case _ => + foldOverTree(syms, tree)(owner) + acc(Nil, tree) +``` + +A `TreeTraverser` extends a `TreeAccumulator` and performs the same traversal +but without returning any value. Finally, a `TreeMap` performs a transformation. + +#### ValDef.let + +`quotes.reflect.ValDef` also offers a method `let` that allows us to bind the `rhs` (right-hand side) to a `val` and use it in `body`. +Additionally, `lets` binds the given `terms` to names and allows to use them in the `body`. +Their type definitions are shown below: + +```scala +def let(rhs: Term)(body: Ident => Term): Term = ... + +def lets(terms: List[Term])(body: List[Term] => Term): Term = ... 
+``` diff --git a/docs/_spec/TODOreference/metaprogramming/simple-smp.md b/docs/_spec/TODOreference/metaprogramming/simple-smp.md new file mode 100644 index 000000000000..2ba0155ad329 --- /dev/null +++ b/docs/_spec/TODOreference/metaprogramming/simple-smp.md @@ -0,0 +1,232 @@ +--- +layout: doc-page +title: "The Meta-theory of Symmetric Metaprogramming" +nightlyOf: https://docs.scala-lang.org/scala3/reference/metaprogramming/simple-smp.html +--- + +This note presents a simplified variant of +[principled metaprogramming](./macros.md) +and sketches its soundness proof. The variant treats only dialogues +between two stages. A program can have quotes which can contain +splices (which can contain quotes, which can contain splices, and so +on). Or the program could start with a splice with embedded +quotes. The essential restriction is that (1) a term can contain top-level +quotes or top-level splices, but not both, and (2) quotes cannot appear +directly inside quotes and splices cannot appear directly inside +splices. In other words, the universe is restricted to two phases +only. + +Under this restriction we can simplify the typing rules so that there are +always exactly two environments instead of having a stack of environments. +The variant presented here differs from the full calculus also in that we +replace evaluation contexts with contextual typing rules. While this +is more verbose, it makes it easier to set up the meta theory. 
+ +## Syntax +``` +Terms t ::= x variable + (x: T) => t lambda + t t application + ’t quote + ~t splice + +Simple terms u ::= x | (x: T) => u | u u + +Values v ::= (x: T) => t lambda + ’u quoted value + +Types T ::= A base type + T -> T function type + ’T quoted type +``` +## Operational semantics + +### Evaluation +``` + ((x: T) => t) v --> [x := v]t + + t1 --> t2 + --------------- + t1 t --> t2 t + + t1 --> t2 + --------------- + v t1 --> v t2 + + t1 ==> t2 + ------------- + ’t1 --> ’t2 +``` + +### Splicing +``` + ~’u ==> u + + t1 ==> t2 + ------------------------------- + (x: T) => t1 ==> (x: T) => t2 + + t1 ==> t2 + --------------- + t1 t ==> t2 t + + t1 ==> t2 + --------------- + u t1 ==> u t2 + + t1 --> t2 + ------------- + ~t1 ==> ~t2 + +``` +## Typing Rules + +Typing judgments are of the form `E1 * E2 |- t: T` where `E1, E2` are environments and +`*` is one of `~` and `’`. +``` + x: T in E2 + --------------- + E1 * E2 |- x: T + + + E1 * E2, x: T1 |- t: T2 + -------------------------------- + E1 * E2 |- (x: T1) => t: T1 -> T2 + + + E1 * E2 |- t1: T2 -> T E1 * E2 |- t2: T2 + ------------------------------------------- + E1 * E2 |- t1 t2: T + + + E2 ’ E1 |- t: T + ----------------- + E1 ~ E2 |- ’t: ’T + + + E2 ~ E1 |- t: ’T + ---------------- + E1 ’ E2 |- ~t: T +``` + +(Curiously, this looks a bit like a Christmas tree). + +## Soundness + +The meta-theory typically requires mutual inductions over two judgments. + +### Progress Theorem + + 1. If `E1 ~ |- t: T` then either `t = v` for some value `v` or `t --> t2` for some term `t2`. + 2. If ` ’ E2 |- t: T` then either `t = u` for some simple term `u` or `t ==> t2` for some term `t2`. + +Proof by structural induction over terms. + +To prove (1): + + - the cases for variables, lambdas and applications are as in [STLC](https://en.wikipedia.org/wiki/Simply_typed_lambda_calculus). + - If `t = ’t2`, then by inversion we have ` ’ E1 |- t2: T2` for some type `T2`. 
+ By the second [induction hypothesis](https://en.wikipedia.org/wiki/Mathematical_induction) (I.H.), we have one of: + - `t2 = u`, hence `’t2` is a value, + - `t2 ==> t3`, hence `’t2 --> ’t3`. + - The case `t = ~t2` is not typable. + +To prove (2): + + - If `t = x` then `t` is a simple term. + - If `t = (x: T) => t2`, then either `t2` is a simple term, in which case `t` is as well. + Or by the second I.H. `t2 ==> t3`, in which case `t ==> (x: T) => t3`. + - If `t = t1 t2` then one of three cases applies: + + - `t1` and `t2` are a simple term, then `t` is as well a simple term. + - `t1` is not a simple term. Then by the second I.H., `t1 ==> t12`, hence `t ==> t12 t2`. + - `t1` is a simple term but `t2` is not. Then by the second I.H. `t2 ==> t22`, hence `t ==> t1 t22`. + + - The case `t = ’t2` is not typable. + - If `t = ~t2` then by inversion we have `E2 ~ |- t2: ’T2`, for some type `T2`. + By the first I.H., we have one of + + - `t2 = v`. Since `t2: ’T2`, we must have `v = ’u`, for some simple term `u`, hence `t = ~’u`. + By quote-splice reduction, `t ==> u`. + - `t2 --> t3`. Then by the context rule for `~t`, `t ==> ~t3`. + + +### Substitution Lemma + + 1. If `E1 ~ E2 |- s: S` and `E1 ~ E2, x: S |- t: T` then `E1 ~ E2 |- [x := s]t: T`. + 2. If `E1 ~ E2 |- s: S` and `E2, x: S ’ E1 |- t: T` then `E2 ’ E1 |- [x := s]t: T`. + +The proofs are by induction on typing derivations for `t`, analogous +to the proof for STL (with (2) a bit simpler than (1) since we do not +need to swap lambda bindings with the bound variable `x`). The +arguments that link the two hypotheses are as follows. + +To prove (1), let `t = ’t1`. Then `T = ’T1` for some type `T1` and the last typing rule is +``` + E2, x: S ’ E1 |- t1: T1 + ------------------------- + E1 ~ E2, x: S |- ’t1: ’T1 +``` +By the second I.H. `E2 ’ E1 |- [x := s]t1: T1`. By typing, `E1 ~ E2 |- ’[x := s]t1: ’T1`. +Since `[x := s]t = [x := s](’t1) = ’[x := s]t1` we get `[x := s]t: ’T1`. + +To prove (2), let `t = ~t1`. 
Then the last typing rule is +``` + E1 ~ E2, x: S |- t1: ’T + ----------------------- + E2, x: S ’ E1 |- ~t1: T +``` +By the first I.H., `E1 ~ E2 |- [x := s]t1: ’T`. By typing, `E2 ’ E1 |- ~[x := s]t1: T`. +Since `[x := s]t = [x := s](~t1) = ~[x := s]t1` we get `[x := s]t: T`. + + +### Preservation Theorem + + 1. If `E1 ~ E2 |- t1: T` and `t1 --> t2` then `E1 ~ E2 |- t2: T`. + 2. If `E1 ’ E2 |- t1: T` and `t1 ==> t2` then `E1 ’ E2 |- t2: T`. + +The proof is by structural induction on evaluation derivations. The proof of (1) is analogous +to the proof for STL, using the substitution lemma for the beta reduction case, with the addition of reduction of quoted terms, which goes as follows: + + - Assume the last rule was + ``` + t1 ==> t2 + ------------- + ’t1 --> ’t2 + ``` + By inversion of typing rules, we must have `T = ’T1` for some type `T1` such that `t1: T1`. + By the second I.H., `t2: T1`, hence `’t2: ’T1`. + + +To prove (2): + + - Assume the last rule was `~’u ==> u`. The typing proof of `~’u` must have the form + + ``` + E1 ’ E2 |- u: T + ----------------- + E1 ~ E2 |- ’u: ’T + ----------------- + E1 ’ E2 |- ~’u: T + ``` + Hence, `E1 ’ E2 |- u: T`. + + - Assume the last rule was + ``` + t1 ==> t2 + ------------------------------- + (x: S) => t1 ==> (x: S) => t2 + ``` + By typing inversion, `E1 ' E2, x: S |- t1: T1` for some type `T1` such that `T = S -> T1`. + By the I.H, `t2: T1`. By the typing rule for lambdas the result follows. + + - The context rules for applications are equally straightforward. + + - Assume the last rule was + ``` + t1 ==> t2 + ------------- + ~t1 ==> ~t2 + ``` + By inversion of typing rules, we must have `t1: ’T`. + By the first I.H., `t2: ’T`, hence `~t2: T`. 
diff --git a/docs/_spec/TODOreference/metaprogramming/staging.md b/docs/_spec/TODOreference/metaprogramming/staging.md new file mode 100644 index 000000000000..6d9166e8249e --- /dev/null +++ b/docs/_spec/TODOreference/metaprogramming/staging.md @@ -0,0 +1,121 @@ +--- +layout: doc-page +title: "Runtime Multi-Stage Programming" +nightlyOf: https://docs.scala-lang.org/scala3/reference/metaprogramming/staging.html +--- + +The framework expresses at the same time compile-time metaprogramming and +multi-stage programming. We can think of compile-time metaprogramming as a +two stage compilation process: one that we write the code in top-level splices, +that will be used for code generation (macros) and one that will perform all +necessary evaluations at compile-time and an object program that we will run +as usual. What if we could synthesize code at run-time and offer one extra stage +to the programmer? Then we can have a value of type `Expr[T]` at run-time that we +can essentially treat as a typed-syntax tree that we can either _show_ as a +string (pretty-print) or compile and run. If the number of quotes exceeds the +number of splices by more than one (effectively handling at run-time values of type +`Expr[Expr[T]]`, `Expr[Expr[Expr[T]]]`, ...) then we talk about Multi-Stage +Programming. + +The motivation behind this _paradigm_ is to let runtime information affect or +guide code-generation. + +Intuition: The phase in which code is run is determined by the difference +between the number of splice scopes and quote scopes in which it is embedded. + + - If there are more splices than quotes, the code is run at compile-time i.e. + as a macro. In the general case, this means running an interpreter that + evaluates the code, which is represented as a typed abstract syntax tree. The + interpreter can fall back to reflective calls when evaluating an application + of a previously compiled method. 
If the splice excess is more than one, it + would mean that a macro’s implementation code (as opposed to the code it + expands to) invokes other macros. If macros are realized by interpretation, + this would lead to towers of interpreters, where the first interpreter would + itself interpret an interpreter code that possibly interprets another + interpreter and so on. + + - If the number of splices equals the number of quotes, the code is compiled + and run as usual. + + - If the number of quotes exceeds the number of splices, the code is staged. + That is, it produces a typed abstract syntax tree or type structure at + run-time. A quote excess of more than one corresponds to multi-staged + programming. + +Providing an interpreter for the full language is quite difficult, and it is +even more difficult to make that interpreter run efficiently. So we currently +impose the following restrictions on the use of splices. + + 1. A top-level splice must appear in an inline method (turning that method + into a macro) + + 2. The splice must call a previously compiled + method passing quoted arguments, constant arguments or inline arguments. + + 3. Splices inside splices (but no intervening quotes) are not allowed. + + +## API + +The framework as discussed so far allows code to be staged, i.e. be prepared +to be executed at a later stage. To run that code, there is another method +in class `Expr` called `run`. Note that `$` and `run` both map from `Expr[T]` +to `T` but only `$` is subject to the [Cross-Stage Safety](./macros.md#cross-stage-safety), whereas `run` is just a normal method. +`scala.quoted.staging.run` provides a `Quotes` that can be used to show the expression in its scope. +On the other hand `scala.quoted.staging.withQuotes` provides a `Quotes` without evaluating the expression. + +```scala +package scala.quoted.staging + +def run[T](expr: Quotes ?=> Expr[T])(using Compiler): T = ... + +def withQuotes[T](thunk: Quotes ?=> T)(using Compiler): T = ... 
+``` + +## Create a new Scala 3 project with staging enabled + +```shell +sbt new scala/scala3-staging.g8 +``` + +From [`scala/scala3-staging.g8`](https://github.com/scala/scala3-staging.g8). + +It will create a project with the necessary dependencies and some examples. + +In case you prefer to create the project on your own, make sure to define the following dependency in your [`build.sbt` build definition](https://www.scala-sbt.org/1.x/docs/Basic-Def.html) + +```scala +libraryDependencies += "org.scala-lang" %% "scala3-staging" % scalaVersion.value +``` + +and in case you use `scalac`/`scala` directly, then use the `-with-compiler` flag for both: + +```shell +scalac -with-compiler -d out Test.scala +scala -with-compiler -classpath out Test +``` + +## Example + +Now take exactly the same example as in [Macros](./macros.md). Assume that we +do not want to pass an array statically but generate code at run-time and pass +the value, also at run-time. Note, how we make a future-stage function of type +`Expr[Array[Int] => Int]` in line 6 below. Using `staging.run { ... }` we can evaluate an +expression at runtime. Within the scope of `staging.run` we can also invoke `show` on an expression +to get a source-like representation of the expression. + +```scala +import scala.quoted.* + +// make available the necessary compiler for runtime code generation +given staging.Compiler = staging.Compiler.make(getClass.getClassLoader) + +val f: Array[Int] => Int = staging.run { + val stagedSum: Expr[Array[Int] => Int] = + '{ (arr: Array[Int]) => ${sum('arr)}} + println(stagedSum.show) // Prints "(arr: Array[Int]) => { var sum = 0; ... 
}" + stagedSum +} + +f.apply(Array(1, 2, 3)) // Returns 6 +``` diff --git a/docs/_spec/TODOreference/metaprogramming/tasty-inspect.md b/docs/_spec/TODOreference/metaprogramming/tasty-inspect.md new file mode 100644 index 000000000000..e643775243e0 --- /dev/null +++ b/docs/_spec/TODOreference/metaprogramming/tasty-inspect.md @@ -0,0 +1,57 @@ +--- +layout: doc-page +title: "TASTy Inspection" +nightlyOf: https://docs.scala-lang.org/scala3/reference/metaprogramming/tasty-inspect.html +--- + +```scala +libraryDependencies += "org.scala-lang" %% "scala3-tasty-inspector" % scalaVersion.value +``` + +TASTy files contain the full typed tree of a class including source positions +and documentation. This is ideal for tools that analyze or extract semantic +information from the code. To avoid the hassle of working directly with the TASTy +file we provide the `Inspector` which loads the contents and exposes it +through the TASTy reflect API. + +## Inspecting TASTy files + +To inspect the trees of a TASTy file a consumer can be defined in the following way. + +```scala +import scala.quoted.* +import scala.tasty.inspector.* + +class MyInspector extends Inspector: + def inspect(using Quotes)(tastys: List[Tasty[quotes.type]]): Unit = + import quotes.reflect.* + for tasty <- tastys do + val tree = tasty.ast + // Do something with the tree +``` + +Then the consumer can be instantiated with the following code to get the tree of the `foo/Bar.tasty` file. 
+ +```scala +object Test: + def main(args: Array[String]): Unit = + val tastyFiles = List("foo/Bar.tasty") + TastyInspector.inspectTastyFiles(tastyFiles)(new MyInspector) +``` + +Note that if we need to run the main (in the example below defined in an object called `Test`) after compilation we need to make the compiler available to the runtime: + +```shell +scalac -d out Test.scala +scala -with-compiler -classpath out Test +``` + +## Template project + +Using sbt version `1.1.5+`, do: + +```shell +sbt new scala/scala3-tasty-inspector.g8 +``` + +in the folder where you want to clone the template. diff --git a/docs/_spec/TODOreference/new-types/dependent-function-types-spec.md b/docs/_spec/TODOreference/new-types/dependent-function-types-spec.md new file mode 100644 index 000000000000..f3237ddf7b9a --- /dev/null +++ b/docs/_spec/TODOreference/new-types/dependent-function-types-spec.md @@ -0,0 +1,125 @@ +--- +layout: doc-page +title: "Dependent Function Types - More Details" +nightlyOf: https://docs.scala-lang.org/scala3/reference/new-types/dependent-function-types-spec.html +--- + +Initial implementation in [PR #3464](https://github.com/lampepfl/dotty/pull/3464). + +## Syntax + +``` +FunArgTypes ::= InfixType + | ‘(’ [ FunArgType {',' FunArgType } ] ‘)’ + | ‘(’ TypedFunParam {',' TypedFunParam } ‘)’ +TypedFunParam ::= id ‘:’ Type +``` + +Dependent function types associate to the right, e.g. +`(s: S) => (t: T) => U` is the same as `(s: S) => ((t: T) => U)`. + +## Implementation + +Dependent function types are shorthands for class types that define `apply` +methods with a dependent result type. Dependent function types desugar to +refinement types of `scala.FunctionN`. 
A dependent function type +`(x1: K1, ..., xN: KN) => R` of arity `N` translates to: + +```scala +FunctionN[K1, ..., Kn, R']: + def apply(x1: K1, ..., xN: KN): R +``` + +where the result type parameter `R'` is the least upper approximation of the +precise result type `R` without any reference to value parameters `x1, ..., xN`. + +The syntax and semantics of anonymous dependent functions is identical to the +one of regular functions. Eta expansion is naturally generalized to produce +dependent function types for methods with dependent result types. + +Dependent functions can be implicit, and generalize to arity `N > 22` in the +same way that other functions do, see +[the corresponding documentation](../dropped-features/limit22.md). + +## Examples + +The example below defines a trait `C` and the two dependent function types +`DF` and `IDF` and prints the results of the respective function applications: + +[depfuntype.scala]: https://github.com/lampepfl/dotty/blob/main/tests/pos/depfuntype.scala + +```scala +trait C { type M; val m: M } + +type DF = (x: C) => x.M + +type IDF = (x: C) ?=> x.M + +@main def test = + val c = new C { type M = Int; val m = 3 } + + val depfun: DF = (x: C) => x.m + val t = depfun(c) + println(s"t=$t") // prints "t=3" + + val idepfun: IDF = summon[C].m + val u = idepfun(using c) + println(s"u=$u") // prints "u=3" + +``` + +In the following example the depend type `f.Eff` refers to the effect type `CanThrow`: + +[eff-dependent.scala]: https://github.com/lampepfl/dotty/blob/main/tests/run/eff-dependent.scala + +```scala +trait Effect + +// Type X => Y +abstract class Fun[-X, +Y]: + type Eff <: Effect + def apply(x: X): Eff ?=> Y + +class CanThrow extends Effect +class CanIO extends Effect + +given ct: CanThrow = new CanThrow +given ci: CanIO = new CanIO + +class I2S extends Fun[Int, String]: + type Eff = CanThrow + def apply(x: Int) = x.toString + +class S2I extends Fun[String, Int]: + type Eff = CanIO + def apply(x: String) = x.length + +// def 
map(f: A => B)(xs: List[A]): List[B] +def map[A, B](f: Fun[A, B])(xs: List[A]): f.Eff ?=> List[B] = + xs.map(f.apply) + +// def mapFn[A, B]: (A => B) -> List[A] -> List[B] +def mapFn[A, B]: (f: Fun[A, B]) => List[A] => f.Eff ?=> List[B] = + f => xs => map(f)(xs) + +// def compose(f: A => B)(g: B => C)(x: A): C +def compose[A, B, C](f: Fun[A, B])(g: Fun[B, C])(x: A): + f.Eff ?=> g.Eff ?=> C = + g(f(x)) + +// def composeFn: (A => B) -> (B => C) -> A -> C +def composeFn[A, B, C]: + (f: Fun[A, B]) => (g: Fun[B, C]) => A => f.Eff ?=> g.Eff ?=> C = + f => g => x => compose(f)(g)(x) + +@main def test = + val i2s = new I2S + val s2i = new S2I + + assert(mapFn(i2s)(List(1, 2, 3)).mkString == "123") + assert(composeFn(i2s)(s2i)(22) == 2) +``` + +## Type Checking + +After desugaring no additional typing rules are required for dependent function types. diff --git a/docs/_spec/TODOreference/new-types/dependent-function-types.md b/docs/_spec/TODOreference/new-types/dependent-function-types.md new file mode 100644 index 000000000000..adbee1d8b3c8 --- /dev/null +++ b/docs/_spec/TODOreference/new-types/dependent-function-types.md @@ -0,0 +1,49 @@ +--- +layout: doc-page +title: "Dependent Function Types" +nightlyOf: https://docs.scala-lang.org/scala3/reference/new-types/dependent-function-types.html +--- + +A dependent function type is a function type whose result depends +on the function's parameters. For example: + +```scala +trait Entry { type Key; val key: Key } + +def extractKey(e: Entry): e.Key = e.key // a dependent method + +val extractor: (e: Entry) => e.Key = extractKey // a dependent function value +// ^^^^^^^^^^^^^^^^^^^ +// a dependent function type +``` + +Scala already has _dependent methods_, i.e. methods where the result +type refers to some of the parameters of the method. Method +`extractKey` is an example. Its result type, `e.Key` refers to its +parameter `e` (we also say, `e.Key` _depends_ on `e`). 
But so far it +was not possible to turn such methods into function values, so that +they can be passed as parameters to other functions, or returned as +results. Dependent methods could not be turned into functions simply +because there was no type that could describe them. + +In Scala 3 this is now possible. The type of the `extractor` value above is + +```scala +(e: Entry) => e.Key +``` + +This type describes function values that take any argument `e` of type +`Entry` and return a result of type `e.Key`. + +Recall that a normal function type `A => B` is represented as an +instance of the [`Function1` trait](https://scala-lang.org/api/3.x/scala/Function1.html) +(i.e. `Function1[A, B]`) and analogously for functions with more parameters. Dependent functions +are also represented as instances of these traits, but they get an additional +refinement. In fact, the dependent function type above is just syntactic sugar for + +```scala +Function1[Entry, Entry#Key]: + def apply(e: Entry): e.Key +``` + +[More details](./dependent-function-types-spec.md) diff --git a/docs/_spec/TODOreference/new-types/match-types.md b/docs/_spec/TODOreference/new-types/match-types.md new file mode 100644 index 000000000000..d646dd11880b --- /dev/null +++ b/docs/_spec/TODOreference/new-types/match-types.md @@ -0,0 +1,247 @@ +--- +layout: doc-page +title: "Match Types" +nightlyOf: https://docs.scala-lang.org/scala3/reference/new-types/match-types.html +--- + +A match type reduces to one of its right-hand sides, depending on the type of +its scrutinee. For example: + +```scala +type Elem[X] = X match + case String => Char + case Array[t] => t + case Iterable[t] => t +``` + +This defines a type that reduces as follows: + +```scala +Elem[String] =:= Char +Elem[Array[Int]] =:= Int +Elem[List[Float]] =:= Float +Elem[Nil.type] =:= Nothing +``` + +Here `=:=` is understood to mean that left and right-hand sides are mutually +subtypes of each other. 
+ +In general, a match type is of the form + +```scala +S match { P1 => T1 ... Pn => Tn } +``` + +where `S`, `T1`, ..., `Tn` are types and `P1`, ..., `Pn` are type patterns. Type +variables in patterns start with a lower case letter, as usual. + +Match types can form part of recursive type definitions. Example: + +```scala +type LeafElem[X] = X match + case String => Char + case Array[t] => LeafElem[t] + case Iterable[t] => LeafElem[t] + case AnyVal => X +``` + +Recursive match type definitions can also be given an upper bound, like this: + +```scala +type Concat[Xs <: Tuple, +Ys <: Tuple] <: Tuple = Xs match + case EmptyTuple => Ys + case x *: xs => x *: Concat[xs, Ys] +``` + +In this definition, every instance of `Concat[A, B]`, whether reducible or not, +is known to be a subtype of `Tuple`. This is necessary to make the recursive +invocation `x *: Concat[xs, Ys]` type check, since `*:` demands a `Tuple` as its +right operand. + +## Dependent Typing + +Match types can be used to define dependently typed methods. For instance, here +is the value level counterpart to the `LeafElem` type defined above (note the +use of the match type as the return type): + +```scala +def leafElem[X](x: X): LeafElem[X] = x match + case x: String => x.charAt(0) + case x: Array[t] => leafElem(x(0)) + case x: Iterable[t] => leafElem(x.head) + case x: AnyVal => x +``` + +This special mode of typing for match expressions is only used when the +following conditions are met: + +1. The match expression patterns do not have guards +2. The match expression scrutinee's type is a subtype of the match type + scrutinee's type +3. The match expression and the match type have the same number of cases +4. 
The match expression patterns are all [Typed Patterns](https://scala-lang.org/files/archive/spec/2.13/08-pattern-matching.html#typed-patterns), + and these types are `=:=` to their corresponding type patterns in the match + type + +So you know, while the case body will be expected to have the type on the right-hand +side of the corresponding match type case, that doesn't imply the match type argument +is constrained. Using the example, the last case body must conform to X, but that +doesn't constrain X to be AnyVal, and therefore a LeafElem[X] inside the body wouldn't +reduce; it would remain stuck, and as such just an abstract type. + +## Representation of Match Types + +The internal representation of a match type +``` +S match { P1 => T1 ... Pn => Tn } +``` +is `Match(S, C1, ..., Cn) <: B` where each case `Ci` is of the form +``` +[Xs] =>> P => T +``` + +Here, `[Xs]` is a type parameter clause of the variables bound in pattern `Pi`. +If there are no bound type variables in a case, the type parameter clause is +omitted and only the function type `P => T` is kept. So each case is either a +unary function type or a type lambda over a unary function type. + +`B` is the declared upper bound of the match type, or `Any` if no such bound is +given. We will leave it out in places where it does not matter for the +discussion. The scrutinee, bound, and pattern types must all be first-order +types. + +## Match Type Reduction + +Match type reduction follows the semantics of match expressions, that is, a +match type of the form `S match { P1 => T1 ... Pn => Tn }` reduces to `Ti` if +and only if `s: S match { _: P1 => T1 ... _: Pn => Tn }` evaluates to a value of +type `Ti` for all `s: S`. + +The compiler implements the following reduction algorithm: + +- If the scrutinee type `S` is an empty set of values (such as `Nothing` or + `String & Int`), do not reduce. +- Sequentially consider each pattern `Pi` + - If `S <: Pi` reduce to `Ti`. 
+ - Otherwise, try constructing a proof that `S` and `Pi` are disjoint, or, in + other words, that no value `s` of type `S` is also of type `Pi`. + - If such proof is found, proceed to the next case (`Pi+1`), otherwise, do + not reduce. + +Disjointness proofs rely on the following properties of Scala types: + +1. Single inheritance of classes +2. Final classes cannot be extended +3. Constant types with distinct values are nonintersecting +4. Singleton paths to distinct values are nonintersecting, such as `object` definitions or singleton enum cases. + +Type parameters in patterns are minimally instantiated when computing `S <: Pi`. +An instantiation `Is` is _minimal_ for `Xs` if all type variables in `Xs` that +appear covariantly and nonvariantly in `Is` are as small as possible and all +type variables in `Xs` that appear contravariantly in `Is` are as large as +possible. Here, "small" and "large" are understood with respect to `<:`. + +For simplicity, we have omitted constraint handling so far. The full formulation +of subtyping tests describes them as a function from a constraint and a pair of +types to either _success_ and a new constraint or _failure_. In the context of +reduction, the subtyping test `S <: [Xs := Is] P` is understood to leave the +bounds of all variables in the input constraint unchanged, i.e. existing +variables in the constraint cannot be instantiated by matching the scrutinee +against the patterns. + +## Subtyping Rules for Match Types + +The following rules apply to match types. For simplicity, we omit environments +and constraints. + +1. The first rule is a structural comparison between two match types: + + ``` + S match { P1 => T1 ... Pm => Tm } <: T match { Q1 => U1 ... Qn => Un } + ``` + + if + + ``` + S =:= T, m >= n, Pi =:= Qi and Ti <: Ui for i in 1..n + ``` + + I.e. scrutinees and patterns must be equal and the corresponding bodies must + be subtypes. 
No case re-ordering is allowed, but the subtype can have more + cases than the supertype. + +2. The second rule states that a match type and its redux are mutual subtypes. + + ``` + S match { P1 => T1 ... Pn => Tn } <: U + U <: S match { P1 => T1 ... Pn => Tn } + ``` + + if + + `S match { P1 => T1 ... Pn => Tn }` reduces to `U` + +3. The third rule states that a match type conforms to its upper bound: + + ``` + (S match { P1 => T1 ... Pn => Tn } <: B) <: B + ``` + +## Termination + +Match type definitions can be recursive, which means that it's possible to run +into an infinite loop while reducing match types. + +Since reduction is linked to subtyping, we already have a cycle detection +mechanism in place. As a result, the following will already give a reasonable +error message: + +```scala +type L[X] = X match + case Int => L[X] + +def g[X]: L[X] = ??? +``` + +```scala + | val x: Int = g[Int] + | ^ + |Recursion limit exceeded. + |Maybe there is an illegal cyclic reference? + |If that's not the case, you could also try to + |increase the stacksize using the -Xss JVM option. + |A recurring operation is (inner to outer): + | + | subtype LazyRef(Test.L[Int]) <:< Int +``` + +Internally, the Scala compiler detects these cycles by turning selected stack overflows into +type errors. If there is a stack overflow during subtyping, the exception will +be caught and turned into a compile-time error that indicates a trace of the +subtype tests that caused the overflow without showing a full stack trace. + + +## Match Types Variance + +All type positions in a match type (scrutinee, patterns, bodies) are considered invariant. + +## Related Work + +Match types have similarities with +[closed type families](https://wiki.haskell.org/GHC/Type_families) in Haskell. +Some differences are: + +- Subtyping instead of type equalities. +- Match type reduction does not tighten the underlying constraint, whereas type + family reduction does unify. 
This difference in approach mirrors the + difference between local type inference in Scala and global type inference in + Haskell. + +Match types are also similar to Typescript's +[conditional types](https://github.com/Microsoft/TypeScript/pull/21316). The +main differences here are: + + - Conditional types only reduce if both the scrutinee and pattern are ground, + whereas match types also work for type parameters and abstract types. + - Match types support direct recursion. + - Conditional types distribute through union types. diff --git a/docs/_spec/TODOreference/new-types/new-types.md b/docs/_spec/TODOreference/new-types/new-types.md new file mode 100644 index 000000000000..84c157495d6f --- /dev/null +++ b/docs/_spec/TODOreference/new-types/new-types.md @@ -0,0 +1,7 @@ +--- +layout: index +title: "New Types" +nightlyOf: https://docs.scala-lang.org/scala3/reference/new-types/index.html +--- + +This chapter documents the new types introduced in Scala 3. diff --git a/docs/_spec/TODOreference/new-types/polymorphic-function-types.md b/docs/_spec/TODOreference/new-types/polymorphic-function-types.md new file mode 100644 index 000000000000..1754bf844831 --- /dev/null +++ b/docs/_spec/TODOreference/new-types/polymorphic-function-types.md @@ -0,0 +1,94 @@ +--- +layout: doc-page +title: "Polymorphic Function Types" +nightlyOf: https://docs.scala-lang.org/scala3/reference/new-types/polymorphic-function-types.html +--- + +A polymorphic function type is a function type which accepts type parameters. +For example: + +```scala +// A polymorphic method: +def foo[A](xs: List[A]): List[A] = xs.reverse + +// A polymorphic function value: +val bar: [A] => List[A] => List[A] +// ^^^^^^^^^^^^^^^^^^^^^^^^^ +// a polymorphic function type + = [A] => (xs: List[A]) => foo[A](xs) +``` + +Scala already has _polymorphic methods_, i.e. methods which accepts type parameters. +Method `foo` above is an example, accepting a type parameter `A`. 
+So far, it +was not possible to turn such methods into polymorphic function values like `bar` above, +which can be passed as parameters to other functions, or returned as results. + +In Scala 3 this is now possible. The type of the `bar` value above is + +```scala +[A] => List[A] => List[A] +``` + +This type describes function values which take a type `A` as a parameter, +then take a list of type `List[A]`, and return a list of the same type `List[A]`. + +[More details](https://github.com/lampepfl/dotty/pull/4672) + + +## Example Usage + +Polymorphic function types are particularly useful +when callers of a method are required to provide a +function which has to be polymorphic, +meaning that it should accept arbitrary types as part of its inputs. + +For instance, consider the situation where we have +a data type to represent the expressions of a simple language +(consisting only of variables and function applications) +in a strongly-typed way: + +```scala +enum Expr[A]: + case Var(name: String) + case Apply[A, B](fun: Expr[B => A], arg: Expr[B]) extends Expr[A] +``` + +We would like to provide a way for users to map a function +over all immediate subexpressions of a given `Expr`. +This requires the given function to be polymorphic, +since each subexpression may have a different type. 
+Here is how to implement this using polymorphic function types: + +```scala +def mapSubexpressions[A](e: Expr[A])(f: [B] => Expr[B] => Expr[B]): Expr[A] = + e match + case Apply(fun, arg) => Apply(f(fun), f(arg)) + case Var(n) => Var(n) +``` + +And here is how to use this function to _wrap_ each subexpression +in a given expression with a call to some `wrap` function, +defined as a variable: + +```scala +val e0 = Apply(Var("f"), Var("a")) +val e1 = mapSubexpressions(e0)( + [B] => (se: Expr[B]) => Apply(Var[B => B]("wrap"), se)) +println(e1) // Apply(Apply(Var(wrap),Var(f)),Apply(Var(wrap),Var(a))) +``` + +## Relationship With Type Lambdas + +Polymorphic function types are not to be confused with +[_type lambdas_](type-lambdas.md). +While the former describes the _type_ of a polymorphic _value_, +the latter is an actual function value _at the type level_. + +A good way of understanding the difference is to notice that +**_type lambdas are applied in types, +whereas polymorphic functions are applied in terms_**: +One would call the function `bar` above +by passing it a type argument `bar[Int]` _within a method body_. +On the other hand, given a type lambda such as `type F = [A] =>> List[A]`, +one would call `F` _within a type expression_, as in `type Bar = F[Int]`. diff --git a/docs/_spec/TODOreference/other-new-features/control-syntax.md b/docs/_spec/TODOreference/other-new-features/control-syntax.md new file mode 100644 index 000000000000..92204690f0b7 --- /dev/null +++ b/docs/_spec/TODOreference/other-new-features/control-syntax.md @@ -0,0 +1,47 @@ +--- +layout: doc-page +title: New Control Syntax +nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/control-syntax.html +--- + +Scala 3 has a new "quiet" syntax for control expressions that does not rely on +enclosing the condition in parentheses, and also allows to drop parentheses or braces +around the generators of a `for`-expression. 
Examples: +```scala +if x < 0 then + "negative" +else if x == 0 then + "zero" +else + "positive" + +if x < 0 then -x else x + +while x >= 0 do x = f(x) + +for x <- xs if x > 0 +yield x * x + +for + x <- xs + y <- ys +do + println(x + y) + +try body +catch case ex: IOException => handle +``` + +The rules in detail are: + + - The condition of an `if`-expression can be written without enclosing parentheses if it is followed by a `then`. + - The condition of a `while`-loop can be written without enclosing parentheses if it is followed by a `do`. + - The enumerators of a `for`-expression can be written without enclosing parentheses or braces if they are followed by a `yield` or `do`. + - A `do` in a `for`-expression expresses a `for`-loop. + - A `catch` can be followed by a single case on the same line. + If there are multiple cases, these have to appear within braces (just like in Scala 2) + or an indented block. +## Rewrites + +The Scala 3 compiler can rewrite source code from old syntax to new syntax and back. +When invoked with options `-rewrite -new-syntax` it will rewrite from old to new syntax, dropping parentheses and braces in conditions and enumerators. When invoked with options `-rewrite -old-syntax` it will rewrite in the reverse direction, inserting parentheses and braces as needed. diff --git a/docs/_spec/TODOreference/other-new-features/creator-applications.md b/docs/_spec/TODOreference/other-new-features/creator-applications.md new file mode 100644 index 000000000000..81f09d897955 --- /dev/null +++ b/docs/_spec/TODOreference/other-new-features/creator-applications.md @@ -0,0 +1,57 @@ +--- +layout: doc-page +title: "Universal Apply Methods" +nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/creator-applications.html +--- + +Scala case classes generate apply methods, so that values of case classes can be created using simple +function application, without needing to write `new`. 
+ +Scala 3 generalizes this scheme to all concrete classes. Example: + +```scala +class StringBuilder(s: String): + def this() = this("") + +StringBuilder("abc") // old: new StringBuilder("abc") +StringBuilder() // old: new StringBuilder() +``` + +This works since a companion object with two `apply` methods +is generated together with the class. The object looks like this: + +```scala +object StringBuilder: + inline def apply(s: String): StringBuilder = new StringBuilder(s) + inline def apply(): StringBuilder = new StringBuilder() +``` + +The synthetic object `StringBuilder` and its `apply` methods are called _constructor proxies_. +Constructor proxies are generated even for Java classes and classes coming from Scala 2. +The precise rules are as follows: + + 1. A constructor proxy companion object `object C` is created for a concrete class `C`, + provided the class does not already have a companion, and there is also no other value + or method named `C` defined or inherited in the scope where `C` is defined. + + 2. Constructor proxy `apply` methods are generated for a concrete class provided + + - the class has a companion object (which might have been generated in step 1), and + - that companion object does not already define a member named `apply`. + + Each generated `apply` method forwards to one constructor of the class. It has the + same type and value parameters as the constructor. + +Constructor proxy companions cannot be used as values by themselves. A proxy companion object must +be selected with `apply` (or be applied to arguments, in which case the `apply` is implicitly +inserted). + +Constructor proxies are also not allowed to shadow normal definitions. That is, +if an identifier resolves to a constructor proxy, and the same identifier is also +defined or imported in some other scope, an ambiguity is reported. + +## Motivation + +Leaving out `new` hides an implementation detail and makes code more pleasant to read. 
Even though +it requires a new rule, it will likely increase the perceived regularity of the language, since case +classes already provide function call creation syntax (and are often defined for this reason alone). diff --git a/docs/_spec/TODOreference/other-new-features/experimental-defs.md b/docs/_spec/TODOreference/other-new-features/experimental-defs.md new file mode 100644 index 000000000000..225b61161652 --- /dev/null +++ b/docs/_spec/TODOreference/other-new-features/experimental-defs.md @@ -0,0 +1,318 @@ +--- +layout: doc-page +title: "Experimental Definitions" +nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/experimental-defs.html +--- + +The [`@experimental`](https://scala-lang.org/api/3.x/scala/annotation/experimental.html) annotation allows the definition of an API that is not guaranteed backward binary or source compatibility. +This annotation can be placed on term or type definitions. + +## References to experimental definitions + +Experimental definitions can only be referenced in an experimental scope. Experimental scopes are defined as follows: + +1. The RHS of an experimental `def`, `val`, `var`, `given` or `type` is an experimental scope. Examples: + +
+ Example 1 + + ```scala + import scala.annotation.experimental + + @experimental + def x = () + + def d1 = x // error: value x is marked @experimental and therefore ... + @experimental def d2 = x + + val v1 = x // error: value x is marked @experimental and therefore ... + @experimental val v2 = x + + var vr1 = x // error: value x is marked @experimental and therefore ... + @experimental var vr2 = x + + lazy val lv1 = x // error: value x is marked @experimental and therefore ... + @experimental lazy val lv2 = x + ``` +
+ +
+ Example 2 + + ```scala + import scala.annotation.experimental + + @experimental + val x = () + + @experimental + def f() = () + + @experimental + object X: + def fx() = 1 + + def test1: Unit = + f() // error: def f is marked @experimental and therefore ... + x // error: value x is marked @experimental and therefore ... + X.fx() // error: object X is marked @experimental and therefore ... + import X.fx + fx() // error: object X is marked @experimental and therefore ... + + @experimental + def test2: Unit = + // references to f, x and X are ok because `test2` is experimental + f() + x + X.fx() + import X.fx + fx() + ``` +
+ +
+ Example 3 + + ```scala + import scala.annotation.experimental + + @experimental type E + + type A = E // error type E is marked @experimental and therefore ... + @experimental type B = E + ``` +
+ +
+ Example 4 + + ```scala + import scala.annotation.experimental + + @experimental class A + @experimental type X + @experimental type Y = Int + @experimental opaque type Z = Int + + def test: Unit = + new A // error: class A is marked @experimental and therefore ... + val i0: A = ??? // error: class A is marked @experimental and therefore ... + val i1: X = ??? // error: type X is marked @experimental and therefore ... + val i2: Y = ??? // error: type Y is marked @experimental and therefore ... + val i2: Z = ??? // error: type Y is marked @experimental and therefore ... + () + ``` +
+ +
+ Example 5 + + ```scala + @experimental + trait ExpSAM { + def foo(x: Int): Int + } + def bar(f: ExpSAM): Unit = {} // error: error form rule 2 + + def test: Unit = + bar(x => x) // error: reference to experimental SAM + () + ``` +
+ +2. The signatures of an experimental `def`, `val`, `var`, `given` and `type`, or constructors of `class` and `trait` are experimental scopes. Examples: + +
+ Example 1 + + ```scala + import scala.annotation.experimental + + @experimental def x = 2 + @experimental class A + @experimental type X + @experimental type Y = Int + @experimental opaque type Z = Int + + def test1( + p1: A, // error: class A is marked @experimental and therefore ... + p2: List[A], // error: class A is marked @experimental and therefore ... + p3: X, // error: type X is marked @experimental and therefore ... + p4: Y, // error: type Y is marked @experimental and therefore ... + p5: Z, // error: type Z is marked @experimental and therefore ... + p6: Any = x // error: def x is marked @experimental and therefore ... + ): A = ??? // error: class A is marked @experimental and therefore ... + + @experimental def test2( + p1: A, + p2: List[A], + p3: X, + p4: Y, + p5: Z, + p6: Any = x + ): A = ??? + + class Test1( + p1: A, // error + p2: List[A], // error + p3: X, // error + p4: Y, // error + p5: Z, // error + p6: Any = x // error + ) {} + + @experimental class Test2( + p1: A, + p2: List[A], + p3: X, + p4: Y, + p5: Z, + p6: Any = x + ) {} + + trait Test1( + p1: A, // error + p2: List[A], // error + p3: X, // error + p4: Y, // error + p5: Z, // error + p6: Any = x // error + ) {} + + @experimental trait Test2( + p1: A, + p2: List[A], + p3: X, + p4: Y, + p5: Z, + p6: Any = x + ) {} + ``` +
+ +3. The `extends` clause of an experimental `class`, `trait` or `object` is an experimental scope. Examples: + +
+ Example 1 + + ```scala + import scala.annotation.experimental + + @experimental def x = 2 + + @experimental class A1(x: Any) + class A2(x: Any) + + + @experimental class B1 extends A1(1) + class B2 extends A1(1) // error: class A1 is marked @experimental and therefore marked @experimental and therefore ... + + @experimental class C1 extends A2(x) + class C2 extends A2(x) // error def x is marked @experimental and therefore + ``` +
+ +4. The body of an experimental `class`, `trait` or `object` is an experimental scope. Examples: + +
+ Example 1 + + ```scala + import scala.annotation.experimental + + @experimental def x = 2 + + @experimental class A { + def f = x // ok because A is experimental + } + + @experimental class B { + def f = x // ok because A is experimental + } + + @experimental object C { + def f = x // ok because A is experimental + } + + @experimental class D { + def f = { + object B { + x // ok because A is experimental + } + } + } + ``` + +
+ +5. Annotations of an experimental definition are in experimental scopes. Examples: + +
+ Example 1 + + ```scala + import scala.annotation.experimental + + @experimental class myExperimentalAnnot extends scala.annotation.Annotation + + @myExperimentalAnnot // error + def test: Unit = () + + @experimental + @myExperimentalAnnot + def test: Unit = () + ``` + +
+ +6. Any code compiled using a [_Nightly_](https://search.maven.org/artifact/org.scala-lang/scala3-compiler_3) or _Snapshot_ version of the compiler is considered to be in an experimental scope. +Can use the `-Yno-experimental` compiler flag to disable it and run as a proper release. + +In any other situation, a reference to an experimental definition will cause a compilation error. + +## Experimental inheritance + +All subclasses of an experimental `class` or `trait` must be marked as [`@experimental`](https://scala-lang.org/api/3.x/scala/annotation/experimental.html) even if they are in an experimental scope. +Anonymous classes and SAMs of experimental classes are considered experimental. + +We require explicit annotations to make sure we do not have completion or cycles issues with nested classes. This restriction could be relaxed in the future. + +## Experimental overriding + +For an overriding member `M` and overridden member `O`, if `O` is non-experimental then `M` must be non-experimental. + +This makes sure that we cannot have accidental binary incompatibilities such as the following change. +```diff +class A: + def f: Any = 1 +class B extends A: +- @experimental def f: Int = 2 +``` + +## Test frameworks + +Tests can be defined as experimental. Tests frameworks can execute tests using reflection even if they are in an experimental class, object or method. Examples: + +
+Example 1 + +Test that touch experimental APIs can be written as follows + +```scala +import scala.annotation.experimental + +@experimental def x = 2 + +class MyTests { + /*@Test*/ def test1 = x // error + @experimental /*@Test*/ def test2 = x +} + +@experimental +class MyExperimentalTests { + /*@Test*/ def test1 = x + /*@Test*/ def test2 = x +} +``` + +
diff --git a/docs/_spec/TODOreference/other-new-features/export.md b/docs/_spec/TODOreference/other-new-features/export.md new file mode 100644 index 000000000000..40e2ad9df248 --- /dev/null +++ b/docs/_spec/TODOreference/other-new-features/export.md @@ -0,0 +1,234 @@ +--- +layout: doc-page +title: "Export Clauses" +nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/export.html +--- + +An export clause defines aliases for selected members of an object. Example: + +```scala +class BitMap +class InkJet + +class Printer: + type PrinterType + def print(bits: BitMap): Unit = ??? + def status: List[String] = ??? + +class Scanner: + def scan(): BitMap = ??? + def status: List[String] = ??? + +class Copier: + private val printUnit = new Printer { type PrinterType = InkJet } + private val scanUnit = new Scanner + + export scanUnit.scan + export printUnit.{status as _, *} + + def status: List[String] = printUnit.status ++ scanUnit.status +``` + +The two `export` clauses define the following _export aliases_ in class `Copier`: + +```scala +final def scan(): BitMap = scanUnit.scan() +final def print(bits: BitMap): Unit = printUnit.print(bits) +final type PrinterType = printUnit.PrinterType +``` + +They can be accessed inside `Copier` as well as from outside: + +```scala +val copier = new Copier +copier.print(copier.scan()) +``` + +An `export` clause has the same format as an import clause. Its general form is: + +```scala +export path . { sel_1, ..., sel_n } +``` + +It consists of a qualifier expression `path`, which must be a stable identifier, followed by +one or more selectors `sel_i` that identify what gets an alias. Selectors can be +of one of the following forms: + + - A _simple selector_ `x` creates aliases for all eligible members of `path` that are named `x`. + - A _renaming selector_ `x as y` creates aliases for all eligible members of `path` that are named `x`, but the alias is named `y` instead of `x`. 
- An _omitting selector_ `x as _` prevents `x` from being aliased by a subsequent + wildcard selector. + - A _given selector_ `given x` has an optional type bound `x`. It creates aliases for all eligible given instances that conform to either `x`, or `Any` if `x` is omitted, except for members that are named by a previous simple, renaming, or omitting selector. + - A _wildcard selector_ `*` creates aliases for all eligible members of `path` except for given instances, + synthetic members generated by the compiler and those members that are named by a previous simple, renaming, or omitting selector. + \ + Notes: + - eligible constructor proxies are also included, even though they are synthetic members. + - members created by an export are also included. They are created by the compiler, but are not considered synthetic. + +A member is _eligible_ if all of the following hold: + + - its owner is not a base class of the class[(\*)](#note_class) containing the export clause, + - the member does not override a concrete definition that has as owner + a base class of the class containing the export clause. + - it is accessible at the export clause, + - it is not a constructor, nor the (synthetic) class part of an object, + - it is a given instance (declared with `given`) if and only if the export is from a _given selector_. + +It is a compile-time error if a simple or renaming selector does not identify +any eligible members. + +Type members are aliased by type definitions, and term members are aliased by method definitions. For instance: +```scala +object O: + class C(val x: Int) + def m(c: C): Int = c.x + 1 +export O.* + // generates + // type C = O.C + // def m(c: O.C): Int = O.m(c) +``` + +Export aliases copy the type and value parameters of the members they refer to. +Export aliases are always `final`. 
Aliases of given instances are again defined as givens (and aliases of old-style implicits are `implicit`). Aliases of extensions are again defined as extensions. Aliases of inline methods or values are again defined `inline`. There are no other modifiers that can be given to an alias. This has the following consequences for overriding: + + - Export aliases cannot be overridden, since they are final. + - Export aliases cannot override concrete members in base classes, since they are + not marked `override`. + - However, export aliases can implement deferred members of base classes. + +Export aliases for public value definitions that are accessed without +referring to private values in the qualifier path +are marked by the compiler as "stable" and their result types are the singleton types of the aliased definitions. This means that they can be used as parts of stable identifier paths, even though they are technically methods. For instance, the following is OK: +```scala +class C { type T } +object O { val c: C = ... } +export O.c +def f: c.T = ... +``` + + +**Restrictions:** + + 1. Export clauses can appear in classes or they can appear at the top-level. An export clause cannot appear as a statement in a block. + 1. If an export clause contains a wildcard or given selector, it is forbidden for its qualifier path to refer to a package. This is because it is not yet known how to safely track wildcard dependencies to a package for the purposes of incremental compilation. + 1. An export renaming hides un-renamed exports matching the target name. For instance, the following + clause would be invalid since `B` is hidden by the renaming `A as B`. + ```scala + export {A as B, B} // error: B is hidden + ``` + + 1. Renamings in an export clause must have pairwise different target names. For instance, the following clause would be invalid: + ```scala + export {A as C, B as C} // error: duplicate renaming + ``` + + 1. 
Simple renaming exports like + ```scala + export status as stat + ``` + are not supported yet. They would run afoul of the restriction that the + exported `a` cannot be already a member of the object containing the export. + This restriction might be lifted in the future. + + +(\*) **Note:** Unless otherwise stated, the term "class" in this discussion also includes object and trait definitions. + +## Motivation + +It is a standard recommendation to prefer composition over inheritance. This is really an application of the principle of least power: Composition treats components as blackboxes whereas inheritance can affect the internal workings of components through overriding. Sometimes the close coupling implied by inheritance is the best solution for a problem, but where this is not necessary the looser coupling of composition is better. + +So far, object-oriented languages including Scala made it much easier to use inheritance than composition. Inheritance only requires an `extends` clause whereas composition required a verbose elaboration of a sequence of forwarders. So in that sense, object-oriented languages are pushing +programmers to a solution that is often too powerful. Export clauses redress the balance. They make composition relationships as concise and easy to express as inheritance relationships. Export clauses also offer more flexibility than extends clauses since members can be renamed or omitted. + +Export clauses also fill a gap opened by the shift from package objects to top-level definitions. One occasionally useful idiom that gets lost in this shift is a package object inheriting from some class. The idiom is often used in a facade like pattern, to make members +of internal compositions available to users of a package. Top-level definitions are not wrapped in a user-defined object, so they can't inherit anything. However, top-level definitions can be export clauses, which supports the facade design pattern in a safer and +more flexible way. 
+ +## Export Clauses in Extensions + +An export clause may also appear in an extension. + +Example: +```scala +class StringOps(x: String): + def *(n: Int): String = ... + def capitalize: String = ... + +extension (x: String) + def take(n: Int): String = x.substring(0, n) + def drop(n: Int): String = x.substring(n) + private def moreOps = new StringOps(x) + export moreOps.* +``` +In this case the qualifier expression must be an identifier that refers to a unique parameterless extension method in the same extension clause. The export will create +extension methods for all accessible term members +in the result of the qualifier path. For instance, the extension above would be expanded to +```scala +extension (x: String) + def take(n: Int): String = x.substring(0, n) + def drop(n: Int): String = x.substring(n) + private def moreOps = StringOps(x) + def *(n: Int): String = moreOps.*(n) + def capitalize: String = moreOps.capitalize +``` + +## Syntax changes: + +``` +TemplateStat ::= ... + | Export +TopStat ::= ... + | Export +ExtMethod ::= ... + | Export +Export ::= ‘export’ ImportExpr {‘,’ ImportExpr} +ImportExpr ::= SimpleRef {‘.’ id} ‘.’ ImportSpec +ImportSpec ::= NamedSelector + | WildcardSelector + | ‘{’ ImportSelectors) ‘}’ +NamedSelector ::= id [‘as’ (id | ‘_’)] +WildCardSelector ::= ‘*’ | ‘given’ [InfixType] +ImportSelectors ::= NamedSelector [‘,’ ImportSelectors] + | WildCardSelector {‘,’ WildCardSelector} +``` + +## Elaboration of Export Clauses + +Export clauses raise questions about the order of elaboration during type checking. +Consider the following example: + +```scala +class B { val c: Int } +object a { val b = new B } +export a.* +export b.* +``` + +Is the `export b.*` clause legal? If yes, what does it export? Is it equivalent to `export a.b.*`? What about if we swap the last two clauses? + +``` +export b.* +export a.* +``` + +To avoid tricky questions like these, we fix the elaboration order of exports as follows. 
+ +Export clauses are processed when the type information of the enclosing object or class is completed. Completion so far consisted of the following steps: + + 1. Elaborate any annotations of the class. + 2. Elaborate the parameters of the class. + 3. Elaborate the self type of the class, if one is given. + 4. Enter all definitions of the class as class members, with types to be completed + on demand. + 5. Determine the types of all parents of the class. + + With export clauses, the following steps are added: + + 6. Compute the types of all paths in export clauses. + 7. Enter export aliases for the eligible members of all paths in export clauses. + +It is important that steps 6 and 7 are done in sequence: We first compute the types of _all_ +paths in export clauses and only after this is done we enter any export aliases as class members. This means that a path of an export clause cannot refer to an alias made available +by another export clause of the same class. diff --git a/docs/_spec/TODOreference/other-new-features/indentation.md b/docs/_spec/TODOreference/other-new-features/indentation.md new file mode 100644 index 000000000000..e931030ab696 --- /dev/null +++ b/docs/_spec/TODOreference/other-new-features/indentation.md @@ -0,0 +1,509 @@ +--- +layout: doc-page +title: "Optional Braces" +nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/indentation.html +--- + +Scala 3 enforces some rules on indentation and allows some occurrences of braces `{...}` to be optional: + +- First, some badly indented programs are flagged with warnings. +- Second, some occurrences of braces `{...}` are made optional. Generally, the rule + is that adding a pair of optional braces will not change the meaning of a well-indented program. + +These changes can be turned off with the compiler flag `-no-indent`. + +## Indentation Rules + +The compiler enforces two rules for well-indented programs, flagging violations as warnings. + + 1. 
In a brace-delimited region, no statement is allowed to start to the left + of the first statement after the opening brace that starts a new line. + + This rule is helpful for finding missing closing braces. It prevents errors like: + + ```scala + if (x < 0) { + println(1) + println(2) + + println("done") // error: indented too far to the left + ``` + + 2. If significant indentation is turned off (i.e. under Scala 2 mode or under `-no-indent`) and we are at the start of an indented sub-part of an expression, and the indented part ends in a newline, the next statement must start at an indentation width less than the sub-part. This prevents errors where an opening brace was forgotten, as in + + ```scala + if (x < 0) + println(1) + println(2) // error: missing `{` + ``` + +These rules still leave a lot of leeway how programs should be indented. For instance, they do not impose +any restrictions on indentation within expressions, nor do they require that all statements of an indentation block line up exactly. + +The rules are generally helpful in pinpointing the root cause of errors related to missing opening or closing braces. These errors are often quite hard to diagnose, in particular in large programs. + +## Optional Braces + +The compiler will insert `` or `` +tokens at certain line breaks. Grammatically, pairs of `` and `` tokens have the same effect as pairs of braces `{` and `}`. + +The algorithm makes use of a stack `IW` of previously encountered indentation widths. The stack initially holds a single element with a zero indentation width. The _current indentation width_ is the indentation width of the top of the stack. + +There are two rules: + + 1. 
An `` is inserted at a line break, if + + - An indentation region can start at the current position in the source, and + - the first token on the next line has an indentation width strictly greater + than the current indentation width + + An indentation region can start + + - after the leading parameters of an `extension`, or + - after a `with` in a given instance, or + - after a `:` at the start of a template body (see discussion of `` below), or + - after one of the following tokens: + + ``` + = => ?=> <- catch do else finally for + if match return then throw try while yield + ``` + + - after the closing `)` of a condition in an old-style `if` or `while`. + - after the closing `)` or `}` of the enumerations of an old-style `for` loop without a `do`. + + If an `` is inserted, the indentation width of the token on the next line + is pushed onto `IW`, which makes it the new current indentation width. + + 2. An `` is inserted at a line break, if + + - the first token on the next line has an indentation width strictly less + than the current indentation width, and + - the last token on the previous line is not one of the following tokens + which indicate that the previous statement continues: + ``` + then else do catch finally yield match + ``` + - if the first token on the next line is a + [leading infix operator](../changed-features/operators.md). + then its indentation width is less then the current indentation width, + and it either matches a previous indentation width or is also less + than the enclosing indentation width. + + If an `` is inserted, the top element is popped from `IW`. + If the indentation width of the token on the next line is still less than the new current indentation width, step (2) repeats. Therefore, several `` tokens + may be inserted in a row. + + The following two additional rules support parsing of legacy code with ad-hoc layout. 
They might be withdrawn in future language versions: + + - An `` is also inserted if the next token following a statement sequence starting with an `` closes an indentation region, i.e. is one of `then`, `else`, `do`, `catch`, `finally`, `yield`, `}`, `)`, `]` or `case`. + + - An `` is finally inserted in front of a comma that follows a statement sequence starting with an `` if the indented region is itself enclosed in parentheses. + +It is an error if the indentation width of the token following an `` does not match the indentation of some previous line in the enclosing indentation region. For instance, the following would be rejected. + +```scala +if x < 0 then + -x + else // error: `else` does not align correctly + x +``` + +Indentation tokens are only inserted in regions where newline statement separators are also inferred: +at the top-level, inside braces `{...}`, but not inside parentheses `(...)`, patterns or types. + +**Note:** The rules for leading infix operators above are there to make sure that +```scala + one + + two.match + case 1 => b + case 2 => c + + three +``` +is parsed as `one + (two.match ...) + three`. Also, that +```scala +if x then + a + + b + + c +else d +``` +is parsed as `if x then a + b + c else d`. + +## Optional Braces Around Template Bodies + +The Scala grammar uses the term _template body_ for the definitions of a class, trait, or object that are normally enclosed in braces. The braces around a template body can also be omitted by means of the following rule. + +A template body can alternatively consist of a colon followed by one or more indented statements. To this purpose we introduce a new `` token that reads as +the standard colon "`:`" but is generated instead of it where `` +is legal according to the context free syntax, but only if the previous token +is an alphanumeric identifier, a backticked identifier, or one of the tokens `this`, `super`, "`)`", and "`]`". + +An indentation region can start after a ``. 
A template body may be either enclosed in braces, or it may start with +` ` and end with ``. +Analogous rules apply for enum bodies, type refinements, and local packages containing nested definitions. + +With these new rules, the following constructs are all valid: + +```scala +trait A: + def f: Int + +class C(x: Int) extends A: + def f = x + +object O: + def f = 3 + +enum Color: + case Red, Green, Blue + +new A: + def f = 3 + +package p: + def a = 1 + +package q: + def b = 2 +``` + +In each case, the `:` at the end of line can be replaced without change of meaning by a pair of braces that enclose the following indented definition(s). + +The syntax changes allowing this are as follows: + +Define for an arbitrary sequence of tokens or non-terminals `TS`: + +``` +:<<< TS >>> ::= ‘{’ TS ‘}’ + | +``` +Then the grammar changes as follows: +``` +TemplateBody ::= :<<< [SelfType] TemplateStat {semi TemplateStat} >>> +EnumBody ::= :<<< [SelfType] EnumStat {semi EnumStat} >>> +Refinement ::= :<<< [RefineDcl] {semi [RefineDcl]} >>> +Packaging ::= ‘package’ QualId :<<< TopStats >>> +``` + +## Spaces vs Tabs + +Indentation prefixes can consist of spaces and/or tabs. Indentation widths are the indentation prefixes themselves, ordered by the string prefix relation. So, so for instance "2 tabs, followed by 4 spaces" is strictly less than "2 tabs, followed by 5 spaces", but "2 tabs, followed by 4 spaces" is incomparable to "6 tabs" or to "4 spaces, followed by 2 tabs". It is an error if the indentation width of some line is incomparable with the indentation width of the region that's current at that point. To avoid such errors, it is a good idea not to mix spaces and tabs in the same source file. + +## Indentation and Braces + +Indentation can be mixed freely with braces `{...}`, as well as brackets `[...]` and parentheses `(...)`. For interpreting indentation inside such regions, the following rules apply. + + 1. 
The assumed indentation width of a multiline region enclosed in braces is the + indentation width of the first token that starts a new line after the opening brace. + + 2. The assumed indentation width of a multiline region inside brackets or parentheses is: + + - if the opening bracket or parenthesis is at the end of a line, the indentation width of token following it, + - otherwise, the indentation width of the enclosing region. + + 3. On encountering a closing brace `}`, bracket `]` or parenthesis `)`, as many `` tokens as necessary are inserted to close all open nested indentation regions. + +For instance, consider: +```scala +{ + val x = f(x: Int, y => + x * ( + y + 1 + ) + + (x + + x) + ) +} +``` + - Here, the indentation width of the region enclosed by the braces is 3 (i.e. the indentation width of the +statement starting with `val`). + - The indentation width of the region in parentheses that follows `f` is also 3, since the opening + parenthesis is not at the end of a line. + - The indentation width of the region in parentheses around `y + 1` is 9 + (i.e. the indentation width of `y + 1`). + - Finally, the indentation width of the last region in parentheses starting with `(x` is 6 (i.e. the indentation width of the indented region following the `=>`. + +## Special Treatment of Case Clauses + +The indentation rules for `match` expressions and `catch` clauses are refined as follows: + +- An indentation region is opened after a `match` or `catch` also if the following `case` + appears at the indentation width that's current for the `match` itself. +- In that case, the indentation region closes at the first token at that + same indentation width that is not a `case`, or at any token with a smaller + indentation width, whichever comes first. 
+ +The rules allow to write `match` expressions where cases are not indented themselves, as in the example below: + +```scala +x match +case 1 => print("I") +case 2 => print("II") +case 3 => print("III") +case 4 => print("IV") +case 5 => print("V") + +println(".") +``` + +## Using Indentation to Signal Statement Continuation + +Indentation is used in some situations to decide whether to insert a virtual semicolon between +two consecutive lines or to treat them as one statement. Virtual semicolon insertion is +suppressed if the second line is indented more relative to the first one, and either the second line +starts with "`(`", "`[`", or "`{`" or the first line ends with `return`. Examples: + +```scala +f(x + 1) + (2, 3) // equivalent to `f(x + 1)(2, 3)` + +g(x + 1) +(2, 3) // equivalent to `g(x + 1); (2, 3)` + +h(x + 1) + {} // equivalent to `h(x + 1){}` + +i(x + 1) +{} // equivalent to `i(x + 1); {}` + +if x < 0 then return + a + b // equivalent to `if x < 0 then return a + b` + +if x < 0 then return +println(a + b) // equivalent to `if x < 0 then return; println(a + b)` +``` +In Scala 2, a line starting with "`{`" always continues the function call on the preceding line, +irrespective of indentation, whereas a virtual semicolon is inserted in all other cases. +The Scala-2 behavior is retained under source `-no-indent` or `-source 3.0-migration`. + + + +## The End Marker + +Indentation-based syntax has many advantages over other conventions. But one possible problem is that it makes it hard to discern when a large indentation region ends, since there is no specific token that delineates the end. Braces are not much better since a brace by itself also contains no information about what region is closed. + +To solve this problem, Scala 3 offers an optional `end` marker. Example: + +```scala +def largeMethod(...) = + ... + if ... then ... + else + ... // a large block + end if + ... 
// more code +end largeMethod +``` + +An `end` marker consists of the identifier `end` and a follow-on specifier token that together constitute all the tokes of a line. Possible specifier tokens are +identifiers or one of the following keywords + +```scala +if while for match try new this val given +``` + +End markers are allowed in statement sequences. The specifier token `s` of an end marker must correspond to the statement that precedes it. This means: + +- If the statement defines a member `x` then `s` must be the same identifier `x`. +- If the statement defines a constructor then `s` must be `this`. +- If the statement defines an anonymous given, then `s` must be `given`. +- If the statement defines an anonymous extension, then `s` must be `extension`. +- If the statement defines an anonymous class, then `s` must be `new`. +- If the statement is a `val` definition binding a pattern, then `s` must be `val`. +- If the statement is a package clause that refers to package `p`, then `s` must be the same identifier `p`. +- If the statement is an `if`, `while`, `for`, `try`, or `match` statement, then `s` must be that same token. + +For instance, the following end markers are all legal: + +```scala +package p1.p2: + + abstract class C(): + + def this(x: Int) = + this() + if x > 0 then + val a :: b = + x :: Nil + end val + var y = + x + end y + while y > 0 do + println(y) + y -= 1 + end while + try + x match + case 0 => println("0") + case _ => + end match + finally + println("done") + end try + end if + end this + + def f: String + end C + + object C: + given C = + new C: + def f = "!" + end f + end new + end given + end C + + extension (x: C) + def ff: String = x.f ++ x.f + end extension + +end p2 +``` + +### When to Use End Markers + +It is recommended that `end` markers are used for code where the extent of an indentation region is not immediately apparent "at a glance". 
People will have different preferences about what this means, but one can nevertheless give some guidelines that stem from experience. An end marker makes sense if
+
+- the construct contains blank lines, or
+- the construct is long, say 15-20 lines or more, or
+- the construct ends heavily indented, say 4 indentation levels or more.
+
+If none of these criteria apply, it's often better to not use an end marker since the code will be just as clear and more concise. If there are several ending regions that satisfy one of the criteria above, we usually need an end marker only for the outermost closed region. So cascades of end markers as in the example above are usually better avoided.
+
+### Syntax
+
+```
+EndMarker ::= ‘end’ EndMarkerTag -- when followed by EOL
+EndMarkerTag ::= id | ‘if’ | ‘while’ | ‘for’ | ‘match’ | ‘try’
+ | ‘new’ | ‘this’ | ‘given’ | ‘extension’ | ‘val’
+BlockStat ::= ... | EndMarker
+TemplateStat ::= ... | EndMarker
+TopStat ::= ... | EndMarker
+```
+
+## Example
+
+Here is a (somewhat meta-circular) example of code using indentation. It provides a concrete representation of indentation widths as defined above together with efficient operations for constructing and comparing indentation widths.
+ +```scala +enum IndentWidth: + case Run(ch: Char, n: Int) + case Conc(l: IndentWidth, r: Run) + + def <= (that: IndentWidth): Boolean = this match + case Run(ch1, n1) => + that match + case Run(ch2, n2) => n1 <= n2 && (ch1 == ch2 || n1 == 0) + case Conc(l, r) => this <= l + case Conc(l1, r1) => + that match + case Conc(l2, r2) => l1 == l2 && r1 <= r2 + case _ => false + + def < (that: IndentWidth): Boolean = + this <= that && !(that <= this) + + override def toString: String = + this match + case Run(ch, n) => + val kind = ch match + case ' ' => "space" + case '\t' => "tab" + case _ => s"'$ch'-character" + val suffix = if n == 1 then "" else "s" + s"$n $kind$suffix" + case Conc(l, r) => + s"$l, $r" + +object IndentWidth: + private inline val MaxCached = 40 + + private val spaces = IArray.tabulate(MaxCached + 1)(new Run(' ', _)) + private val tabs = IArray.tabulate(MaxCached + 1)(new Run('\t', _)) + + def Run(ch: Char, n: Int): Run = + if n <= MaxCached && ch == ' ' then + spaces(n) + else if n <= MaxCached && ch == '\t' then + tabs(n) + else + new Run(ch, n) + end Run + + val Zero = Run(' ', 0) +end IndentWidth +``` + +## Settings and Rewrites + +Significant indentation is enabled by default. It can be turned off by giving any of the options `-no-indent`, `-old-syntax` and `-source 3.0-migration`. If indentation is turned off, it is nevertheless checked that indentation conforms to the logical program structure as defined by braces. If that is not the case, the compiler issues a warning. + +The Scala 3 compiler can rewrite source code to indented code and back. +When invoked with options `-rewrite -indent` it will rewrite braces to +indented regions where possible. When invoked with options `-rewrite -no-indent` it will rewrite in the reverse direction, inserting braces for indentation regions. +The `-indent` option only works on [new-style syntax](./control-syntax.md). 
So to go from old-style syntax to new-style indented code one has to invoke the compiler twice, first with options `-rewrite -new-syntax`, then again with options +`-rewrite -indent`. To go in the opposite direction, from indented code to old-style syntax, it's `-rewrite -no-indent`, followed by `-rewrite -old-syntax`. + +## Variant: Indentation Marker `:` for Arguments + +Generally, the possible indentation regions coincide with those regions where braces `{...}` are also legal, no matter whether the braces enclose an expression or a set of definitions. There is one exception, though: Arguments to functions can be enclosed in braces but they cannot be simply indented instead. Making indentation always significant for function arguments would be too restrictive and fragile. + +To allow such arguments to be written without braces, a variant of the indentation scheme is implemented under language import +```scala +import language.experimental.fewerBraces +``` +In this variant, a `` token is also recognized where function argument would be expected. Examples: + +```scala +times(10): + println("ah") + println("ha") +``` + +or + +```scala +credentials `++`: + val file = Path.userHome / ".credentials" + if file.exists + then Seq(Credentials(file)) + else Seq() +``` + +or + +```scala +xs.map: + x => + val y = x - 1 + y * y +``` +What's more, a `:` in these settings can also be followed on the same line by the parameter part and arrow of a lambda. So the last example could be compressed to this: + +```scala +xs.map: x => + val y = x - 1 + y * y +``` +and the following would also be legal: +```scala +xs.foldLeft(0): (x, y) => + x + y +``` + +The grammar changes for this variant are as follows. + +``` +SimpleExpr ::= ... + | SimpleExpr ColonArgument +InfixExpr ::= ... 
+ | InfixExpr id ColonArgument +ColonArgument ::= colon [LambdaStart] + indent (CaseClauses | Block) outdent +LambdaStart ::= FunParams (‘=>’ | ‘?=>’) + | HkTypeParamClause ‘=>’ +``` \ No newline at end of file diff --git a/docs/_spec/TODOreference/other-new-features/matchable.md b/docs/_spec/TODOreference/other-new-features/matchable.md new file mode 100644 index 000000000000..234fdf03220c --- /dev/null +++ b/docs/_spec/TODOreference/other-new-features/matchable.md @@ -0,0 +1,141 @@ +--- +layout: doc-page +title: "The Matchable Trait" +nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/matchable.html +--- + +A new trait [`Matchable`](https://scala-lang.org/api/3.x/scala/Matchable.html) controls the ability to pattern match. + +## The Problem + +The Scala 3 standard library has a type [`IArray`](https://scala-lang.org/api/3.x/scala.html#IArray-0) for immutable +arrays that is defined like this: + +```scala + opaque type IArray[+T] = Array[_ <: T] +``` + +The `IArray` type offers extension methods for `length` and `apply`, but not for `update`; hence it seems values of type `IArray` cannot be updated. + +However, there is a potential hole due to pattern matching. Consider: + +```scala +val imm: IArray[Int] = ... +imm match + case a: Array[Int] => a(0) = 1 +``` + +The test will succeed at runtime since [`IArray`](https://scala-lang.org/api/3.x/scala.html#IArray-0)s _are_ represented as +`Array`s at runtime. But if we allowed it, it would break the fundamental abstraction of immutable arrays. + +__Aside:__ One could also achieve the same by casting: + +```scala +imm.asInstanceOf[Array[Int]](0) = 1 +``` + +But that is not as much of a problem since in Scala `asInstanceOf` is understood to be low-level and unsafe. By contrast, a pattern match that compiles without warning or error should not break abstractions. + +Note also that the problem is not tied to [opaque types](opaques.md) as match selectors. 
The following slight variant with a value of parametric +type `T` as match selector leads to the same problem: + +```scala +def f[T](x: T) = x match + case a: Array[Int] => a(0) = 0 +f(imm) +``` + +Finally, note that the problem is not linked to just [opaque types](opaques.md). No unbounded type parameter or abstract type should be decomposable with a pattern match. + +## The Solution + +There is a new type [`scala.Matchable`](https://scala-lang.org/api/3.x/scala/Matchable.html) that controls pattern matching. When typing a pattern match of a constructor pattern `C(...)` or +a type pattern `_: C` it is required that the selector type conforms +to `Matchable`. If that's not the case a warning is issued. For instance when compiling the example at the start of this section we get: + +``` +> sc ../new/test.scala -source future +-- Warning: ../new/test.scala:4:12 --------------------------------------------- +4 | case a: Array[Int] => a(0) = 0 + | ^^^^^^^^^^ + | pattern selector should be an instance of Matchable, + | but it has unmatchable type IArray[Int] instead +``` + +To allow migration from Scala 2 and cross-compiling +between Scala 2 and 3 the warning is turned on only for `-source future-migration` or higher. + +[`Matchable`](https://scala-lang.org/api/3.x/scala/Matchable.html) is a universal trait with `Any` as its parent class. It is extended by both [`AnyVal`](https://scala-lang.org/api/3.x/scala/AnyVal.html) and [`AnyRef`](https://scala-lang.org/api/3.x/scala/AnyRef.html). Since `Matchable` is a supertype of every concrete value or reference class it means that instances of such classes can be matched as before. However, match selectors of the following types will produce a warning: + +- Type `Any`: if pattern matching is required one should use `Matchable` instead. +- Unbounded type parameters and abstract types: If pattern matching is required they should have an upper bound `Matchable`. 
+- Type parameters and abstract types that are only bounded by some + universal trait: Again, `Matchable` should be added as a bound. + +Here is the hierarchy of top-level classes and traits with their defined methods: + +```scala +abstract class Any: + def getClass + def isInstanceOf + def asInstanceOf + def == + def != + def ## + def equals + def hashCode + def toString + +trait Matchable extends Any + +class AnyVal extends Any, Matchable +class Object extends Any, Matchable +``` + +[`Matchable`](https://scala-lang.org/api/3.x/scala/Matchable.html) is currently a marker trait without any methods. Over time +we might migrate methods `getClass` and `isInstanceOf` to it, since these are closely related to pattern-matching. + +## `Matchable` and Universal Equality + +Methods that pattern-match on selectors of type `Any` will need a cast once the +Matchable warning is turned on. The most common such method is the universal +`equals` method. It will have to be written as in the following example: + +```scala +class C(val x: String): + + override def equals(that: Any): Boolean = + that.asInstanceOf[Matchable] match + case that: C => this.x == that.x + case _ => false +``` + +The cast of `that` to [`Matchable`](https://scala-lang.org/api/3.x/scala/Matchable.html) serves as an indication that universal equality +is unsafe in the presence of abstract types and opaque types since it cannot properly distinguish the meaning of a type from its representation. The cast +is guaranteed to succeed at run-time since `Any` and [`Matchable`](https://scala-lang.org/api/3.x/scala/Matchable.html) both erase to +`Object`. + +For instance, consider the definitions + +```scala +opaque type Meter = Double +def Meter(x: Double): Meter = x + +opaque type Second = Double +def Second(x: Double): Second = x +``` + +Here, universal `equals` will return true for + +```scala + Meter(10).equals(Second(10)) +``` + +even though this is clearly false mathematically. 
With [multiversal equality](../contextual/multiversal-equality.md) one can mitigate that problem somewhat by turning + +```scala + import scala.language.strictEquality + Meter(10) == Second(10) +``` + +into a type error. diff --git a/docs/_spec/TODOreference/other-new-features/opaques-details.md b/docs/_spec/TODOreference/other-new-features/opaques-details.md new file mode 100644 index 000000000000..d7305a249089 --- /dev/null +++ b/docs/_spec/TODOreference/other-new-features/opaques-details.md @@ -0,0 +1,126 @@ +--- +layout: doc-page +title: "Opaque Type Aliases: More Details" +nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/opaques-details.html +--- + +## Syntax + +``` +Modifier ::= ... + | ‘opaque’ +``` + +`opaque` is a [soft modifier](../soft-modifier.md). It can still be used as a normal identifier when it is not in front of a definition keyword. + +Opaque type aliases must be members of classes, traits, or objects, or they are defined +at the top-level. They cannot be defined in local blocks. + +## Type Checking + +The general form of a (monomorphic) opaque type alias is + +```scala +opaque type T >: L <: U = R +``` + +where the lower bound `L` and the upper bound `U` may be missing, in which case they are assumed to be [`scala.Nothing`](https://scala-lang.org/api/3.x/scala/Nothing.html) and [`scala.Any`](https://scala-lang.org/api/3.x/scala/Any.html), respectively. If bounds are given, it is checked that the right-hand side `R` conforms to them, i.e. `L <: R` and `R <: U`. F-bounds are not supported for opaque type aliases: `T` is not allowed to appear in `L` or `U`. + +Inside the scope of the alias definition, the alias is transparent: `T` is treated +as a normal alias of `R`. Outside its scope, the alias is treated as the abstract type +```scala +type T >: L <: U +``` +A special case arises if the opaque type alias is defined in an object. 
Example:
+
+```scala
+object o:
+ opaque type T = R
+```
+
+In this case we have inside the object (also for non-opaque types) that `o.T` is equal to
+`T` or its expanded form `o.this.T`. Equality is understood here as mutual subtyping, i.e.
+`o.T <: o.this.T` and `o.this.T <: o.T`. Furthermore, we have by the rules of opaque type aliases
+that `o.this.T` equals `R`. The two equalities compose. That is, inside `o`, it is
+also known that `o.T` is equal to `R`. This means the following code type-checks:
+
+```scala
+object o:
+ opaque type T = Int
+ val x: Int = id(2)
+def id(x: o.T): o.T = x
+```
+
+Opaque type aliases cannot be `private` and cannot be overridden in subclasses.
+Opaque type aliases cannot have a context function type as right-hand side.
+
+## Type Parameters of Opaque Types
+
+Opaque type aliases can have a single type parameter list. The following aliases
+are well-formed
+```scala
+opaque type F[T] = (T, T)
+opaque type G = [T] =>> List[T]
+```
+but the following are not:
+```scala
+opaque type BadF[T] = [U] =>> (T, U)
+opaque type BadG = [T] =>> [U] => (T, U)
+```
+
+## Translation of Equality
+
+Comparing two values of opaque type with `==` or `!=` normally uses universal equality,
+unless another overloaded `==` or `!=` operator is defined for the type. To avoid
+boxing, the operation is mapped after type checking to the (in-)equality operator
+defined on the underlying type. For instance,
+```scala
+ opaque type T = Int
+
+ ...
+ val x: T
+ val y: T
+ x == y // uses Int equality for the comparison.
+```
+
+## Top-level Opaque Types
+
+An opaque type alias on the top-level is transparent in all other top-level definitions in the sourcefile where it appears, but is opaque in nested
+objects and classes and in all other source files.
Example: +```scala +// in test1.scala +opaque type A = String +val x: A = "abc" + +object obj: + val y: A = "abc" // error: found: "abc", required: A + +// in test2.scala +def z: String = x // error: found: A, required: String +``` +This behavior becomes clear if one recalls that top-level definitions are placed in their own synthetic object. For instance, the code in `test1.scala` would expand to +```scala +object test1$package: + opaque type A = String + val x: A = "abc" + +object obj: + val y: A = "abc" // error: cannot assign "abc" to opaque type alias A +``` +The opaque type alias `A` is transparent in its scope, which includes the definition of `x`, but not the definitions of `obj` and `y`. + + +## Relationship to SIP 35 + +Opaque types in Scala 3 are an evolution from what is described in +[Scala SIP 35](https://docs.scala-lang.org/sips/opaque-types.html). + +The differences compared to the state described in this SIP are: + + 1. Opaque type aliases cannot be defined anymore in local statement sequences. + 2. The scope where an opaque type alias is visible is now the whole scope where + it is defined, instead of just a companion object. + 3. The notion of a companion object for opaque type aliases has been dropped. + 4. Opaque type aliases can have bounds. + 5. The notion of type equality involving opaque type aliases has been clarified. It was + strengthened with respect to the previous implementation of SIP 35. diff --git a/docs/_spec/TODOreference/other-new-features/opaques.md b/docs/_spec/TODOreference/other-new-features/opaques.md new file mode 100644 index 000000000000..d8c4d37bcb3b --- /dev/null +++ b/docs/_spec/TODOreference/other-new-features/opaques.md @@ -0,0 +1,179 @@ +--- +layout: doc-page +title: "Opaque Type Aliases" +nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/opaques.html +--- + +Opaque types aliases provide type abstraction without any overhead. 
Example: + +```scala +object MyMath: + + opaque type Logarithm = Double + + object Logarithm: + + // These are the two ways to lift to the Logarithm type + + def apply(d: Double): Logarithm = math.log(d) + + def safe(d: Double): Option[Logarithm] = + if d > 0.0 then Some(math.log(d)) else None + + end Logarithm + + // Extension methods define opaque types' public APIs + extension (x: Logarithm) + def toDouble: Double = math.exp(x) + def + (y: Logarithm): Logarithm = Logarithm(math.exp(x) + math.exp(y)) + def * (y: Logarithm): Logarithm = x + y + +end MyMath +``` + +This introduces `Logarithm` as a new abstract type, which is implemented as `Double`. +The fact that `Logarithm` is the same as `Double` is only known in the scope where +`Logarithm` is defined, which in the above example corresponds to the object `MyMath`. +Or in other words, within the scope, it is treated as a type alias, but this is opaque to the outside world +where, in consequence, `Logarithm` is seen as an abstract type that has nothing to do with `Double`. + +The public API of `Logarithm` consists of the `apply` and `safe` methods defined in the companion object. +They convert from `Double`s to `Logarithm` values. Moreover, an operation `toDouble` that converts the other way, and operations `+` and `*` are defined as extension methods on `Logarithm` values. +The following operations would be valid because they use functionality implemented in the `MyMath` object. + +```scala +import MyMath.Logarithm + +val l = Logarithm(1.0) +val l2 = Logarithm(2.0) +val l3 = l * l2 +val l4 = l + l2 +``` + +But the following operations would lead to type errors: + +```scala +val d: Double = l // error: found: Logarithm, required: Double +val l2: Logarithm = 1.0 // error: found: Double, required: Logarithm +l * 2 // error: found: Int(2), required: Logarithm +l / l2 // error: `/` is not a member of Logarithm +``` + +## Bounds For Opaque Type Aliases + +Opaque type aliases can also come with bounds. 
Example: + +```scala +object Access: + + opaque type Permissions = Int + opaque type PermissionChoice = Int + opaque type Permission <: Permissions & PermissionChoice = Int + + extension (x: PermissionChoice) + def | (y: PermissionChoice): PermissionChoice = x | y + extension (x: Permissions) + def & (y: Permissions): Permissions = x | y + extension (granted: Permissions) + def is(required: Permissions) = (granted & required) == required + def isOneOf(required: PermissionChoice) = (granted & required) != 0 + + val NoPermission: Permission = 0 + val Read: Permission = 1 + val Write: Permission = 2 + val ReadWrite: Permissions = Read | Write + val ReadOrWrite: PermissionChoice = Read | Write + +end Access +``` + +The `Access` object defines three opaque type aliases: + +- `Permission`, representing a single permission, +- `Permissions`, representing a set of permissions with the meaning "all of these permissions granted", +- `PermissionChoice`, representing a set of permissions with the meaning "at least one of these permissions granted". + +Outside the `Access` object, values of type `Permissions` may be combined using the `&` operator, +where `x & y` means "all permissions in `x` *and* in `y` granted". +Values of type `PermissionChoice` may be combined using the `|` operator, +where `x | y` means "a permission in `x` *or* in `y` granted". + +Note that inside the `Access` object, the `&` and `|` operators always resolve to the corresponding methods of `Int`, +because members always take precedence over extension methods. +For that reason, the `|` extension method in `Access` does not cause infinite recursion. + +In particular, the definition of `ReadWrite` must use `|`, the bitwise operator for `Int`, +even though client code outside `Access` would use `&`, the extension method on `Permissions`. 
+The internal representations of `ReadWrite` and `ReadOrWrite` are identical, but this is not visible to the client, +which is interested only in the semantics of `Permissions`, as in the example below. + +All three opaque type aliases have the same underlying representation type `Int`. The +`Permission` type has an upper bound `Permissions & PermissionChoice`. This makes +it known outside the `Access` object that `Permission` is a subtype of the other +two types. Hence, the following usage scenario type-checks. + +```scala +object User: + import Access.* + + case class Item(rights: Permissions) + extension (item: Item) + def +(other: Item): Item = Item(item.rights & other.rights) + + val roItem = Item(Read) // OK, since Permission <: Permissions + val woItem = Item(Write) + val rwItem = Item(ReadWrite) + val noItem = Item(NoPermission) + + assert(!roItem.rights.is(ReadWrite)) + assert(roItem.rights.isOneOf(ReadOrWrite)) + + assert(rwItem.rights.is(ReadWrite)) + assert(rwItem.rights.isOneOf(ReadOrWrite)) + + assert(!noItem.rights.is(ReadWrite)) + assert(!noItem.rights.isOneOf(ReadOrWrite)) + + assert((roItem + woItem).rights.is(ReadWrite)) +end User +``` +On the other hand, the call `roItem.rights.isOneOf(ReadWrite)` would give a type error: +```scala + assert(roItem.rights.isOneOf(ReadWrite)) + ^^^^^^^^^ + Found: (Access.ReadWrite : Access.Permissions) + Required: Access.PermissionChoice +``` +`Permissions` and `PermissionChoice` are different, unrelated types outside `Access`. + + +## Opaque Type Members on Classes +While typically, opaque types are used together with objects to hide implementation details of a module, they can also be used with classes. + +For example, we can redefine the above example of Logarithms as a class. 
+```scala +class Logarithms: + + opaque type Logarithm = Double + + def apply(d: Double): Logarithm = math.log(d) + + def safe(d: Double): Option[Logarithm] = + if d > 0.0 then Some(math.log(d)) else None + + def mul(x: Logarithm, y: Logarithm) = x + y +``` + +Opaque type members of different instances are treated as different: +```scala +val l1 = new Logarithms +val l2 = new Logarithms +val x = l1(1.5) +val y = l1(2.6) +val z = l2(3.1) +l1.mul(x, y) // type checks +l1.mul(x, z) // error: found l2.Logarithm, required l1.Logarithm +``` +In general, one can think of an opaque type as being only transparent in the scope of `private[this]`. + +[More details](opaques-details.md) diff --git a/docs/_spec/TODOreference/other-new-features/open-classes.md b/docs/_spec/TODOreference/other-new-features/open-classes.md new file mode 100644 index 000000000000..764c234df599 --- /dev/null +++ b/docs/_spec/TODOreference/other-new-features/open-classes.md @@ -0,0 +1,80 @@ +--- +layout: doc-page +title: "Open Classes" +nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/open-classes.html +--- + +An `open` modifier on a class signals that the class is planned for extensions. Example: +```scala +// File Writer.scala +package p + +open class Writer[T]: + + /** Sends to stdout, can be overridden */ + def send(x: T) = println(x) + + /** Sends all arguments using `send` */ + def sendAll(xs: T*) = xs.foreach(send) +end Writer + +// File EncryptedWriter.scala +package p + +class EncryptedWriter[T: Encryptable] extends Writer[T]: + override def send(x: T) = super.send(encrypt(x)) +``` +An open class typically comes with some documentation that describes +the internal calling patterns between methods of the class as well as hooks that can be overridden. We call this the _extension contract_ of the class. It is different from the _external contract_ between a class and its users. 
+ +Classes that are not open can still be extended, but only if at least one of two alternative conditions is met: + + - The extending class is in the same source file as the extended class. In this case, the extension is usually an internal implementation matter. + + - The language feature [`adhocExtensions`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$adhocExtensions$.html) is enabled for the extending class. This is typically enabled by an import clause in the source file of the extension: + ```scala + import scala.language.adhocExtensions + ``` + Alternatively, the feature can be enabled by the compiler option `-language:adhocExtensions`. + If the feature is not enabled, the compiler will issue a "feature" warning. For instance, if the `open` modifier on class `Writer` is dropped, compiling `EncryptedWriter` would produce a warning: + ``` + -- Feature Warning: EncryptedWriter.scala:6:14 ---- + |class EncryptedWriter[T: Encryptable] extends Writer[T] + | ^ + |Unless class Writer is declared 'open', its extension + | in a separate file should be enabled + |by adding the import clause 'import scala.language.adhocExtensions' + |or by setting the compiler option -language:adhocExtensions. + ``` + +## Motivation + +When writing a class, there are three possible expectations of extensibility: + +1. The class is intended to allow extensions. This means one should expect +a carefully worked out and documented extension contract for the class. + +2. Extensions of the class are forbidden, for instance to make correctness or security guarantees. + +3. There is no firm decision either way. The class is not _a priori_ intended for extensions, but if others find it useful to extend on an _ad-hoc_ basis, let them go ahead. However, they are on their own in this case. There is no documented extension contract, and future versions of the class might break the extensions (by rearranging internal call patterns, for instance). 
+ +The three cases are clearly distinguished by using `open` for (1), `final` for (2) and no modifier for (3). + +It is good practice to avoid _ad-hoc_ extensions in a code base, since they tend to lead to fragile systems that are hard to evolve. But there +are still some situations where these extensions are useful: for instance, +to mock classes in tests, or to apply temporary patches that add features or fix bugs in library classes. That's why _ad-hoc_ extensions are permitted, but only if there is an explicit opt-in via a language feature import. + +## Details + + - `open` is a soft modifier. It is treated as a normal identifier + unless it is in modifier position. + - An `open` class cannot be `final` or `sealed`. + - Traits or `abstract` classes are always `open`, so `open` is redundant for them. + +## Relationship with `sealed` + +A class that is neither `abstract` nor `open` is similar to a `sealed` class: it can still be extended, but only in the same source file. The difference is what happens if an extension of the class is attempted in another source file. For a `sealed` class, this is an error, whereas for a simple non-open class, this is still permitted provided the [`adhocExtensions`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$adhocExtensions$.html) feature is enabled, and it gives a warning otherwise. + +## Migration + +`open` is a new modifier in Scala 3. To allow cross compilation between Scala 2.13 and Scala 3.0 without warnings, the feature warning for ad-hoc extensions is produced only under `-source future`. It will be produced by default from Scala 3.1 on. 
diff --git a/docs/_spec/TODOreference/other-new-features/other-new-features.md b/docs/_spec/TODOreference/other-new-features/other-new-features.md new file mode 100644 index 000000000000..974a8548cb68 --- /dev/null +++ b/docs/_spec/TODOreference/other-new-features/other-new-features.md @@ -0,0 +1,7 @@ +--- +layout: index +title: "Other New Features" +nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features.html +--- + +The following pages document new features of Scala 3. diff --git a/docs/_spec/TODOreference/other-new-features/parameter-untupling-spec.md b/docs/_spec/TODOreference/other-new-features/parameter-untupling-spec.md new file mode 100644 index 000000000000..e5165550fc0d --- /dev/null +++ b/docs/_spec/TODOreference/other-new-features/parameter-untupling-spec.md @@ -0,0 +1,89 @@ +--- +layout: doc-page +title: "Parameter Untupling - More Details" +nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/parameter-untupling-spec.html +--- + +## Motivation + +Say you have a list of pairs + +```scala +val xs: List[(Int, Int)] +``` + +and you want to map `xs` to a list of `Int`s so that each pair of numbers is mapped to their sum. +Previously, the best way to do this was with a pattern-matching decomposition: + +```scala +xs.map { + case (x, y) => x + y +} +``` +While correct, this is inconvenient. Instead, we propose to write it the following way: + +```scala +xs.map { + (x, y) => x + y +} +``` + +or, equivalently: + +```scala +xs.map(_ + _) +``` + +Generally, a function value with `n > 1` parameters can be converted to a function with tupled arguments if the expected type is a unary function type of the form `((T_1, ..., T_n)) => U`. + +## Type Checking + +The type checking happens in two steps: + +1. Check whether parameter untupling is feasible +2. 
Adapt the function and type check it + +### Feasibility Check + +Suppose a function `f` of the form `(p1, ..., pn) => e` (where `n > 1`), with `p1, ..., pn` as parameters and `e` as function body. + +If the expected type for checking `f` is a fully defined function type of the form `TupleN[T1, ..., Tn] => R` (or an equivalent SAM-type), where each type `Ti` fits the corresponding parameter `pi`, then `f` is feasible for parameter untupling with the expected type `TupleN[T1, ..., Tn] => R`. + +A type `Ti` fits a parameter `pi` if one of the following two cases is `true`: + +* `pi` comes without a type, i.e. it is a simple identifier or `_`. +* `pi` is of the form `x: Ui` or `_: Ui` and `Ti <: Ui`. + +Parameter untupling composes with eta-expansion. That is, an n-ary function generated by eta-expansion can in turn be adapted to the expected type with parameter untupling. + +### Term adaptation + +If the function + +```scala +(p1, ..., pn) => e +``` + +is feasible for parameter untupling with the expected type `TupleN[T1, ..., Tn] => Te`, then continue to type check the following adapted function + +```scala +(x: TupleN[T1, ..., Tn]) => + def p1: T1 = x._1 + ... + def pn: Tn = x._n + e +``` + +with the same expected type. + +## Migration + +Code like this could not be written before, hence the new notation is not ambiguous after adoption. + +It is possible that someone has written an implicit conversion from `(T1, ..., Tn) => R` to `TupleN[T1, ..., Tn] => R` for some `n`. +Such a conversion is now only useful for general conversions of function values, when parameter untupling is not applicable. +Some care is required to implement the conversion efficiently. +Obsolete conversions could be detected and fixed by [`Scalafix`](https://scalacenter.github.io/scalafix/). + +## Reference + +For more information, see [Issue #897](https://github.com/lampepfl/dotty/issues/897). 
diff --git a/docs/_spec/TODOreference/other-new-features/parameter-untupling.md b/docs/_spec/TODOreference/other-new-features/parameter-untupling.md new file mode 100644 index 000000000000..fcc1fa11d519 --- /dev/null +++ b/docs/_spec/TODOreference/other-new-features/parameter-untupling.md @@ -0,0 +1,77 @@ +--- +layout: doc-page +title: "Parameter Untupling" +nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/parameter-untupling.html +--- + +Say you have a list of pairs + +```scala +val xs: List[(Int, Int)] +``` + +and you want to map `xs` to a list of `Int`s so that each pair of numbers is mapped to +their sum. Previously, the best way to do this was with a pattern-matching decomposition: + +```scala +xs map { + case (x, y) => x + y +} +``` + +While correct, this is also inconvenient and confusing, since the `case` +suggests that the pattern match could fail. As a shorter and clearer alternative Scala 3 now allows + +```scala +xs.map { + (x, y) => x + y +} +``` + +or, equivalently: + +```scala +xs.map(_ + _) +``` +and +```scala +def combine(i: Int, j: Int) = i + j +xs.map(combine) +``` + +Generally, a function value with `n > 1` parameters is wrapped in a +function type of the form `((T_1, ..., T_n)) => U` if that is the expected type. +The tuple parameter is decomposed and its elements are passed directly to the underlying function. + +More specifically, the adaptation is applied to the mismatching formal +parameter list. In particular, the adaptation is not a conversion +between function types. 
That is why the following is not accepted: + +```scala +val combiner: (Int, Int) => Int = _ + _ +xs.map(combiner) // Type Mismatch +``` + +The function value must be explicitly tupled, rather than the parameters untupled: +```scala +xs.map(combiner.tupled) +``` + +A conversion may be provided in user code: + +```scala +import scala.language.implicitConversions +transparent inline implicit def `fallback untupling`(f: (Int, Int) => Int): ((Int, Int)) => Int = + p => f(p._1, p._2) // use specialized apply instead of unspecialized `tupled` +xs.map(combiner) +``` + +Parameter untupling is attempted before conversions are applied, so that a conversion in scope +cannot subvert untupling. + +## Reference + +For more information see: + +* [More details](./parameter-untupling-spec.md) +* [Issue #897](https://github.com/lampepfl/dotty/issues/897). diff --git a/docs/_spec/TODOreference/other-new-features/safe-initialization.md b/docs/_spec/TODOreference/other-new-features/safe-initialization.md new file mode 100644 index 000000000000..757038eac786 --- /dev/null +++ b/docs/_spec/TODOreference/other-new-features/safe-initialization.md @@ -0,0 +1,343 @@ +--- +layout: doc-page +title: "Safe Initialization" +nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/safe-initialization.html +--- + +Scala 3 implements experimental safe initialization check, which can be enabled by the compiler option `-Ysafe-init`. + +The design and implementation of the initialization checker is described in the +paper _Safe object initialization, abstractly_ [3]. + +## A Quick Glance + +To get a feel of how it works, we first show several examples below. 
+ +### Parent-Child Interaction + +Given the following code snippet: + +``` scala +abstract class AbstractFile: + def name: String + val extension: String = name.substring(4) + +class RemoteFile(url: String) extends AbstractFile: + val localFile: String = s"${url.##}.tmp" // error: usage of `localFile` before it's initialized + def name: String = localFile +``` + +The checker will report: + +``` scala +-- Warning: tests/init/neg/AbstractFile.scala:7:4 ------------------------------ +7 | val localFile: String = s"${url.##}.tmp" // error: usage of `localFile` before it's initialized + | ^ + | Access non-initialized field value localFile. Calling trace: + | -> val extension: String = name.substring(4) [ AbstractFile.scala:3 ] + | -> def name: String = localFile [ AbstractFile.scala:8 ] +``` + +### Inner-Outer Interaction + +Given the code below: + +``` scala +object Trees: + class ValDef { counter += 1 } + class EmptyValDef extends ValDef + val theEmptyValDef = new EmptyValDef + private var counter = 0 // error +``` + +The checker will report: + +``` scala +-- Warning: tests/init/neg/trees.scala:5:14 ------------------------------------ +5 | private var counter = 0 // error + | ^ + | Access non-initialized field variable counter. Calling trace: + | -> val theEmptyValDef = new EmptyValDef [ trees.scala:4 ] + | -> class EmptyValDef extends ValDef [ trees.scala:3 ] + | -> class ValDef { counter += 1 } [ trees.scala:2 ] +``` + +### Functions + +Given the code below: + +``` scala +abstract class Parent: + val f: () => String = () => this.message + def message: String + +class Child extends Parent: + val a = f() + val b = "hello" // error + def message: String = b +``` + +The checker reports: + +``` scala +-- Warning: tests/init/neg/features-high-order.scala:7:6 ----------------------- +7 | val b = "hello" // error + | ^ + |Access non-initialized field value b. 
Calling trace: + | -> val a = f() [ features-high-order.scala:6 ] + | -> val f: () => String = () => this.message [ features-high-order.scala:2 ] + | -> def message: String = b [ features-high-order.scala:8 ] +``` +## Design Goals + +We establish the following design goals: + +- __Sound__: checking always terminates, and is sound for common and reasonable usage (over-approximation) +- __Expressive__: support common and reasonable initialization patterns +- __Friendly__: simple rules, minimal syntactic overhead, informative error messages +- __Modular__: modular checking, no analysis beyond project boundary +- __Fast__: instant feedback +- __Simple__: no changes to core type system, explainable by a simple theory + +By _reasonable usage_, we include the following use cases (but not restricted to them): + +- Access fields on `this` and outer `this` during initialization +- Call methods on `this` and outer `this` during initialization +- Instantiate inner class and call methods on such instances during initialization +- Capture fields in functions + +## Principles + +To achieve the goals, we uphold the following fundamental principles: +_stackability_, _monotonicity_, _scopability_ and _authority_. + +Stackability means that all fields of a class are initialized at the end of the +class body. Scala enforces this property in syntax by demanding that all fields +are initialized at the end of the primary constructor, except for the language +feature below: + +``` scala +var x: T = _ +``` + +Control effects such as exceptions may break this property, as the +following example shows: + +``` scala +class MyException(val b: B) extends Exception("") +class A: + val b = try { new B } catch { case myEx: MyException => myEx.b } + println(b.a) + +class B: + throw new MyException(this) + val a: Int = 1 +``` + +In the code above, the control effect teleport the uninitialized value +wrapped in an exception. 
In the implementation, we avoid the problem +by ensuring that the values that are thrown must be transitively initialized. + +Monotonicity means that the initialization status of an object should +not go backward: initialized fields continue to be initialized, a +field points to an initialized object may not later point to an +object under initialization. As an example, the following code will be rejected: + +``` scala +trait Reporter: + def report(msg: String): Unit + +class FileReporter(ctx: Context) extends Reporter: + ctx.typer.reporter = this // ctx now reaches an uninitialized object + val file: File = new File("report.txt") + def report(msg: String) = file.write(msg) +``` + +In the code above, suppose `ctx` points to a transitively initialized +object. Now the assignment at line 3 makes `this`, which is not fully +initialized, reachable from `ctx`. This makes field usage dangerous, +as it may indirectly reach uninitialized fields. + +Monotonicity is based on a well-known technique called _heap monotonic +typestate_ to ensure soundness in the presence of aliasing +[1]. Roughly speaking, it means initialization state should not go backwards. + +Scopability means that there are no side channels to access to partially +constructed objects. Control effects like coroutines, delimited +control, resumable exceptions may break the property, as they can transport a +value upper in the stack (not in scope) to be reachable from the current scope. +Static fields can also serve as a teleport thus breaks this property. In the +implementation, we need to enforce that teleported values are transitively +initialized. + +The three principles above contribute to _local reasoning about initialization_, +which means: + +> An initialized environment can only produce initialized values. + +For example, if the arguments to an `new`-expression are transitively +initialized, so is the result. 
If the receiver and arguments in a method call +are transitively initialized, so is the result. + +Local reasoning about initialization gives rise to a fast initialization +checker, as it avoids whole-program analysis. + +The principle of authority goes hand-in-hand with monotonicity: the principle +of monotonicity stipulates that initialization states cannot go backwards, while +the principle of authority stipulates that the initialization states may not +go forward at arbitrary locations due to aliasing. In Scala, we may only +advance initialization states of objects in the class body when a field is +defined with a mandatory initializer or at local reasoning points when the object +becomes transitively initialized. + +## Abstract Values + +There are three fundamental abstractions for initialization states of objects: + +- __Cold__: A cold object may have uninitialized fields. +- __Warm__: A warm object has all its fields initialized but may reach _cold_ objects. +- __Hot__: A hot object is transitively initialized, i.e., it only reaches warm objects. + +In the initialization checker, the abstraction `Warm` is refined to handle inner +classes and multiple constructors: + +- __Warm[C] { outer = V, ctor, args = Vs }__: A warm object of class `C`, where the immediate outer of `C` is `V`, the constructor is `ctor` and constructor arguments are `Vs`. + +The initialization checker checks each concrete class separately. The abstraction `ThisRef` +represents the current object under initialization: + +- __ThisRef[C]__: The current object of class `C` under initialization. + +The initialization state of the current object is stored in the abstract heap as an +abstract object. The abstract heap also serves as a cache for the field values +of warm objects. `Warm` and `ThisRef` are "addresses" of the abstract objects stored +in the abstract heap. 
+ +Two more abstractions are introduced to support functions and conditional +expressions: + +- __Fun(e, V, C)__: An abstract function value where `e` is the code, `V` is the + abstract value for `this` inside the function body and the function is located + inside the class `C`. + +- __Refset(Vs)__: A set of abstract values `Vs`. + +A value `v` is _effectively hot_ if any of the following is true: + +- `v` is `Hot`. +- `v` is `ThisRef` and all fields of the underlying object are assigned. +- `v` is `Warm[C] { ... }` and + 1. `C` does not contain inner classes; and + 2. Calling any method on `v` encounters no initialization errors and the method return value is _effectively hot_; and + 3. Each field of `v` is _effectively hot_. +- `v` is `Fun(e, V, C)` and calling the function encounters no errors and the + function return value is _effectively hot_. +- The root object (referred by `ThisRef`) is _effectively hot_. + +An effectively hot value can be regarded as transitively initialized thus can +be safely leaked via method arguments or as RHS of reassignment. +The initialization checker tries to promote non-hot values to effectively hot +whenever possible. + +## Rules + +With the established principles and design goals, the following rules are imposed: + +1. The field access `e.f` or method call `e.m()` is illegal if `e` is _cold_. + + A cold value should not be used. + +2. The field access `e.f` is invalid if `e` has the value `ThisRef` and `f` is not initialized. + +3. In an assignment `o.x = e`, the expression `e` must be _effectively hot_. + + This is how monotonicity is enforced in the system. Note that in an + initialization `val f: T = e`, the expression `e` may point to a non-hot + value. + +4. Arguments to method calls must be _effectively hot_. + + Escape of `this` in the constructor is commonly regarded as an anti-pattern. + + However, passing non-hot values as argument to another constructor is allowed, to support + creation of cyclic data structures. 
The checker will ensure that the escaped + non-initialized object is not used, i.e. calling methods or accessing fields + on the escaped object is not allowed. + + An exception is for calling synthetic `apply`s of case classes. For example, + the method call `Some.apply(e)` will be interpreted as `new Some(e)`, thus + is valid even if `e` is not hot. + + Another exception to this rule is parametric method calls. For example, in + `List.apply(e)`, the argument `e` may be non-hot. If that is the case, the + result value of the parametric method call is taken as _cold_. + +5. Method calls on hot values with effectively hot arguments produce hot results. + + This rule is assured by local reasoning about initialization. + +6. Method calls on `ThisRef` and warm values will be resolved statically and the + corresponding method bodies are checked. + +7. In a new expression `new p.C(args)`, if the values of `p` and `args` are + effectively hot, then the result value is also hot. + + This is assured by local reasoning about initialization. + +8. In a new expression `new p.C(args)`, if any value of `p` and `args` is not + effectively hot, then the result value takes the form `Warm[C] { outer = Vp, args = Vargs }`. The initialization code for the class `C` is checked again to make + sure the non-hot values are used properly. + + In the above, `Vp` is the widened value of `p` --- the widening happens if `p` + is a warm value `Warm[D] { outer = V, args }` and we widen it to + `Warm[D] { outer = Cold, args }`. + + The variable `Vargs` represents values of `args` with non-hot values widened + to `Cold`. + + The motivation for the widening is to finitize the abstract domain and ensure + termination of the initialization check. + +9. The scrutinee in a pattern match and the values in return and throw statements must be _effectively hot_. + +## Modularity + +The analysis takes the primary constructor of concrete classes as entry points. 
+It follows the constructors of super classes, which might be defined in another project. +The analysis takes advantage of TASTy for analyzing super classes defined in another project. + +The crossing of project boundary raises a concern about modularity. It is +well-known in object-oriented programming that superclass and subclass are +tightly coupled. For example, adding a method in the superclass requires +recompiling the child class for checking safe overriding. + +Initialization is no exception in this respect. The initialization of an object +essentially involves close interaction between subclass and superclass. If the +superclass is defined in another project, the crossing of project boundary +cannot be avoided for soundness of the analysis. + +Meanwhile, inheritance across project boundary has been under scrutiny and the +introduction of [open classes](./open-classes.md) mitigates the concern here. +For example, the initialization check could enforce that the constructors of +open classes may not contain method calls on `this` or introduce annotations as +a contract. + +The feedback from the community on the topic is welcome. + +## Back Doors + +Occasionally you may want to suppress warnings reported by the +checker. You can either write `e: @unchecked` to tell the checker to +skip checking for the expression `e`, or you may use the old trick: +mark some fields as lazy. + +## Caveats + +- The system cannot provide safety guarantees when extending Java or Scala 2 classes. +- Safe initialization of global objects is only partially checked. + +## References + +1. Fähndrich, M. and Leino, K.R.M., 2003, July. [_Heap monotonic typestates_](https://www.microsoft.com/en-us/research/publication/heap-monotonic-typestate/). In International Workshop on Aliasing, Confinement and Ownership in object-oriented programming (IWACO). +2. Fengyun Liu, Ondřej Lhoták, Aggelos Biboudis, Paolo G. Giarrusso, and Martin Odersky. 
[_A type-and-effect system for object initialization_](https://dl.acm.org/doi/10.1145/3428243). OOPSLA, 2020. +3. Fengyun Liu, Ondřej Lhoták, Enze Xing, Nguyen Cao Pham. [_Safe object initialization, abstractly_](https://dl.acm.org/doi/10.1145/3486610.3486895). Scala 2021. diff --git a/docs/_spec/TODOreference/other-new-features/targetName.md b/docs/_spec/TODOreference/other-new-features/targetName.md new file mode 100644 index 000000000000..63c4cf1ec0df --- /dev/null +++ b/docs/_spec/TODOreference/other-new-features/targetName.md @@ -0,0 +1,118 @@ +--- +layout: doc-page +title: "The @targetName annotation" +nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/targetName.html +--- + +A [`@targetName`](https://scala-lang.org/api/3.x/scala/annotation/targetName.html) annotation on a definition defines an alternate name for the implementation of that definition. Example: + +```scala +import scala.annotation.targetName + +object VecOps: + extension [T](xs: Vec[T]) + @targetName("append") + def ++= [T] (ys: Vec[T]): Vec[T] = ... +``` + +Here, the `++=` operation is implemented (in Byte code or native code) under the name `append`. The implementation name affects the code that is generated, and is the name under which code from other languages can call the method. For instance, `++=` could be invoked from Java like this: + +```java +VecOps.append(vec1, vec2) +``` + +The [`@targetName`](https://scala-lang.org/api/3.x/scala/annotation/targetName.html) annotation has no bearing on Scala usages. Any application of that method in Scala has to use `++=`, not `append`. + +## Details + + 1. `@targetName` is defined in package `scala.annotation`. It takes a single argument + of type `String`. That string is called the _external name_ of the definition + that's annotated. + + 2. A `@targetName` annotation can be given for all kinds of definitions except a top-level `class`, `trait`, or `object`. + + 3. 
The name given in a [`@targetName`](https://scala-lang.org/api/3.x/scala/annotation/targetName.html) annotation must be a legal name + for the defined entities on the host platform. + + 4. It is recommended that definitions with symbolic names have a [`@targetName`](https://scala-lang.org/api/3.x/scala/annotation/targetName.html) annotation. This will establish an alternate name that is easier to search for and + will avoid cryptic encodings in runtime diagnostics. + + 5. Definitions with names in backticks that are not legal host platform names + should also have a [`@targetName`](https://scala-lang.org/api/3.x/scala/annotation/targetName.html) annotation. + +## Relationship with Overriding + +[`@targetName`](https://scala-lang.org/api/3.x/scala/annotation/targetName.html) annotations are significant for matching two method definitions to decide whether they conflict or override each other. Two method definitions match if they have the same name, signature, and erased name. Here, + +- The _signature_ of a definition consists of the names of the erased types of all (value-) parameters and the method's result type. +- The _erased name_ of a method definition is its target name if a [`@targetName`](https://scala-lang.org/api/3.x/scala/annotation/targetName.html) annotation is given and its defined name otherwise. + +This means that `@targetName` annotations can be used to disambiguate two method definitions that would otherwise clash. For instance. + +```scala +def f(x: => String): Int = x.length +def f(x: => Int): Int = x + 1 // error: double definition +``` + +The two definitions above clash since their erased parameter types are both [`Function0`](https://scala-lang.org/api/3.x/scala/Function0.html), which is the type of the translation of a by-name-parameter. Hence they have the same names and signatures. 
But we can avoid the clash by adding a [`@targetName`](https://scala-lang.org/api/3.x/scala/annotation/targetName.html) annotation to either method or to both of them. Example: + +```scala +@targetName("f_string") +def f(x: => String): Int = x.length +def f(x: => Int): Int = x + 1 // OK +``` + +This will produce methods `f_string` and `f` in the generated code. + +However, [`@targetName`](https://scala-lang.org/api/3.x/scala/annotation/targetName.html) annotations are not allowed to break overriding relationships +between two definitions that have otherwise the same names and types. So the following would be in error: + +```scala +import annotation.targetName +class A: + def f(): Int = 1 +class B extends A: + @targetName("g") def f(): Int = 2 +``` + +The compiler reports here: + +``` +-- Error: test.scala:6:23 ------------------------------------------------------ +6 | @targetName("g") def f(): Int = 2 + | ^ + |error overriding method f in class A of type (): Int; + | method f of type (): Int should not have a @targetName + | annotation since the overridden member hasn't one either +``` + +The relevant overriding rules can be summarized as follows: + +- Two members can override each other if their names and signatures are the same, + and they either have the same erased names or the same types. +- If two members override, then both their erased names and their types must be the same. + +As usual, any overriding relationship in the generated code must also +be present in the original code. So the following example would also be in error: + +```scala +import annotation.targetName +class A: + def f(): Int = 1 +class B extends A: + @targetName("f") def g(): Int = 2 +``` + +Here, the original methods `g` and `f` do not override each other since they have +different names. 
But once we switch to target names, there is a clash that is reported by the compiler: + +``` +-- [E120] Naming Error: test.scala:4:6 ----------------------------------------- +4 |class B extends A: + | ^ + | Name clash between defined and inherited member: + | def f(): Int in class A at line 3 and + | def g(): Int in class B at line 5 + | have the same name and type after erasure. +1 error found +``` diff --git a/docs/_spec/TODOreference/other-new-features/threadUnsafe-annotation.md b/docs/_spec/TODOreference/other-new-features/threadUnsafe-annotation.md new file mode 100644 index 000000000000..ae1af1e4b671 --- /dev/null +++ b/docs/_spec/TODOreference/other-new-features/threadUnsafe-annotation.md @@ -0,0 +1,18 @@ +--- +layout: doc-page +title: "The @threadUnsafe annotation" +nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/threadUnsafe-annotation.html +--- + +A new annotation [`@threadUnsafe`](https://scala-lang.org/api/3.x/scala/annotation/threadUnsafe.html) can be used on a field which defines +a `lazy val`. When this annotation is used, the initialization of the +[`lazy val`](../changed-features/lazy-vals-init.md) will use a faster mechanism which is not thread-safe. + +## Example + +```scala +import scala.annotation.threadUnsafe + +class Hello: + @threadUnsafe lazy val x: Int = 1 +``` diff --git a/docs/_spec/TODOreference/other-new-features/transparent-traits.md b/docs/_spec/TODOreference/other-new-features/transparent-traits.md new file mode 100644 index 000000000000..699ce0b9ddd8 --- /dev/null +++ b/docs/_spec/TODOreference/other-new-features/transparent-traits.md @@ -0,0 +1,70 @@ +--- +layout: doc-page +title: "Transparent Traits" +nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/transparent-traits.html +--- + +Traits are used in two roles: + + 1. As mixins for other classes and traits + 2. 
As types of vals, defs, or parameters + +Some traits are used primarily in the first role, and we usually do not want to see them in inferred types. An example is the [`Product`](https://scala-lang.org/api/3.x/scala/Product.html) trait that the compiler adds as a mixin trait to every case class or case object. In Scala 2, this parent trait sometimes makes inferred types more complicated than they should be. Example: + +```scala +trait Kind +case object Var extends Kind +case object Val extends Kind +val x = Set(if condition then Val else Var) +``` + +Here, the inferred type of `x` is `Set[Kind & Product & Serializable]` whereas one would have hoped it to be `Set[Kind]`. The reasoning for this particular type to be inferred is as follows: + +- The type of the conditional above is the [union type](../new-types/union-types.md) `Val | Var`. +- A union type is widened in type inference to the least supertype that is not a union type. + In the example, this type is `Kind & Product & Serializable` since all three traits are traits of both `Val` and `Var`. + So that type becomes the inferred element type of the set. + +Scala 3 allows one to mark a mixin trait as `transparent`, which means that it can be suppressed in type inference. Here's an example that follows the lines of the code above, but now with a new transparent trait `S` instead of `Product`: + +```scala +transparent trait S +trait Kind +object Var extends Kind, S +object Val extends Kind, S +val x = Set(if condition then Val else Var) +``` + +Now `x` has inferred type `Set[Kind]`. The common transparent trait `S` does not +appear in the inferred type. 
+ +## Transparent Traits + +The traits [`scala.Product`](https://scala-lang.org/api/3.x/scala/Product.html), [`java.io.Serializable`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/io/Serializable.html) and [`java.lang.Comparable`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/lang/Comparable.html) +are treated automatically as transparent. Other traits are turned into transparent traits using the modifier `transparent`. Scala 2 traits can also be made transparent +by adding a [`@transparentTrait`](https://scala-lang.org/api/3.x/scala/annotation/transparentTrait.html) annotation. This annotation is defined in [`scala.annotation`](https://scala-lang.org/api/3.x/scala/annotation.html). It will be deprecated and phased out once Scala 2/3 interoperability is no longer needed. + +Typically, transparent traits are traits +that influence the implementation of inheriting classes and traits that are not usually used as types by themselves. Two examples from the standard collection library are: + +- [`IterableOps`](https://scala-lang.org/api/3.x/scala/collection/IterableOps.html), which provides method implementations for an [`Iterable`](https://scala-lang.org/api/3.x/scala/collection/Iterable.html). +- [`StrictOptimizedSeqOps`](https://scala-lang.org/api/3.x/scala/collection/StrictOptimizedSeqOps.html), which optimises some of these implementations for sequences with efficient indexing. + +Generally, any trait that is extended recursively is a good candidate to be +declared transparent. + +## Rules for Inference + +Transparent traits can be given as explicit types as usual. But they are often elided when types are inferred. Roughly, the rules for type inference say that transparent traits are dropped from intersections where possible. 
+ +The precise rules are as follows: + +- When inferring a type of a type variable, or the type of a val, or the return type of a def, +- where that type is not higher-kinded, +- and where `B` is its known upper bound or `Any` if none exists: +- If the type inferred so far is of the form `T1 & ... & Tn` where + `n >= 1`, replace the maximal number of transparent `Ti`s by `Any`, while ensuring that + the resulting type is still a subtype of the bound `B`. +- However, do not perform this widening if all transparent traits `Ti` can get replaced in that way. + +The last clause ensures that a single transparent trait instance such as [`Product`](https://scala-lang.org/api/3.x/scala/Product.html) is not widened to [`Any`](https://scala-lang.org/api/3.x/scala/Any.html). Transparent trait instances are only dropped when they appear in conjunction with some other type. diff --git a/docs/_spec/TODOreference/other-new-features/type-test.md b/docs/_spec/TODOreference/other-new-features/type-test.md new file mode 100644 index 000000000000..ec7a87230753 --- /dev/null +++ b/docs/_spec/TODOreference/other-new-features/type-test.md @@ -0,0 +1,181 @@ +--- +layout: doc-page +title: "TypeTest" +nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/type-test.html +--- + +## TypeTest + +When pattern matching there are two situations where a runtime type test must be performed. +The first case is an explicit type test using the ascription pattern notation. + +```scala +(x: X) match + case y: Y => +``` + +The second case is when an extractor takes an argument that is not a subtype of the scrutinee type. + +```scala +(x: X) match + case y @ Y(n) => + +object Y: + def unapply(x: Y): Some[Int] = ... +``` + +In both cases, a class test will be performed at runtime. +But when the type test is on an abstract type (type parameter or type member), the test cannot be performed because the type is erased at runtime. 
+ +A [`TypeTest`](https://scala-lang.org/api/3.x/scala/reflect/TypeTest.html) can be provided to make this test possible. + +```scala +package scala.reflect + +trait TypeTest[-S, T]: + def unapply(s: S): Option[s.type & T] +``` + +It provides an extractor that returns its argument typed as a `T` if the argument is a `T`. +It can be used to encode a type test. + +```scala +def f[X, Y](x: X)(using tt: TypeTest[X, Y]): Option[Y] = x match + case tt(x @ Y(1)) => Some(x) + case tt(x) => Some(x) + case _ => None +``` + +To avoid the syntactic overhead the compiler will look for a type test automatically if it detects that the type test is on abstract types. +This means that `x: Y` is transformed to `tt(x)` and `x @ Y(_)` to `tt(x @ Y(_))` if there is a contextual `TypeTest[X, Y]` in scope. +The previous code is equivalent to + +```scala +def f[X, Y](x: X)(using TypeTest[X, Y]): Option[Y] = x match + case x @ Y(1) => Some(x) + case x: Y => Some(x) + case _ => None +``` + +We could create a type test at call site where the type test can be performed with runtime class tests directly as follows + +```scala +val tt: TypeTest[Any, String] = + new TypeTest[Any, String]: + def unapply(s: Any): Option[s.type & String] = s match + case s: String => Some(s) + case _ => None + +f[AnyRef, String]("acb")(using tt) +``` + +The compiler will synthesize a new instance of a type test if none is found in scope as: + +```scala +new TypeTest[A, B]: + def unapply(s: A): Option[s.type & B] = s match + case s: B => Some(s) + case _ => None +``` + +If the type tests cannot be done there will be an unchecked warning that will be raised on the `case s: B =>` test. + +The most common [`TypeTest`](https://scala-lang.org/api/3.x/scala/reflect/TypeTest.html) instances are the ones that take any parameters (i.e. `TypeTest[Any, T]`). 
+To make it possible to use such instances directly in context bounds we provide the alias + +```scala +package scala.reflect + +type Typeable[T] = TypeTest[Any, T] +``` + +This alias can be used as + +```scala +def f[T: Typeable]: Boolean = + "abc" match + case x: T => true + case _ => false + +f[String] // true +f[Int] // false +``` + +## TypeTest and ClassTag + +[`TypeTest`](https://scala-lang.org/api/3.x/scala/reflect/TypeTest.html) is a replacement for functionality provided previously by `ClassTag.unapply`. +Using [`ClassTag`](https://scala-lang.org/api/3.x/scala/reflect/ClassTag.html) instances was unsound since classtags can check only the class component of a type. +[`TypeTest`](https://scala-lang.org/api/3.x/scala/reflect/TypeTest.html) fixes that unsoundness. +[`ClassTag`](https://scala-lang.org/api/3.x/scala/reflect/ClassTag.html) type tests are still supported but a warning will be emitted after 3.0. + + +## Example + +Given the following abstract definition of Peano numbers that provides two given instances of types `TypeTest[Nat, Zero]` and `TypeTest[Nat, Succ]` + +```scala +import scala.reflect.* + +trait Peano: + type Nat + type Zero <: Nat + type Succ <: Nat + + def safeDiv(m: Nat, n: Succ): (Nat, Nat) + + val Zero: Zero + + val Succ: SuccExtractor + trait SuccExtractor: + def apply(nat: Nat): Succ + def unapply(succ: Succ): Some[Nat] + + given typeTestOfZero: TypeTest[Nat, Zero] + given typeTestOfSucc: TypeTest[Nat, Succ] +``` + +together with an implementation of Peano numbers based on type `Int` + +```scala +object PeanoInt extends Peano: + type Nat = Int + type Zero = Int + type Succ = Int + + def safeDiv(m: Nat, n: Succ): (Nat, Nat) = (m / n, m % n) + + val Zero: Zero = 0 + + val Succ: SuccExtractor = new: + def apply(nat: Nat): Succ = nat + 1 + def unapply(succ: Succ) = Some(succ - 1) + + def typeTestOfZero: TypeTest[Nat, Zero] = new: + def unapply(x: Nat): Option[x.type & Zero] = + if x == 0 then Some(x) else None + + def typeTestOfSucc: 
TypeTest[Nat, Succ] = new: + def unapply(x: Nat): Option[x.type & Succ] = + if x > 0 then Some(x) else None +``` + +it is possible to write the following program + +```scala +@main def test = + import PeanoInt.* + + def divOpt(m: Nat, n: Nat): Option[(Nat, Nat)] = + n match + case Zero => None + case s @ Succ(_) => Some(safeDiv(m, s)) + + val two = Succ(Succ(Zero)) + val five = Succ(Succ(Succ(two))) + + println(divOpt(five, two)) // prints "Some((2,1))" + println(divOpt(two, five)) // prints "Some((0,2))" + println(divOpt(two, Zero)) // prints "None" +``` + +Note that without the `TypeTest[Nat, Succ]` the pattern `Succ.unapply(nat: Succ)` would be unchecked. diff --git a/docs/_spec/TODOreference/overview.md b/docs/_spec/TODOreference/overview.md new file mode 100644 index 000000000000..b1e8281dfc16 --- /dev/null +++ b/docs/_spec/TODOreference/overview.md @@ -0,0 +1,155 @@ +--- +layout: doc-page +title: "Reference" +nightlyOf: https://docs.scala-lang.org/scala3/reference/overview.html +redirectFrom: overview.html +--- + +Scala 3 implements many language changes and improvements over Scala 2. +In this reference, we discuss design decisions and present important differences compared to Scala 2. + +## Goals + +The language redesign was guided by three main goals: + +- Strengthen Scala's foundations. + Make the full programming language compatible with the foundational work on the + [DOT calculus](https://infoscience.epfl.ch/record/227176/files/soundness_oopsla16.pdf) + and apply the lessons learned from that work. +- Make Scala easier and safer to use. + Tame powerful constructs such as implicits to provide a gentler learning curve. Remove warts and puzzlers. +- Further improve the consistency and expressiveness of Scala's language constructs. 
+ +Corresponding to these goals, the language changes fall into seven categories: +(1) Core constructs to strengthen foundations, (2) simplifications and (3) [restrictions](#restrictions), to make the language easier and safer to use, (4) [dropped constructs](#dropped-constructs) to make the language smaller and more regular, (5) [changed constructs](#changes) to remove warts, and increase consistency and usability, (6) [new constructs](#new-constructs) to fill gaps and increase expressiveness, (7) a new, principled approach to metaprogramming that replaces [Scala 2 experimental macros](https://docs.scala-lang.org/overviews/macros/overview.html). + +## Essential Foundations + +These new constructs directly model core features of DOT, higher-kinded types, and the [SI calculus for implicit resolution](https://infoscience.epfl.ch/record/229878/files/simplicitly_1.pdf). + +- [Intersection types](new-types/intersection-types.md), replacing compound types, +- [Union types](new-types/union-types.md), +- [Type lambdas](new-types/type-lambdas.md), replacing encodings using structural types and type projection. +- [Context functions](contextual/context-functions.md), offering abstraction over given parameters. + +## Simplifications + +These constructs replace existing constructs with the aim of making the language safer and simpler to use, and to promote uniformity in code style. + +- [Trait parameters](other-new-features/trait-parameters.md) + replace [early initializers](dropped-features/early-initializers.md) with a more generally useful construct. +- [Given instances](contextual/givens.md) + replace implicit objects and defs, focussing on intent over mechanism. +- [Using clauses](contextual/using-clauses.md) + replace implicit parameters, avoiding their ambiguities. +- [Extension methods](contextual/extension-methods.md) + replace implicit classes with a clearer and simpler mechanism. 
+- [Opaque type aliases](other-new-features/opaques.md) + replace most uses of value classes while guaranteeing the absence of boxing. +- [Top-level definitions](dropped-features/package-objects.md) + replace package objects, dropping syntactic boilerplate. +- [Export clauses](other-new-features/export.md) + provide a simple and general way to express aggregation, which can replace + the previous facade pattern of package objects inheriting from classes. +- [Vararg splices](changed-features/vararg-splices.md) + now use the form `xs*` in function arguments and patterns instead of `xs: _*` and `xs @ _*`, +- [Universal apply methods](other-new-features/creator-applications.md) + allow using simple function call syntax instead of `new` expressions. `new` expressions stay around + as a fallback for the cases where creator applications cannot be used. + +With the exception of [early initializers](dropped-features/early-initializers.md) and old-style vararg patterns, all superseded constructs continue to be available in Scala 3.0. The plan is to deprecate and phase them out later. + +Value classes (superseded by opaque type aliases) are a special case. There are currently no deprecation plans for value classes, since we might bring them back in a more general form if they are supported natively by the JVM as is planned by [project Valhalla](https://openjdk.java.net/projects/valhalla/). + +## Restrictions + +These constructs are restricted to make the language safer. + +- [Implicit Conversions](contextual/conversions.md): + there is only one way to define implicit conversions instead of many, and potentially surprising implicit conversions require a language import. +- [Given Imports](contextual/given-imports.md): + implicits now require a special form of import, to make the import clearly visible. +- [Type Projection](dropped-features/type-projection.md): + only classes can be used as prefix `C` of a type projection `C#A`. 
Type projection on abstract types is no longer supported since it is unsound. +- [Multiversal Equality](contextual/multiversal-equality.md): + implement an "opt-in" scheme to rule out nonsensical comparisons with `==` and `!=`. +- [infix](changed-features/operators.md): + make method application syntax uniform across code bases. + +Unrestricted implicit conversions continue to be available in Scala 3.0, but will be deprecated and removed later. Unrestricted versions of the other constructs in the list above are available only under `-source 3.0-migration`. + +## Dropped Constructs + +These constructs are proposed to be dropped without a new construct replacing them. The motivation for dropping these constructs is to simplify the language and its implementation. + +- [DelayedInit](dropped-features/delayed-init.md), +- [Existential types](dropped-features/existential-types.md), +- [Procedure syntax](dropped-features/procedure-syntax.md), +- [Class shadowing](dropped-features/class-shadowing.md), +- [XML literals](dropped-features/xml.md), +- [Symbol literals](dropped-features/symlits.md), +- [Auto application](dropped-features/auto-apply.md), +- [Weak conformance](dropped-features/weak-conformance.md), +- Compound types (replaced by [Intersection types](new-types/intersection-types.md)), +- [Auto tupling](https://github.com/lampepfl/dotty/pull/4311) (implemented, but not merged). + +The date when these constructs are dropped varies. The current status is: + +- Not implemented at all: + - DelayedInit, existential types, weak conformance. +- Supported under `-source 3.0-migration`: + - procedure syntax, class shadowing, symbol literals, auto application, auto tupling in a restricted form. +- Supported in 3.0, to be deprecated and phased out later: + - [XML literals](dropped-features/xml.md), compound types. + +## Changes + +These constructs have undergone changes to make them more regular and useful. 
+ +- [Structural Types](changed-features/structural-types.md): + They now allow pluggable implementations, which greatly increases their usefulness. Some usage patterns are restricted compared to the status quo. +- [Name-based pattern matching](changed-features/pattern-matching.md): + The existing undocumented Scala 2 implementation has been codified in a slightly simplified form. +- [Automatic Eta expansion](changed-features/eta-expansion.md): + Eta expansion is now performed universally also in the absence of an expected type. The postfix `_` operator is thus made redundant. It will be deprecated and dropped after Scala 3.0. +- [Implicit Resolution](changed-features/implicit-resolution.md): + The implicit resolution rules have been cleaned up to make them more useful and less surprising. Implicit scope is restricted to no longer include package prefixes. + +Most aspects of old-style implicit resolution are still available under `-source 3.0-migration`. The other changes in this list are applied unconditionally. + +## New Constructs + +These are additions to the language that make it more powerful or pleasant to use. + +- [Enums](enums/enums.md) provide concise syntax for enumerations and [algebraic data types](enums/adts.md). +- [Parameter untupling](other-new-features/parameter-untupling.md) avoids having to use `case` for tupled parameter destructuring. +- [Dependent function types](new-types/dependent-function-types.md) generalize dependent methods to dependent function values and types. +- [Polymorphic function types](new-types/polymorphic-function-types.md) generalize polymorphic methods to polymorphic function values and types. + _Current status_: There is a proposal and a merged prototype implementation, but the implementation has not been finalized (it is notably missing type inference support). +- [Kind polymorphism](other-new-features/kind-polymorphism.md) allows the definition of operators working equally on types and type constructors. 
+- [`@targetName` annotations](other-new-features/targetName.md) make it easier to interoperate with code written in other languages and give more flexibility for avoiding name clashes. + +## Metaprogramming + +The following constructs together aim to put metaprogramming in Scala on a new basis. So far, metaprogramming was achieved by a combination of macros and libraries such as [Shapeless](https://github.com/milessabin/shapeless) that were in turn based on some key macros. Current Scala 2 macro mechanisms are a thin veneer on top of the current Scala 2 compiler, which makes them fragile and in many cases impossible to port to Scala 3. + +It's worth noting that macros were never included in the [Scala 2 language specification](https://scala-lang.org/files/archive/spec/2.13/) and were so far made available only under an `-experimental` flag. This has not prevented their widespread usage. + +To enable porting most uses of macros, we are experimenting with the advanced language constructs listed below. These designs are more provisional than the rest of the proposed language constructs for Scala 3.0. There might still be some changes until the final release. Stabilizing the feature set needed for metaprogramming is our first priority. + +- [Match Types](new-types/match-types.md) + allow computation on types. +- [Inline](metaprogramming/inline.md) + provides by itself a straightforward implementation of some simple macros and is at the same time an essential building block for the implementation of complex macros. +- [Quotes and Splices](metaprogramming/macros.md) + provide a principled way to express macros and staging with a unified set of abstractions. +- [Type class derivation](contextual/derivation.md) + provides an in-language implementation of the `Gen` macro in Shapeless and other foundational libraries. The new implementation is more robust, efficient and easier to use than the macro. 
+- [By-name context parameters](contextual/by-name-context-parameters.md) + provide a more robust in-language implementation of the `Lazy` macro in [Shapeless](https://github.com/milessabin/shapeless). + +## See Also + +[A classification of proposed language features](./features-classification.md) is +an expanded version of this page that adds the status (i.e. relative importance to be a part of Scala 3, and relative urgency when to decide this) and expected migration cost +of each language construct. diff --git a/docs/_spec/TODOreference/soft-modifier.md b/docs/_spec/TODOreference/soft-modifier.md new file mode 100644 index 000000000000..c1329ebab1f0 --- /dev/null +++ b/docs/_spec/TODOreference/soft-modifier.md @@ -0,0 +1,27 @@ +--- +layout: doc-page +title: "Soft Keywords" +nightlyOf: https://docs.scala-lang.org/scala3/reference/soft-modifier.html +--- + +A soft modifier is one of the identifiers `infix`, `inline`, `opaque`, `open` and `transparent`. + +A soft keyword is a soft modifier, or one of `as`, `derives`, `end`, `extension`, `throws`, `using`, `|`, `+`, `-`, `*` + +A soft modifier is treated as a potential modifier of a definition if it is followed by a hard modifier or a keyword combination starting a definition (`def`, `val`, `var`, `type`, `given`, `class`, `trait`, `object`, `enum`, `case class`, `case object`). Between the two words there may be a sequence of newline tokens and soft modifiers. + +Otherwise, soft keywords are treated specially in the following situations: + + - `inline`, if it is followed by any token that can start an expression. + - `derives`, if it appears after an extension clause or after + the name and possibly parameters of a class, trait, object, or enum definition. + - `end`, if it appears at the start of a line following a statement (i.e. definition or toplevel expression) + - `extension`, if it appears at the start of a statement and is followed by `(` or `[`. 
+ - `using`, if it appears at the start of a parameter or argument list. + - `as`, in a renaming import clause. + - `|`, if it separates two patterns in an alternative. + - `+`, `-`, if they appear in front of a type parameter. + - `*`, in a wildcard import, or if it follows the type of a parameter, or if it appears in + a vararg splice `x*`. + +Everywhere else a soft keyword is treated as a normal identifier. diff --git a/docs/_spec/_config.yml b/docs/_spec/_config.yml new file mode 100644 index 000000000000..3cb9c8f0d440 --- /dev/null +++ b/docs/_spec/_config.yml @@ -0,0 +1,11 @@ +baseurl: /files/archive/spec/2.13 +latestScalaVersion: 2.13 +thisScalaVersion: 2.13 +versionCompareMessage: "an upcoming" +safe: true +lsi: false +highlighter: false +markdown: redcarpet +encoding: utf-8 +redcarpet: + extensions: ["no_intra_emphasis", "fenced_code_blocks", "autolink", "tables", "with_toc_data", "strikethrough", "lax_spacing", "space_after_headers", "superscript", "footnotes", "disable_indented_code_blocks"] diff --git a/docs/_spec/_includes/numbering.css b/docs/_spec/_includes/numbering.css new file mode 100644 index 000000000000..2a22ce28b558 --- /dev/null +++ b/docs/_spec/_includes/numbering.css @@ -0,0 +1,60 @@ +h1 { + /* must reset here */ + counter-reset: chapter {{ page.chapter }}; +} +h1:before { + /* and must reset again here */ + counter-reset: chapter {{ page.chapter }}; + content: "Chapter " counter(chapter); + display: block; +} + +h2 { + /* must increment here */ + counter-increment: section; + counter-reset: subsection; +} +h2:before { + /* and must reset again here */ + counter-reset: chapter {{ page.chapter }}; + + content: counter(chapter) "." counter(section) ; + display: inline; + margin-right: 1em; +} +h2:after { + /* can only have one counter-reset per tag, so can't do it in h2/h2:before... 
*/ + counter-reset: example; +} + +h3 { + /* must increment here */ + counter-increment: subsection; +} +h3:before { + /* and must reset again here */ + counter-reset: chapter {{ page.chapter }}; + + content: counter(chapter) "." counter(section) "." counter(subsection); + display: inline; + margin-right: 1em; +} +h3[id*='example'] { + /* must increment here */ + counter-increment: example; + display: inline; +} +h3[id*='example']:before { + /* and must reset again here */ + counter-reset: chapter {{ page.chapter }}; + + content: "Example " counter(chapter) "." counter(section) "." counter(example); + display: inline; + margin-right: 1em; +} + +.no-numbering, .no-numbering:before, .no-numbering:after { + content: normal; + counter-reset: none; + counter-increment: none; +} diff --git a/docs/_spec/_includes/table-of-contents.yml b/docs/_spec/_includes/table-of-contents.yml new file mode 100644 index 000000000000..b70f97da5424 --- /dev/null +++ b/docs/_spec/_includes/table-of-contents.yml @@ -0,0 +1,23 @@ + +
+ +

Table of Contents

+ +
    + {% assign sorted_pages = site.pages | sort:"name" %} + {% for post in sorted_pages %} + + {% if post.chapter >= 0 %} +
  1. + {{ post.title }} +
  2. + {% endif %} + {% endfor %} +
+
+ + diff --git a/docs/_spec/_includes/version-notice.yml b/docs/_spec/_includes/version-notice.yml new file mode 100644 index 000000000000..5a7286631c11 --- /dev/null +++ b/docs/_spec/_includes/version-notice.yml @@ -0,0 +1,3 @@ +{% if site.thisScalaVersion != site.latestScalaVersion %} +
This is the specification of {{ site.versionCompareMessage }} version of Scala. See the Scala {{ site.latestScalaVersion }} spec.
+{% endif %} diff --git a/docs/_spec/_layouts/default.yml b/docs/_spec/_layouts/default.yml new file mode 100644 index 000000000000..2589a105dff2 --- /dev/null +++ b/docs/_spec/_layouts/default.yml @@ -0,0 +1,51 @@ + + + + + + + + + + + + + + + + + + + + + {{ page.title }} | Scala {{ site.thisScalaVersion }} + + + +
+ +
+ + +
+{% include version-notice.yml %} +{{ content }} +
+ + + + + + + diff --git a/docs/_spec/_layouts/toc.yml b/docs/_spec/_layouts/toc.yml new file mode 100644 index 000000000000..1106222bd088 --- /dev/null +++ b/docs/_spec/_layouts/toc.yml @@ -0,0 +1,34 @@ + + + + + + + + + + {{ page.title }} | Scala {{ site.thisScalaVersion }} + + + + + + + + +
+
+ + Scala Language Specification + Edit at GitHub +
+
Version {{ site.thisScalaVersion }}
+
+
+{% include version-notice.yml %} +{{ content }} +
+ + + + diff --git a/docs/_spec/docker-compose.yml b/docs/_spec/docker-compose.yml new file mode 100644 index 000000000000..3eadc939ed40 --- /dev/null +++ b/docs/_spec/docker-compose.yml @@ -0,0 +1,11 @@ +version: '2' + +services: + jekyll: + user: "${UID}:${GID}" + build: . + command: sh -c "chown $UID / && bundle exec jekyll serve --incremental --host=0.0.0.0 " + ports: + - '4000:4000' + volumes: + - .:/srv/jekyll diff --git a/docs/_spec/index.md b/docs/_spec/index.md new file mode 100644 index 000000000000..df126db7bd44 --- /dev/null +++ b/docs/_spec/index.md @@ -0,0 +1,55 @@ +--- +title: Scala Language Specification +layout: toc +--- + +{% include table-of-contents.yml %} + +#### Authors and Contributors + +Martin Odersky, Philippe Altherr, Vincent Cremet, Gilles Dubochet, Burak Emir, Philipp Haller, Stéphane Micheloud, Nikolay Mihaylov, Adriaan Moors, Lukas Rytz, Michel Schinz, Erik Stenman, Matthias Zenger + +Markdown Conversion by Iain McGinniss. + +#### Preface + +Scala is a Java-like programming language which unifies +object-oriented and functional programming. It is a pure +object-oriented language in the sense that every value is an +object. Types and behavior of objects are described by +classes. Classes can be composed using mixin composition. Scala is +designed to work seamlessly with less pure but mainstream +object-oriented languages like Java. + +Scala is a functional language in the sense that every function is a +value. Nesting of function definitions and higher-order functions are +naturally supported. Scala also supports a general notion of pattern +matching which can model the algebraic types used in many functional +languages. + +Scala has been designed to interoperate seamlessly with Java. +Scala classes can call Java methods, create Java objects, inherit from Java +classes and implement Java interfaces. None of this requires interface +definitions or glue code. 
+ +Scala has been developed from 2001 in the programming methods +laboratory at EPFL. Version 1.0 was released in November 2003. This +document describes the second version of the language, which was +released in March 2006. It acts as a reference for the language +definition and some core library modules. It is not intended to teach +Scala or its concepts; for this there are [other documents](14-references.html). + +Scala has been a collective effort of many people. The design and the +implementation of version 1.0 was completed by Philippe Altherr, +Vincent Cremet, Gilles Dubochet, Burak Emir, Stéphane Micheloud, +Nikolay Mihaylov, Michel Schinz, Erik Stenman, Matthias Zenger, and +the author. Iulian Dragos, Gilles Dubochet, Philipp Haller, Sean +McDirmid, Lex Spoon, and Geoffrey Washburn joined in the effort to +develop the second version of the language and tools. Gilad Bracha, +Craig Chambers, Erik Ernst, Matthias Felleisen, Shriram Krishnamurthi, +Gary Leavens, Sebastian Maneth, Erik Meijer, Klaus Ostermann, Didier +Rémy, Mads Torgersen, and Philip Wadler have shaped the design of +the language through lively and inspiring discussions and comments on +previous versions of this document. The contributors to the Scala +mailing list have also given very useful feedback that helped us +improve the language and its tools. 
diff --git a/docs/_spec/public/favicon.ico b/docs/_spec/public/favicon.ico new file mode 100644 index 000000000000..9eb6ef516488 Binary files /dev/null and b/docs/_spec/public/favicon.ico differ diff --git a/docs/_spec/public/fonts/Heuristica-Bold.woff b/docs/_spec/public/fonts/Heuristica-Bold.woff new file mode 100644 index 000000000000..904579683d54 Binary files /dev/null and b/docs/_spec/public/fonts/Heuristica-Bold.woff differ diff --git a/docs/_spec/public/fonts/Heuristica-BoldItalic.woff b/docs/_spec/public/fonts/Heuristica-BoldItalic.woff new file mode 100644 index 000000000000..a3c523445375 Binary files /dev/null and b/docs/_spec/public/fonts/Heuristica-BoldItalic.woff differ diff --git a/docs/_spec/public/fonts/Heuristica-Regular.woff b/docs/_spec/public/fonts/Heuristica-Regular.woff new file mode 100644 index 000000000000..f5c1f8b2dbc5 Binary files /dev/null and b/docs/_spec/public/fonts/Heuristica-Regular.woff differ diff --git a/docs/_spec/public/fonts/Heuristica-RegularItalic.woff b/docs/_spec/public/fonts/Heuristica-RegularItalic.woff new file mode 100644 index 000000000000..d2c8664593dc Binary files /dev/null and b/docs/_spec/public/fonts/Heuristica-RegularItalic.woff differ diff --git a/docs/_spec/public/fonts/LuxiMono-Bold.woff b/docs/_spec/public/fonts/LuxiMono-Bold.woff new file mode 100644 index 000000000000..8581bb5aa458 Binary files /dev/null and b/docs/_spec/public/fonts/LuxiMono-Bold.woff differ diff --git a/docs/_spec/public/fonts/LuxiMono-BoldOblique.woff b/docs/_spec/public/fonts/LuxiMono-BoldOblique.woff new file mode 100644 index 000000000000..607ccf5cd030 Binary files /dev/null and b/docs/_spec/public/fonts/LuxiMono-BoldOblique.woff differ diff --git a/docs/_spec/public/fonts/LuxiMono-Regular.woff b/docs/_spec/public/fonts/LuxiMono-Regular.woff new file mode 100644 index 000000000000..a478ad9ef2dd Binary files /dev/null and b/docs/_spec/public/fonts/LuxiMono-Regular.woff differ diff --git 
a/docs/_spec/public/fonts/LuxiMono-RegularOblique.woff b/docs/_spec/public/fonts/LuxiMono-RegularOblique.woff new file mode 100644 index 000000000000..26999f990fa9 Binary files /dev/null and b/docs/_spec/public/fonts/LuxiMono-RegularOblique.woff differ diff --git a/docs/_spec/public/fonts/LuxiSans-Bold.woff b/docs/_spec/public/fonts/LuxiSans-Bold.woff new file mode 100644 index 000000000000..162621568b53 Binary files /dev/null and b/docs/_spec/public/fonts/LuxiSans-Bold.woff differ diff --git a/docs/_spec/public/fonts/LuxiSans-Regular.woff b/docs/_spec/public/fonts/LuxiSans-Regular.woff new file mode 100644 index 000000000000..89d980218f7a Binary files /dev/null and b/docs/_spec/public/fonts/LuxiSans-Regular.woff differ diff --git a/docs/_spec/public/images/classhierarchy.pdf b/docs/_spec/public/images/classhierarchy.pdf new file mode 100644 index 000000000000..58e050174b65 Binary files /dev/null and b/docs/_spec/public/images/classhierarchy.pdf differ diff --git a/docs/_spec/public/images/classhierarchy.png b/docs/_spec/public/images/classhierarchy.png new file mode 100644 index 000000000000..3da25ecbf2d5 Binary files /dev/null and b/docs/_spec/public/images/classhierarchy.png differ diff --git a/docs/_spec/public/images/github-logo@2x.png b/docs/_spec/public/images/github-logo@2x.png new file mode 100644 index 000000000000..285b0fee2f32 Binary files /dev/null and b/docs/_spec/public/images/github-logo@2x.png differ diff --git a/docs/_spec/public/images/scala-spiral-white.png b/docs/_spec/public/images/scala-spiral-white.png new file mode 100644 index 000000000000..46aaf80824c1 Binary files /dev/null and b/docs/_spec/public/images/scala-spiral-white.png differ diff --git a/docs/_spec/public/octicons/LICENSE.txt b/docs/_spec/public/octicons/LICENSE.txt new file mode 100644 index 000000000000..259b43d14de3 --- /dev/null +++ b/docs/_spec/public/octicons/LICENSE.txt @@ -0,0 +1,9 @@ +(c) 2012-2014 GitHub + +When using the GitHub logos, be sure to follow the GitHub logo 
guidelines (https://github.com/logos) + +Font License: SIL OFL 1.1 (http://scripts.sil.org/OFL) +Applies to all font files + +Code License: MIT (http://choosealicense.com/licenses/mit/) +Applies to all other files diff --git a/docs/_spec/public/octicons/octicons.css b/docs/_spec/public/octicons/octicons.css new file mode 100644 index 000000000000..a5dcd153a856 --- /dev/null +++ b/docs/_spec/public/octicons/octicons.css @@ -0,0 +1,235 @@ +@font-face { + font-family: 'octicons'; + src: url('octicons.eot?#iefix') format('embedded-opentype'), + url('octicons.woff') format('woff'), + url('octicons.ttf') format('truetype'), + url('octicons.svg#octicons') format('svg'); + font-weight: normal; + font-style: normal; +} + +/* + +.octicon is optimized for 16px. +.mega-octicon is optimized for 32px but can be used larger. + +*/ +.octicon, .mega-octicon { + font: normal normal normal 16px/1 octicons; + display: inline-block; + text-decoration: none; + text-rendering: auto; + -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; + -webkit-user-select: none; + -moz-user-select: none; + -ms-user-select: none; + user-select: none; +} +.mega-octicon { font-size: 32px; } + + +.octicon-alert:before { content: '\f02d'} /*  */ +.octicon-alignment-align:before { content: '\f08a'} /*  */ +.octicon-alignment-aligned-to:before { content: '\f08e'} /*  */ +.octicon-alignment-unalign:before { content: '\f08b'} /*  */ +.octicon-arrow-down:before { content: '\f03f'} /*  */ +.octicon-arrow-left:before { content: '\f040'} /*  */ +.octicon-arrow-right:before { content: '\f03e'} /*  */ +.octicon-arrow-small-down:before { content: '\f0a0'} /*  */ +.octicon-arrow-small-left:before { content: '\f0a1'} /*  */ +.octicon-arrow-small-right:before { content: '\f071'} /*  */ +.octicon-arrow-small-up:before { content: '\f09f'} /*  */ +.octicon-arrow-up:before { content: '\f03d'} /*  */ +.octicon-beer:before { content: '\f069'} /*  */ +.octicon-book:before { content: '\f007'} 
/*  */ +.octicon-bookmark:before { content: '\f07b'} /*  */ +.octicon-briefcase:before { content: '\f0d3'} /*  */ +.octicon-broadcast:before { content: '\f048'} /*  */ +.octicon-browser:before { content: '\f0c5'} /*  */ +.octicon-bug:before { content: '\f091'} /*  */ +.octicon-calendar:before { content: '\f068'} /*  */ +.octicon-check:before { content: '\f03a'} /*  */ +.octicon-checklist:before { content: '\f076'} /*  */ +.octicon-chevron-down:before { content: '\f0a3'} /*  */ +.octicon-chevron-left:before { content: '\f0a4'} /*  */ +.octicon-chevron-right:before { content: '\f078'} /*  */ +.octicon-chevron-up:before { content: '\f0a2'} /*  */ +.octicon-circle-slash:before { content: '\f084'} /*  */ +.octicon-circuit-board:before { content: '\f0d6'} /*  */ +.octicon-clippy:before { content: '\f035'} /*  */ +.octicon-clock:before { content: '\f046'} /*  */ +.octicon-cloud-download:before { content: '\f00b'} /*  */ +.octicon-cloud-upload:before { content: '\f00c'} /*  */ +.octicon-code:before { content: '\f05f'} /*  */ +.octicon-color-mode:before { content: '\f065'} /*  */ +.octicon-comment-add:before, +.octicon-comment:before { content: '\f02b'} /*  */ +.octicon-comment-discussion:before { content: '\f04f'} /*  */ +.octicon-credit-card:before { content: '\f045'} /*  */ +.octicon-dash:before { content: '\f0ca'} /*  */ +.octicon-dashboard:before { content: '\f07d'} /*  */ +.octicon-database:before { content: '\f096'} /*  */ +.octicon-device-camera:before { content: '\f056'} /*  */ +.octicon-device-camera-video:before { content: '\f057'} /*  */ +.octicon-device-desktop:before { content: '\f27c'} /*  */ +.octicon-device-mobile:before { content: '\f038'} /*  */ +.octicon-diff:before { content: '\f04d'} /*  */ +.octicon-diff-added:before { content: '\f06b'} /*  */ +.octicon-diff-ignored:before { content: '\f099'} /*  */ +.octicon-diff-modified:before { content: '\f06d'} /*  */ +.octicon-diff-removed:before { content: '\f06c'} /*  */ 
+.octicon-diff-renamed:before { content: '\f06e'} /*  */ +.octicon-ellipsis:before { content: '\f09a'} /*  */ +.octicon-eye-unwatch:before, +.octicon-eye-watch:before, +.octicon-eye:before { content: '\f04e'} /*  */ +.octicon-file-binary:before { content: '\f094'} /*  */ +.octicon-file-code:before { content: '\f010'} /*  */ +.octicon-file-directory:before { content: '\f016'} /*  */ +.octicon-file-media:before { content: '\f012'} /*  */ +.octicon-file-pdf:before { content: '\f014'} /*  */ +.octicon-file-submodule:before { content: '\f017'} /*  */ +.octicon-file-symlink-directory:before { content: '\f0b1'} /*  */ +.octicon-file-symlink-file:before { content: '\f0b0'} /*  */ +.octicon-file-text:before { content: '\f011'} /*  */ +.octicon-file-zip:before { content: '\f013'} /*  */ +.octicon-flame:before { content: '\f0d2'} /*  */ +.octicon-fold:before { content: '\f0cc'} /*  */ +.octicon-gear:before { content: '\f02f'} /*  */ +.octicon-gift:before { content: '\f042'} /*  */ +.octicon-gist:before { content: '\f00e'} /*  */ +.octicon-gist-secret:before { content: '\f08c'} /*  */ +.octicon-git-branch-create:before, +.octicon-git-branch-delete:before, +.octicon-git-branch:before { content: '\f020'} /*  */ +.octicon-git-commit:before { content: '\f01f'} /*  */ +.octicon-git-compare:before { content: '\f0ac'} /*  */ +.octicon-git-merge:before { content: '\f023'} /*  */ +.octicon-git-pull-request-abandoned:before, +.octicon-git-pull-request:before { content: '\f009'} /*  */ +.octicon-globe:before { content: '\f0b6'} /*  */ +.octicon-graph:before { content: '\f043'} /*  */ +.octicon-heart:before { content: '\2665'} /* ♥ */ +.octicon-history:before { content: '\f07e'} /*  */ +.octicon-home:before { content: '\f08d'} /*  */ +.octicon-horizontal-rule:before { content: '\f070'} /*  */ +.octicon-hourglass:before { content: '\f09e'} /*  */ +.octicon-hubot:before { content: '\f09d'} /*  */ +.octicon-inbox:before { content: '\f0cf'} /*  */ 
+.octicon-info:before { content: '\f059'} /*  */ +.octicon-issue-closed:before { content: '\f028'} /*  */ +.octicon-issue-opened:before { content: '\f026'} /*  */ +.octicon-issue-reopened:before { content: '\f027'} /*  */ +.octicon-jersey:before { content: '\f019'} /*  */ +.octicon-jump-down:before { content: '\f072'} /*  */ +.octicon-jump-left:before { content: '\f0a5'} /*  */ +.octicon-jump-right:before { content: '\f0a6'} /*  */ +.octicon-jump-up:before { content: '\f073'} /*  */ +.octicon-key:before { content: '\f049'} /*  */ +.octicon-keyboard:before { content: '\f00d'} /*  */ +.octicon-law:before { content: '\f0d8'} /* */ +.octicon-light-bulb:before { content: '\f000'} /*  */ +.octicon-link:before { content: '\f05c'} /*  */ +.octicon-link-external:before { content: '\f07f'} /*  */ +.octicon-list-ordered:before { content: '\f062'} /*  */ +.octicon-list-unordered:before { content: '\f061'} /*  */ +.octicon-location:before { content: '\f060'} /*  */ +.octicon-gist-private:before, +.octicon-mirror-private:before, +.octicon-git-fork-private:before, +.octicon-lock:before { content: '\f06a'} /*  */ +.octicon-logo-github:before { content: '\f092'} /*  */ +.octicon-mail:before { content: '\f03b'} /*  */ +.octicon-mail-read:before { content: '\f03c'} /*  */ +.octicon-mail-reply:before { content: '\f051'} /*  */ +.octicon-mark-github:before { content: '\f00a'} /*  */ +.octicon-markdown:before { content: '\f0c9'} /*  */ +.octicon-megaphone:before { content: '\f077'} /*  */ +.octicon-mention:before { content: '\f0be'} /*  */ +.octicon-microscope:before { content: '\f089'} /*  */ +.octicon-milestone:before { content: '\f075'} /*  */ +.octicon-mirror-public:before, +.octicon-mirror:before { content: '\f024'} /*  */ +.octicon-mortar-board:before { content: '\f0d7'} /* */ +.octicon-move-down:before { content: '\f0a8'} /*  */ +.octicon-move-left:before { content: '\f074'} /*  */ +.octicon-move-right:before { content: '\f0a9'} /*  */ 
+.octicon-move-up:before { content: '\f0a7'} /*  */ +.octicon-mute:before { content: '\f080'} /*  */ +.octicon-no-newline:before { content: '\f09c'} /*  */ +.octicon-octoface:before { content: '\f008'} /*  */ +.octicon-organization:before { content: '\f037'} /*  */ +.octicon-package:before { content: '\f0c4'} /*  */ +.octicon-paintcan:before { content: '\f0d1'} /*  */ +.octicon-pencil:before { content: '\f058'} /*  */ +.octicon-person-add:before, +.octicon-person-follow:before, +.octicon-person:before { content: '\f018'} /*  */ +.octicon-pin:before { content: '\f041'} /*  */ +.octicon-playback-fast-forward:before { content: '\f0bd'} /*  */ +.octicon-playback-pause:before { content: '\f0bb'} /*  */ +.octicon-playback-play:before { content: '\f0bf'} /*  */ +.octicon-playback-rewind:before { content: '\f0bc'} /*  */ +.octicon-plug:before { content: '\f0d4'} /*  */ +.octicon-repo-create:before, +.octicon-gist-new:before, +.octicon-file-directory-create:before, +.octicon-file-add:before, +.octicon-plus:before { content: '\f05d'} /*  */ +.octicon-podium:before { content: '\f0af'} /*  */ +.octicon-primitive-dot:before { content: '\f052'} /*  */ +.octicon-primitive-square:before { content: '\f053'} /*  */ +.octicon-pulse:before { content: '\f085'} /*  */ +.octicon-puzzle:before { content: '\f0c0'} /*  */ +.octicon-question:before { content: '\f02c'} /*  */ +.octicon-quote:before { content: '\f063'} /*  */ +.octicon-radio-tower:before { content: '\f030'} /*  */ +.octicon-repo-delete:before, +.octicon-repo:before { content: '\f001'} /*  */ +.octicon-repo-clone:before { content: '\f04c'} /*  */ +.octicon-repo-force-push:before { content: '\f04a'} /*  */ +.octicon-gist-fork:before, +.octicon-repo-forked:before { content: '\f002'} /*  */ +.octicon-repo-pull:before { content: '\f006'} /*  */ +.octicon-repo-push:before { content: '\f005'} /*  */ +.octicon-rocket:before { content: '\f033'} /*  */ +.octicon-rss:before { content: '\f034'} /*  */ 
+.octicon-ruby:before { content: '\f047'} /*  */ +.octicon-screen-full:before { content: '\f066'} /*  */ +.octicon-screen-normal:before { content: '\f067'} /*  */ +.octicon-search-save:before, +.octicon-search:before { content: '\f02e'} /*  */ +.octicon-server:before { content: '\f097'} /*  */ +.octicon-settings:before { content: '\f07c'} /*  */ +.octicon-log-in:before, +.octicon-sign-in:before { content: '\f036'} /*  */ +.octicon-log-out:before, +.octicon-sign-out:before { content: '\f032'} /*  */ +.octicon-split:before { content: '\f0c6'} /*  */ +.octicon-squirrel:before { content: '\f0b2'} /*  */ +.octicon-star-add:before, +.octicon-star-delete:before, +.octicon-star:before { content: '\f02a'} /*  */ +.octicon-steps:before { content: '\f0c7'} /*  */ +.octicon-stop:before { content: '\f08f'} /*  */ +.octicon-repo-sync:before, +.octicon-sync:before { content: '\f087'} /*  */ +.octicon-tag-remove:before, +.octicon-tag-add:before, +.octicon-tag:before { content: '\f015'} /*  */ +.octicon-telescope:before { content: '\f088'} /*  */ +.octicon-terminal:before { content: '\f0c8'} /*  */ +.octicon-three-bars:before { content: '\f05e'} /*  */ +.octicon-tools:before { content: '\f031'} /*  */ +.octicon-trashcan:before { content: '\f0d0'} /*  */ +.octicon-triangle-down:before { content: '\f05b'} /*  */ +.octicon-triangle-left:before { content: '\f044'} /*  */ +.octicon-triangle-right:before { content: '\f05a'} /*  */ +.octicon-triangle-up:before { content: '\f0aa'} /*  */ +.octicon-unfold:before { content: '\f039'} /*  */ +.octicon-unmute:before { content: '\f0ba'} /*  */ +.octicon-versions:before { content: '\f064'} /*  */ +.octicon-remove-close:before, +.octicon-x:before { content: '\f081'} /*  */ +.octicon-zap:before { content: '\26A1'} /* ⚡ */ diff --git a/docs/_spec/public/octicons/octicons.eot b/docs/_spec/public/octicons/octicons.eot new file mode 100644 index 000000000000..22881a8b6c43 Binary files /dev/null and 
b/docs/_spec/public/octicons/octicons.eot differ diff --git a/docs/_spec/public/octicons/octicons.svg b/docs/_spec/public/octicons/octicons.svg new file mode 100644 index 000000000000..ea3e0f161528 --- /dev/null +++ b/docs/_spec/public/octicons/octicons.svg @@ -0,0 +1,198 @@ + + + + +(c) 2012-2014 GitHub + +When using the GitHub logos, be sure to follow the GitHub logo guidelines (https://github.com/logos) + +Font License: SIL OFL 1.1 (http://scripts.sil.org/OFL) +Applies to all font files + +Code License: MIT (http://choosealicense.com/licenses/mit/) +Applies to all other files + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/_spec/public/octicons/octicons.ttf b/docs/_spec/public/octicons/octicons.ttf new file mode 100644 index 000000000000..189ca2813d49 Binary files /dev/null and b/docs/_spec/public/octicons/octicons.ttf differ diff --git a/docs/_spec/public/octicons/octicons.woff b/docs/_spec/public/octicons/octicons.woff new file mode 100644 index 000000000000..2b770e429f38 Binary files /dev/null and b/docs/_spec/public/octicons/octicons.woff differ diff --git a/docs/_spec/public/scripts/LICENSE-highlight b/docs/_spec/public/scripts/LICENSE-highlight new file mode 100644 index 000000000000..fe2f67b1628e --- /dev/null +++ b/docs/_spec/public/scripts/LICENSE-highlight @@ -0,0 +1,24 @@ +Copyright (c) 2006, Ivan Sagalaev +All rights reserved. +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. 
+ * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of highlight.js nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND ANY +EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE REGENTS AND CONTRIBUTORS BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/docs/_spec/public/scripts/LICENSE-toc b/docs/_spec/public/scripts/LICENSE-toc new file mode 100644 index 000000000000..4e236e8696c3 --- /dev/null +++ b/docs/_spec/public/scripts/LICENSE-toc @@ -0,0 +1,18 @@ +(The MIT License) +Copyright (c) 2013 Greg Allen +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. 
+THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/docs/_spec/public/scripts/highlight.pack.js b/docs/_spec/public/scripts/highlight.pack.js new file mode 100644 index 000000000000..bfeca09abb51 --- /dev/null +++ b/docs/_spec/public/scripts/highlight.pack.js @@ -0,0 +1 @@ +var hljs=new function(){function j(v){return v.replace(/&/gm,"&").replace(//gm,">")}function t(v){return v.nodeName.toLowerCase()}function h(w,x){var v=w&&w.exec(x);return v&&v.index==0}function r(w){var v=(w.className+" "+(w.parentNode?w.parentNode.className:"")).split(/\s+/);v=v.map(function(x){return x.replace(/^lang(uage)?-/,"")});return v.filter(function(x){return i(x)||/no(-?)highlight/.test(x)})[0]}function o(x,y){var v={};for(var w in x){v[w]=x[w]}if(y){for(var w in y){v[w]=y[w]}}return v}function u(x){var v=[];(function w(y,z){for(var A=y.firstChild;A;A=A.nextSibling){if(A.nodeType==3){z+=A.nodeValue.length}else{if(A.nodeType==1){v.push({event:"start",offset:z,node:A});z=w(A,z);if(!t(A).match(/br|hr|img|input/)){v.push({event:"stop",offset:z,node:A})}}}}return z})(x,0);return v}function q(w,y,C){var x=0;var F="";var z=[];function B(){if(!w.length||!y.length){return w.length?w:y}if(w[0].offset!=y[0].offset){return(w[0].offset"}function E(G){F+=""}function v(G){(G.event=="start"?A:E)(G.node)}while(w.length||y.length){var 
D=B();F+=j(C.substr(x,D[0].offset-x));x=D[0].offset;if(D==w){z.reverse().forEach(E);do{v(D.splice(0,1)[0]);D=B()}while(D==w&&D.length&&D[0].offset==x);z.reverse().forEach(A)}else{if(D[0].event=="start"){z.push(D[0].node)}else{z.pop()}v(D.splice(0,1)[0])}}return F+j(C.substr(x))}function m(y){function v(z){return(z&&z.source)||z}function w(A,z){return RegExp(v(A),"m"+(y.cI?"i":"")+(z?"g":""))}function x(D,C){if(D.compiled){return}D.compiled=true;D.k=D.k||D.bK;if(D.k){var z={};var E=function(G,F){if(y.cI){F=F.toLowerCase()}F.split(" ").forEach(function(H){var I=H.split("|");z[I[0]]=[G,I[1]?Number(I[1]):1]})};if(typeof D.k=="string"){E("keyword",D.k)}else{Object.keys(D.k).forEach(function(F){E(F,D.k[F])})}D.k=z}D.lR=w(D.l||/\b[A-Za-z0-9_]+\b/,true);if(C){if(D.bK){D.b="\\b("+D.bK.split(" ").join("|")+")\\b"}if(!D.b){D.b=/\B|\b/}D.bR=w(D.b);if(!D.e&&!D.eW){D.e=/\B|\b/}if(D.e){D.eR=w(D.e)}D.tE=v(D.e)||"";if(D.eW&&C.tE){D.tE+=(D.e?"|":"")+C.tE}}if(D.i){D.iR=w(D.i)}if(D.r===undefined){D.r=1}if(!D.c){D.c=[]}var B=[];D.c.forEach(function(F){if(F.v){F.v.forEach(function(G){B.push(o(F,G))})}else{B.push(F=="self"?D:F)}});D.c=B;D.c.forEach(function(F){x(F,D)});if(D.starts){x(D.starts,C)}var A=D.c.map(function(F){return F.bK?"\\.?("+F.b+")\\.?":F.b}).concat([D.tE,D.i]).map(v).filter(Boolean);D.t=A.length?w(A.join("|"),true):{exec:function(F){return null}}}x(y)}function c(T,L,J,R){function v(V,W){for(var U=0;U";V+=aa+'">';return V+Y+Z}function N(){if(!I.k){return j(C)}var U="";var X=0;I.lR.lastIndex=0;var V=I.lR.exec(C);while(V){U+=j(C.substr(X,V.index-X));var W=E(I,V);if(W){H+=W[1];U+=w(W[0],j(V[0]))}else{U+=j(V[0])}X=I.lR.lastIndex;V=I.lR.exec(C)}return U+j(C.substr(X))}function F(){if(I.sL&&!f[I.sL]){return j(C)}var U=I.sL?c(I.sL,C,true,S):e(C);if(I.r>0){H+=U.r}if(I.subLanguageMode=="continuous"){S=U.top}return w(U.language,U.value,false,true)}function Q(){return I.sL!==undefined?F():N()}function P(W,V){var 
U=W.cN?w(W.cN,"",true):"";if(W.rB){D+=U;C=""}else{if(W.eB){D+=j(V)+U;C=""}else{D+=U;C=V}}I=Object.create(W,{parent:{value:I}})}function G(U,Y){C+=U;if(Y===undefined){D+=Q();return 0}var W=v(Y,I);if(W){D+=Q();P(W,Y);return W.rB?0:Y.length}var X=z(I,Y);if(X){var V=I;if(!(V.rE||V.eE)){C+=Y}D+=Q();do{if(I.cN){D+=""}H+=I.r;I=I.parent}while(I!=X.parent);if(V.eE){D+=j(Y)}C="";if(X.starts){P(X.starts,"")}return V.rE?0:Y.length}if(A(Y,I)){throw new Error('Illegal lexeme "'+Y+'" for mode "'+(I.cN||"")+'"')}C+=Y;return Y.length||1}var M=i(T);if(!M){throw new Error('Unknown language: "'+T+'"')}m(M);var I=R||M;var S;var D="";for(var K=I;K!=M;K=K.parent){if(K.cN){D=w(K.cN,"",true)+D}}var C="";var H=0;try{var B,y,x=0;while(true){I.t.lastIndex=x;B=I.t.exec(L);if(!B){break}y=G(L.substr(x,B.index-x),B[0]);x=B.index+y}G(L.substr(x));for(var K=I;K.parent;K=K.parent){if(K.cN){D+=""}}return{r:H,value:D,language:T,top:I}}catch(O){if(O.message.indexOf("Illegal")!=-1){return{r:0,value:j(L)}}else{throw O}}}function e(y,x){x=x||b.languages||Object.keys(f);var v={r:0,value:j(y)};var w=v;x.forEach(function(z){if(!i(z)){return}var A=c(z,y,false);A.language=z;if(A.r>w.r){w=A}if(A.r>v.r){w=v;v=A}});if(w.language){v.second_best=w}return v}function g(v){if(b.tabReplace){v=v.replace(/^((<[^>]+>|\t)+)/gm,function(w,z,y,x){return z.replace(/\t/g,b.tabReplace)})}if(b.useBR){v=v.replace(/\n/g,"
")}return v}function p(A){var B=r(A);if(/no(-?)highlight/.test(B)){return}var y;if(b.useBR){y=document.createElementNS("http://www.w3.org/1999/xhtml","div");y.innerHTML=A.innerHTML.replace(/\n/g,"").replace(//g,"\n")}else{y=A}var z=y.textContent;var v=B?c(B,z,true):e(z);var x=u(y);if(x.length){var w=document.createElementNS("http://www.w3.org/1999/xhtml","div");w.innerHTML=v.value;v.value=q(x,u(w),z)}v.value=g(v.value);A.innerHTML=v.value;A.className+=" hljs "+(!B&&v.language||"");A.result={language:v.language,re:v.r};if(v.second_best){A.second_best={language:v.second_best.language,re:v.second_best.r}}}var b={classPrefix:"hljs-",tabReplace:null,useBR:false,languages:undefined};function s(v){b=o(b,v)}function l(){if(l.called){return}l.called=true;var v=document.querySelectorAll("pre code");Array.prototype.forEach.call(v,p)}function a(){addEventListener("DOMContentLoaded",l,false);addEventListener("load",l,false)}var f={};var n={};function d(v,x){var w=f[v]=x(this);if(w.aliases){w.aliases.forEach(function(y){n[y]=v})}}function k(){return Object.keys(f)}function i(v){return 
f[v]||f[n[v]]}this.highlight=c;this.highlightAuto=e;this.fixMarkup=g;this.highlightBlock=p;this.configure=s;this.initHighlighting=l;this.initHighlightingOnLoad=a;this.registerLanguage=d;this.listLanguages=k;this.getLanguage=i;this.inherit=o;this.IR="[a-zA-Z][a-zA-Z0-9_]*";this.UIR="[a-zA-Z_][a-zA-Z0-9_]*";this.NR="\\b\\d+(\\.\\d+)?";this.CNR="(\\b0[xX][a-fA-F0-9]+|(\\b\\d+(\\.\\d*)?|\\.\\d+)([eE][-+]?\\d+)?)";this.BNR="\\b(0b[01]+)";this.RSR="!|!=|!==|%|%=|&|&&|&=|\\*|\\*=|\\+|\\+=|,|-|-=|/=|/|:|;|<<|<<=|<=|<|===|==|=|>>>=|>>=|>=|>>>|>>|>|\\?|\\[|\\{|\\(|\\^|\\^=|\\||\\|=|\\|\\||~";this.BE={b:"\\\\[\\s\\S]",r:0};this.ASM={cN:"string",b:"'",e:"'",i:"\\n",c:[this.BE]};this.QSM={cN:"string",b:'"',e:'"',i:"\\n",c:[this.BE]};this.PWM={b:/\b(a|an|the|are|I|I'm|isn't|don't|doesn't|won't|but|just|should|pretty|simply|enough|gonna|going|wtf|so|such)\b/};this.CLCM={cN:"comment",b:"//",e:"$",c:[this.PWM]};this.CBCM={cN:"comment",b:"/\\*",e:"\\*/",c:[this.PWM]};this.HCM={cN:"comment",b:"#",e:"$",c:[this.PWM]};this.NM={cN:"number",b:this.NR,r:0};this.CNM={cN:"number",b:this.CNR,r:0};this.BNM={cN:"number",b:this.BNR,r:0};this.CSSNM={cN:"number",b:this.NR+"(%|em|ex|ch|rem|vw|vh|vmin|vmax|cm|mm|in|pt|pc|px|deg|grad|rad|turn|s|ms|Hz|kHz|dpi|dpcm|dppx)?",r:0};this.RM={cN:"regexp",b:/\//,e:/\/[gim]*/,i:/\n/,c:[this.BE,{b:/\[/,e:/\]/,r:0,c:[this.BE]}]};this.TM={cN:"title",b:this.IR,r:0};this.UTM={cN:"title",b:this.UIR,r:0}}();hljs.registerLanguage("scala",function(d){var b={cN:"annotation",b:"@[A-Za-z]+"};var c={cN:"string",b:'u?r?"""',e:'"""',r:10};var a={cN:"symbol",b:"'\\w[\\w\\d_]*(?!')"};var e={cN:"type",b:"\\b[A-Z][A-Za-z0-9_]*",r:0};var h={cN:"title",b:/[^0-9\n\t "'(),.`{}\[\]:;][^\n\t "'(),.`{}\[\]:;]+|[^0-9\n\t "'(),.`{}\[\]:;=]/,r:0};var i={cN:"class",bK:"class object trait type",e:/[:={\[(\n;]/,c:[{cN:"keyword",bK:"extends with",r:10},h]};var g={cN:"function",bK:"def val",e:/[:={\[(\n;]/,c:[h]};var 
f={cN:"javadoc",b:"/\\*\\*",e:"\\*/",c:[{cN:"javadoctag",b:"@[A-Za-z]+"}],r:10};return{k:{literal:"true false null",keyword:"type yield lazy override def with val var sealed abstract private trait object if forSome for while throw finally protected extends import final return else break new catch super class case package default try this match continue throws implicit"},c:[d.CLCM,d.CBCM,c,d.QSM,a,e,g,i,d.CNM,b]}}); \ No newline at end of file diff --git a/docs/_spec/public/scripts/main.js b/docs/_spec/public/scripts/main.js new file mode 100644 index 000000000000..9ade9c770f1e --- /dev/null +++ b/docs/_spec/public/scripts/main.js @@ -0,0 +1,71 @@ +function currentChapter() { + var path = document.location.pathname; + var idx = path.lastIndexOf("/") + 1; + var chap = path.substring(idx, idx + 2); + return parseInt(chap, 10); +} + +function heading(i, heading, $heading) { + var currentLevel = parseInt(heading.tagName.substring(1)); + var result = ""; + if (currentLevel === this.headerLevel) { + this.headerCounts[this.headerLevel] += 1; + return "" + this.headerCounts[this.headerLevel] + " " + $heading.text(); + } else if (currentLevel < this.headerLevel) { + while(currentLevel < this.headerLevel) { + this.headerCounts[this.headerLevel] = 1; + this.headerLevel -= 1; + } + this.headerCounts[this.headerLevel] += 1; + return "" + this.headerCounts[this.headerLevel]+ " " + $heading.text(); + } else { + while(currentLevel > this.headerLevel) { + this.headerLevel += 1; + this.headerCounts[this.headerLevel] = 1; + } + return "" + this.headerCounts[this.headerLevel]+ " " + $heading.text(); + } +} + +// ignore when using wkhtmltopdf, or it won't work... 
+if(window.jekyllEnv !== 'spec-pdf') { + $('#toc').toc( + { + 'selectors': 'h1,h2,h3', + 'smoothScrolling': false, + 'chapter': currentChapter(), + 'headerLevel': 1, + 'headerCounts': [-1, currentChapter() - 1, 1, 1], + 'headerText': heading + } + ); +} + +// no language auto-detect so that EBNF isn't detected as scala +hljs.configure({ + languages: [] +}); + +// KaTeX configuration +document.addEventListener("DOMContentLoaded", function() { + renderMathInElement(document.body, { + delimiters: [ + {left: "´", right: "´", display: false}, // "display: false" -> inline + {left: "$$", right: "$$", display: true} + ], + ignoredTags: ['script', 'noscript', 'style', 'textarea'], + }); + // syntax highlighting after KaTeX is loaded, + // so that math can be used in code blocks + hljs.initHighlighting(); + $("pre nobr").addClass("fixws"); + // point when all necessary js is done, so PDF to be rendered + window.status = "loaded"; +}); + +$("#chapters a").each(function (index) { + if (document.location.pathname.endsWith($(this).attr("href"))) + $(this).addClass("chapter-active"); + else + $(this).removeClass("chapter-active"); +}); diff --git a/docs/_spec/public/scripts/toc.js b/docs/_spec/public/scripts/toc.js new file mode 100644 index 000000000000..5b0bded12cfc --- /dev/null +++ b/docs/_spec/public/scripts/toc.js @@ -0,0 +1,128 @@ +/*! 
+ * toc - jQuery Table of Contents Plugin + * v0.3.2 + * http://projects.jga.me/toc/ + * copyright Greg Allen 2014 + * MIT License +*/ +(function($) { +var verboseIdCache = {}; +$.fn.toc = function(options) { + var self = this; + var opts = $.extend({}, jQuery.fn.toc.defaults, options); + + var container = $(opts.container); + var headings = $(opts.selectors, container); + var headingOffsets = []; + var activeClassName = opts.activeClass; + + var scrollTo = function(e, callback) { + $('li', self).removeClass(activeClassName); + $(e.target).parent().addClass(activeClassName); + }; + + //highlight on scroll + var timeout; + var highlightOnScroll = function(e) { + if (timeout) { + clearTimeout(timeout); + } + timeout = setTimeout(function() { + var top = $(window).scrollTop(), + highlighted, closest = Number.MAX_VALUE, index = 0; + + for (var i = 0, c = headingOffsets.length; i < c; i++) { + var currentClosest = Math.abs(headingOffsets[i] - top); + if (currentClosest < closest) { + index = i; + closest = currentClosest; + } + } + + $('li', self).removeClass(activeClassName); + highlighted = $('li:eq('+ index +')', self).addClass(activeClassName); + opts.onHighlight(highlighted); + }, 50); + }; + if (opts.highlightOnScroll) { + $(window).on('scroll', highlightOnScroll); + highlightOnScroll(); + } + + return this.each(function() { + //build TOC + var el = $(this); + var ul = $(opts.listType); + + headings.each(function(i, heading) { + var $h = $(heading); + headingOffsets.push($h.offset().top - opts.highlightOffset); + + var anchorName = opts.anchorName(i, heading, opts.prefix); + + //add anchor + if(heading.id !== anchorName) { + var anchor = $('').attr('id', anchorName).insertBefore($h); + } + + //build TOC item + var a = $('') + .text(opts.headerText(i, heading, $h)) + .attr('href', '#' + anchorName) + .on('click', function(e) { + $(window).off('scroll', highlightOnScroll); + scrollTo(e, function() { + $(window).on('scroll', highlightOnScroll); + }); + 
el.trigger('selected', $(this).attr('href')); + }); + + var li = $('
  • ') + .addClass(opts.itemClass(i, heading, $h, opts.prefix)) + .append(a); + + ul.append(li); + }); + el.html(ul); + }); +}; + + +jQuery.fn.toc.defaults = { + container: 'body', + listType: '
      ', + selectors: 'h1,h2,h3', + prefix: 'toc', + activeClass: 'toc-active', + onHighlight: function() {}, + highlightOnScroll: true, + highlightOffset: 100, + anchorName: function(i, heading, prefix) { + if(heading.id.length) { + return heading.id; + } + + var candidateId = $(heading).text().replace(/[^a-z0-9]/ig, ' ').replace(/\s+/g, '-').toLowerCase(); + if (verboseIdCache[candidateId]) { + var j = 2; + + while(verboseIdCache[candidateId + j]) { + j++; + } + candidateId = candidateId + '-' + j; + + } + verboseIdCache[candidateId] = true; + + return prefix + '-' + candidateId; + }, + headerText: function(i, heading, $heading) { + return $heading.text(); + }, + itemClass: function(i, heading, $heading, prefix) { + return prefix + '-' + $heading[0].tagName.toLowerCase(); + } + +}; + +})(jQuery); diff --git a/docs/_spec/public/stylesheets/fonts.css b/docs/_spec/public/stylesheets/fonts.css new file mode 100644 index 000000000000..36efb2bbd5a0 --- /dev/null +++ b/docs/_spec/public/stylesheets/fonts.css @@ -0,0 +1,73 @@ +@font-face { + font-family: 'Luxi Sans'; + src: local('Luxi Sans Regular'), + url('../fonts/LuxiSans-Regular.woff') format('woff'); + font-weight: normal; + font-style: normal; +} + +@font-face { + font-family: 'Luxi Sans'; + src: local('Luxi Sans Bold'), + url('../fonts/LuxiSans-Bold.woff') format('woff'); + font-weight: bold; + font-style: normal; +} + +@font-face { + font-family: 'Luxi Mono'; + src: local('Luxi Mono Regular'), + url('../fonts/LuxiMono-Regular.woff') format('woff'); + font-weight: normal; + font-style: normal; +} +@font-face { + font-family: 'Luxi Mono'; + src: local('Luxi Mono Oblique'), + url('../fonts/LuxiMono-BoldOblique.woff') format('woff'); + font-weight: normal; + font-style: oblique; +} +@font-face { + font-family: 'Luxi Mono'; + src: local('Luxi Mono Bold'), + url('../fonts/LuxiMono-Bold.woff') format('woff'); + font-weight: bold; + font-style: normal; +} +@font-face { + font-family: 'Luxi Mono'; + src: local('Luxi Mono 
Bold Oblique'), + url('../fonts/LuxiMono-BoldOblique.woff') format('woff'); + font-weight: bold; + font-style: oblique; +} + +@font-face { + font-family: 'Heuristica'; + src: local('Heuristica Regular'), + url('../fonts/Heuristica-Regular.woff') format('woff'); + font-weight: normal; + font-style: normal; +} +@font-face { + font-family: 'Heuristica'; + src: local('Heuristica Italic'), + url('../fonts/Heuristica-RegularItalic.woff') format('woff'); + font-weight: normal; + font-style: italic; +} +@font-face { + font-family: 'Heuristica'; + src: local('Heuristica Bold'), + url('../fonts/Heuristica-Bold.woff') format('woff'); + font-weight: bold; + font-style: normal; +} +@font-face { + font-family: 'Heuristica'; + src: local('Heuristica Bold Italic'), + url('../fonts/Heuristica-BoldItalic.woff') format('woff'); + font-weight: bold; + font-style: italic; +} diff --git a/docs/_spec/public/stylesheets/print.css b/docs/_spec/public/stylesheets/print.css new file mode 100644 index 000000000000..f0efff28b203 --- /dev/null +++ b/docs/_spec/public/stylesheets/print.css @@ -0,0 +1,42 @@ +/* This removes a few things from screen.css for printing */ + +body { + padding: 0px; + margin: 0px; +} + +.anchor, #navigation, .to_top, .version-notice, .hidden-print { + display: none !important; +} + +.print-only { + display: block; +} + +#content-container { + width: 100%; + float: none; +} + +/* no scrollbars, jump to next row.. 
*/ +.highlight pre code { + overflow: hidden; + white-space: pre-wrap; +} + +main { + position: relative; + top: 32px; + margin: 0 0 0 0; + padding: 0px 32px; + max-width: none; + min-width: none; + min-height: none; + background-color: #FFF; +} + +/* Avoid clipped headings https://github.com/pdfkit/pdfkit/issues/113#issuecomment-7027798 */ +h2, h3, h4, h5, h6 { + padding: 0px; + margin: 0px; +} diff --git a/docs/_spec/public/stylesheets/screen-small.css b/docs/_spec/public/stylesheets/screen-small.css new file mode 100644 index 000000000000..674db7c49000 --- /dev/null +++ b/docs/_spec/public/stylesheets/screen-small.css @@ -0,0 +1,57 @@ +body { + padding: 0px; + margin: 0px; +} +aside.left { + position: relative; + margin: 0px auto; + overflow: visible; + height: inherit; + margin-bottom: 40px; + background-color: #073642; +} +header { + position: relative; + height: inherit; + min-height: 32px; +} +main { + max-width: 1000px; + min-width: 600px; + margin: 0 auto; +} + +#chapters a { + font-size: 14px; + max-height: 32px; + padding: 4px 8px; + white-space: nowrap; + display: inline-block; +} +#chapters > #github { + padding: 14px; +} + +#toc { + overflow: visible; +} +#toc .toc-active { + background: inherit; +} +#toc .toc-h1 { + display: inherit; +} +#toc .toc-h1 a { + padding-left: 10px; + color: #FFFFFF; + background: #72D0EB; +} +#toc .toc-h2 a { + padding-left: 30px; +} +#toc .toc-h3 a { + padding-left: 50px; +} +#toc a { + font-size: 14px; +} diff --git a/docs/_spec/public/stylesheets/screen-toc.css b/docs/_spec/public/stylesheets/screen-toc.css new file mode 100644 index 000000000000..7a04bd00f96c --- /dev/null +++ b/docs/_spec/public/stylesheets/screen-toc.css @@ -0,0 +1,37 @@ +body { + padding: 0px; + margin: 0px; +} +header { + height: 96px; + padding: 0px; + width: 100%; + position: relative; + color: #FFFFFF; +} +#header-main { + height: 68px; + line-height: 1.2; + font-size: 32px; +} +#header-sub { + padding-left: 64px; + height: 28px; + 
background-color:#72D0EB; + vertical-align: middle; +} +#scala-logo { + padding: 10px; +} +#title { + vertical-align: middle; +} +#github { + height: 40px; + padding: 14px; + float: right; + font-size: 0px; +} +li { + margin: 5px; +} diff --git a/docs/_spec/public/stylesheets/screen.css b/docs/_spec/public/stylesheets/screen.css new file mode 100644 index 000000000000..2073613eaea7 --- /dev/null +++ b/docs/_spec/public/stylesheets/screen.css @@ -0,0 +1,521 @@ +/* from https://gist.github.com/andyferra/2554919 */ + +body { + font-family:Heuristica,Georgia,serif; + color: #222222; + line-height: 1.6; + + padding-bottom: 10px; + background-color: white; + padding-left: 30px; +} + +#content-container > *:first-child { + margin-top: 0 !important; +} +#content-container > *:last-child { + margin-bottom: 0 !important; +} + +a { + color: #08C; + text-decoration: none; +} +a:hover, a:focus { + +} +a.absent { + color: #cc0000; +} +a.anchor { + display: block; + margin-left: -35px; + padding-left: 10px; + cursor: pointer; + position: absolute; + top: 0; + left: 0; + bottom: 0; + color: black; + width: 35px; height: 100%; +} + +a.anchor span { + vertical-align: middle; +} + +h1, h2, h3, h4, h5, h6 { + margin: 30px 0 0px; + padding: 0; + /* Fix anchor position due to header */ + padding-top: 32px; + margin-top: -32px; + font-weight: bold; + -webkit-font-smoothing: antialiased; + cursor: text; + position: relative; + pointer-events: none; +} + +h1, h2 { + font-weight: normal; +} + +h1:hover a.anchor, h2:hover a.anchor, h3:hover a.anchor, h4:hover a.anchor, h5:hover a.anchor, h6:hover a.anchor { + text-decoration: none; +} + +h1:hover a.anchor span, h2:hover a.anchor span, h3:hover a.anchor span, h4:hover a.anchor span, h5:hover a.anchor span, h6:hover a.anchor span { + display: inline-block; +} + +h1 a.anchor span, h2 a.anchor span, h3 a.anchor span, h4 a.anchor span, h5 a.anchor span, h6 a.anchor span { + display: none; +} + +h1 a.anchor:hover span, h2 a.anchor:hover span, h3 
a.anchor:hover span, h4 a.anchor:hover span, h5 a.anchor:hover span, h6 a.anchor:hover span { + display: inline-block; +} + +h1 tt, h1 code { + font-size: inherit; +} + +h2 tt, h2 code { + font-size: inherit; +} + +h3 tt, h3 code { + font-size: inherit; +} + +h4 tt, h4 code { + font-size: inherit; +} + +h5 tt, h5 code { + font-size: inherit; +} + +h6 tt, h6 code { + font-size: inherit; +} + +h1 { + font-size: 28px; + color: black; +} + +h2 { + font-size: 24px; + color: black; +} + +h3 { + font-size: 18px; +} + +h4 { + font-size: 16px; +} + +h5 { + font-size: 14px; +} + +h6 { + color: #777777; + font-size: 14px; +} + +p, blockquote, ul, ol, dl, li, table, pre { + margin: 5px 0 15px; + -moz-font-feature-settings: "onum"; + -ms-font-feature-settings: "onum"; + -webkit-font-feature-settings: "onum"; + font-feature-settings: "onum"; +} + +hr { + background: transparent repeat-x 0 0; + border: 0 none; + color: #cccccc; + height: 4px; + padding: 0; +} + +body > h2:first-child { + margin-top: 0; + padding-top: 0; +} +body > h1:first-child { + margin-top: 0; + padding-top: 0; +} +body > h1:first-child + h2 { + margin-top: 0; + padding-top: 0; +} +body > h3:first-child, body > h4:first-child, body > h5:first-child, body > h6:first-child { + margin-top: 0; + padding-top: 0; +} + +a:first-child h1, a:first-child h2, a:first-child h3, a:first-child h4, a:first-child h5, a:first-child h6 { + margin-top: 0; + padding-top: 0; +} + +h1 p, h2 p, h3 p, h4 p, h5 p, h6 p { + margin-top: 0; +} + +li p.first { + display: inline-block; +} + +ul, ol { + padding-left: 30px; +} + +ul :first-child, ol :first-child { + margin-top: 0; +} + +ul :last-child, ol :last-child { + margin-bottom: 0; +} + +dl { + padding: 0; +} +dl dt { + font-size: 14px; + font-weight: bold; + font-style: italic; + padding: 0; + margin: 15px 0 5px; +} +dl dt:first-child { + padding: 0; +} +dl dt > :first-child { + margin-top: 0; +} +dl dt > :last-child { + margin-bottom: 0; +} +dl dd { + margin: 0 0 15px; + padding: 0 
15px; +} +dl dd > :first-child { + margin-top: 0; +} +dl dd > :last-child { + margin-bottom: 0; +} + +blockquote { + border-left: 4px solid #dddddd; + padding: 0 15px; + color: #222222; +} +blockquote > :first-child { + margin-top: 0; +} +blockquote > :last-child { + margin-bottom: 0; +} +blockquote:before { + content: "Example"; + color: #777777; + font-size: 14px; + font-weight: bold; +} + +table { + padding: 0; + margin: 0; + border: none; + border-collapse: collapse; +} +table tr { + background-color: white; +} +table tr:nth-child(2n) { + background-color: #f8f8f8; +} +table tr th { + background-color: #EAEAEA; + font-weight: bold; + text-align: left; + padding: 5px 13px; +} +table tr td { + text-align: left; + padding: 5px 13px; +} +table tr th :first-child, table tr td :first-child { + margin-top: 0; +} +table tr th :last-child, table tr td :last-child { + margin-bottom: 0; +} + +img { + max-width: 100%; +} + +span.frame { + display: block; + overflow: hidden; +} +span.frame > span { + border: 1px solid #dddddd; + display: block; + float: left; + overflow: hidden; + margin: 13px 0 0; + padding: 7px; + width: auto; +} +span.frame span img { + display: block; + float: left; +} +span.frame span span { + clear: both; + color: #333333; + display: block; + padding: 5px 0 0; +} +span.align-center { + display: block; + overflow: hidden; + clear: both; +} +span.align-center > span { + display: block; + overflow: hidden; + margin: 13px auto 0; + text-align: center; +} +span.align-center span img { + margin: 0 auto; + text-align: center; +} +span.align-right { + display: block; + overflow: hidden; + clear: both; +} +span.align-right > span { + display: block; + overflow: hidden; + margin: 13px 0 0; + text-align: right; +} +span.align-right span img { + margin: 0; + text-align: right; +} +span.float-left { + display: block; + margin-right: 13px; + overflow: hidden; + float: left; +} +span.float-left span { + margin: 13px 0 0; +} +span.float-right { + display: block; + 
margin-left: 13px; + overflow: hidden; + float: right; +} +span.float-right > span { + display: block; + overflow: hidden; + margin: 13px auto 0; + text-align: right; +} + +pre, code, tt { + font:14px "Luxi Mono", 'andale mono', 'lucida console', monospace; + line-height:1.5; +} + +.highlight pre { + background-color: #F8F8F8; + border-radius: 3px; + overflow: auto; + padding: 6px 10px; + white-space: nowrap; +} + +code { + background-color: transparent; + border: none; + margin: 0; + padding: 0; + white-space: pre; +} + +aside.left { + height: 100%; + position: fixed; + direction: rtl; + overflow: auto; + left: 0px; + width: 320px; + bottom: -32px; + font-family: "Luxi Sans", serif; + background-color: #073642; +} + +aside.left > nav { + direction: ltr; + top: 32px; + padding-bottom: 32px; +} + +article, aside, details, figcaption, figure, footer, header, hgroup, main, nav, section, summary { + display: block; +} + +audio, canvas, img, svg, video { + vertical-align: middle; +} + +audio, canvas, progress, video { + display: inline-block; + vertical-align: baseline; +} + +main { + position: relative; + top: 32px; + margin: 0 0 0 320px; + padding: 0px 32px; + max-width: 800px; + min-width: 800px; + min-height: 580px; + background-color: #FFF; +} + +header { + position: fixed; + top: 0px; + left: 0px; + height: 32px; + width: 100%; + background-color: #002B36; + margin: 0px 0px; + padding: 0px 0px; + font-family: "Luxi Sans", serif; + font-weight: bold; + z-index: 10; + overflow: hidden; + text-shadow: 1px 1px 0px rgba(0, 43, 54, 0.15); +} + +#chapters a { + color: #FFFFFF; + text-decoration: none; + font-size: 0.63vw; + padding: 100% 5px; +} + +#chapters a:hover, #chapters a:focus, #github:hover, #github:focus { + background: #DC322F; + -webkit-transition: background .2s ease-in; + -moz-transition: background .2s ease-in; + -ms-transition: background .2s ease-in; + -o-transition: background .2s ease-in; + transition: background .2s ease-in; +} + +#chapters 
a.chapter-active { + background: #72D0EB; +} + + +#toc ul { + margin: 0; + padding: 0; + list-style: none; +} + +#toc li { + margin: 0; + padding: 0; +} + +#toc a { + color: #FFFFFF; /*#073642;*/ + font-weight: bold; + font-size: 12px; + display: block; + text-shadow: 1px 1px 0px rgba(0, 43, 54, 0.15); +} + +#toc a:hover, #toc a:focus { + background: #DC322F; + text-decoration: none; + -webkit-transition: background .2s ease-in; + -moz-transition: background .2s ease-in; + -ms-transition: background .2s ease-in; + -o-transition: background .2s ease-in; + transition: background .2s ease-in; +} + +#toc .toc-h1 { + display: none; +} + +#toc .toc-h2 a { + padding-left: 10px; +} + +#toc .toc-h3 a { + padding-left: 30px; +} + +#toc .toc-active { + background: #72D0EB; +} + +#toc .toc-active a { + color: #FFFFFF; +} + +#chapters > #github { + padding: 0px; + float: right; +} + +.hljs{ + background: #f8f8f8; +} +/* proper rendering of MathJax into highlighted code blocks */ +.fixws { white-space: pre; } +.fixws .math { white-space: nowrap; } + +.version-notice { + background-color: #C93A3A; + color: #f2f2f2; + border:1px solid #ccc; + padding: 1em; + margin-bottom: 1em; +} +.version-notice a { + color: #f2f2f2; + font-weight: bold; + text-decoration: underline; +} + +.print-only { + display: none; +} diff --git a/docs/_spec/spec-toc.xslt b/docs/_spec/spec-toc.xslt new file mode 100644 index 000000000000..437b15e3e6f4 --- /dev/null +++ b/docs/_spec/spec-toc.xslt @@ -0,0 +1,64 @@ + + + + + + + Table of Contents + + + ./public/stylesheets/fonts.css + + + + +

      Table of Contents

      +
      + + +
      + +
    • + + + +
        + added to prevent self-closing tags in QtXmlPatterns + +
      +
    • + + diff --git a/docs/sidebar.yml b/docs/sidebar.yml index 4d2fa6d22eb1..30ad05d18cf1 100644 --- a/docs/sidebar.yml +++ b/docs/sidebar.yml @@ -149,6 +149,7 @@ subsection: - page: reference/experimental/numeric-literals.md - page: reference/experimental/explicit-nulls.md - page: reference/experimental/main-annotation.md + - page: reference/experimental/into-modifier.md - page: reference/experimental/cc.md - page: reference/experimental/purefuns.md - page: reference/experimental/tupled-function.md @@ -164,11 +165,18 @@ subsection: directory: docs/contributing index: contributing/index.md subsection: - - page: contributing/contribute-knowledge.md - page: contributing/getting-started.md - - page: contributing/workflow.md - - page: contributing/testing.md - - page: contributing/debugging.md + - index: contributing/workflow.md + subsection: + - page: contributing/issues/reproduce.md + - page: contributing/issues/cause.md + - page: contributing/issues/areas.md + - page: contributing/issues/debugging.md + - page: contributing/issues/other-debugging.md + - page: contributing/issues/inspection.md + - page: contributing/issues/efficiency.md + - page: contributing/issues/testing.md + - page: contributing/issues/checklist.md - title: IDEs and Tools directory: tools index: contributing/tools/index.md @@ -177,17 +185,27 @@ subsection: - page: contributing/tools/mill.md - page: contributing/tools/scalafix.md - title: Procedures + directory: procedures index: contributing/procedures/index.md subsection: - page: contributing/procedures/release.md - page: contributing/procedures/vulpix.md + - title: High Level Architecture + directory: architecture + index: contributing/architecture/index.md + subsection: + - page: contributing/architecture/lifecycle.md + - page: contributing/architecture/context.md + - page: contributing/architecture/phases.md + - page: contributing/architecture/types.md + - page: contributing/architecture/time.md + - page: contributing/architecture/symbols.md 
- title: Internals directory: docs/internals index: internals/index.md subsection: - page: internals/backend.md - page: internals/classpaths.md - - page: internals/core-data-structures.md - page: internals/contexts.md - page: internals/dotc-scalac.md - page: internals/higher-kinded-v2.md diff --git a/interfaces/src/dotty/tools/dotc/interfaces/Diagnostic.java b/interfaces/src/dotty/tools/dotc/interfaces/Diagnostic.java index c46360afaa3d..19878a2fa105 100644 --- a/interfaces/src/dotty/tools/dotc/interfaces/Diagnostic.java +++ b/interfaces/src/dotty/tools/dotc/interfaces/Diagnostic.java @@ -1,6 +1,7 @@ package dotty.tools.dotc.interfaces; import java.util.Optional; +import java.util.List; /** A diagnostic is a message emitted during the compilation process. * @@ -23,4 +24,7 @@ public interface Diagnostic { /** @return The position in a source file of the code that caused this diagnostic * to be emitted. */ Optional position(); + + /** @return A list of additional messages together with their code positions */ + List diagnosticRelatedInformation(); } diff --git a/interfaces/src/dotty/tools/dotc/interfaces/DiagnosticRelatedInformation.java b/interfaces/src/dotty/tools/dotc/interfaces/DiagnosticRelatedInformation.java new file mode 100644 index 000000000000..3ebea03f4362 --- /dev/null +++ b/interfaces/src/dotty/tools/dotc/interfaces/DiagnosticRelatedInformation.java @@ -0,0 +1,6 @@ +package dotty.tools.dotc.interfaces; + +public interface DiagnosticRelatedInformation { + SourcePosition position(); + String message(); +} diff --git a/library-js/src/scala/scalajs/runtime/AnonFunctionXXL.scala b/library-js/src/scala/scalajs/runtime/AnonFunctionXXL.scala new file mode 100644 index 000000000000..87208573eff9 --- /dev/null +++ b/library-js/src/scala/scalajs/runtime/AnonFunctionXXL.scala @@ -0,0 +1,8 @@ +package scala.scalajs.runtime + +import scala.scalajs.js + +@inline +final class AnonFunctionXXL(f: js.Function1[IArray[Object], Object]) extends scala.runtime.FunctionXXL { + 
override def apply(xs: IArray[Object]): Object = f(xs) +} diff --git a/library/src/scala/CanEqual.scala b/library/src/scala/CanEqual.scala index dfb4ec7d2bfc..8c331bb21b43 100644 --- a/library/src/scala/CanEqual.scala +++ b/library/src/scala/CanEqual.scala @@ -1,7 +1,7 @@ package scala import annotation.implicitNotFound -import scala.collection.{Seq, Set} +import scala.collection.{Seq, Set, Map} /** A marker trait indicating that values of type `L` can be compared to values of type `R`. */ @implicitNotFound("Values of types ${L} and ${R} cannot be compared with == or !=") @@ -26,7 +26,7 @@ object CanEqual { given canEqualNumber: CanEqual[Number, Number] = derived given canEqualString: CanEqual[String, String] = derived - // The next 6 definitions can go into the companion objects of their corresponding + // The following definitions can go into the companion objects of their corresponding // classes. For now they are here in order not to have to touch the // source code of these classes given canEqualSeqs[T, U](using eq: CanEqual[T, U]): CanEqual[Seq[T], Seq[U]] = derived @@ -34,6 +34,10 @@ object CanEqual { given canEqualSet[T, U](using eq: CanEqual[T, U]): CanEqual[Set[T], Set[U]] = derived + given canEqualMap[K1, V1, K2, V2]( + using eqK: CanEqual[K1, K2], eqV: CanEqual[V1, V2] + ): CanEqual[Map[K1, V1], Map[K2, V2]] = derived + given canEqualOptions[T, U](using eq: CanEqual[T, U]): CanEqual[Option[T], Option[U]] = derived given canEqualOption[T](using eq: CanEqual[T, T]): CanEqual[Option[T], Option[T]] = derived // for `case None` in pattern matching diff --git a/library/src/scala/Tuple.scala b/library/src/scala/Tuple.scala index 703f8a1e2992..fa72e320b560 100644 --- a/library/src/scala/Tuple.scala +++ b/library/src/scala/Tuple.scala @@ -83,7 +83,7 @@ sealed trait Tuple extends Product { object Tuple { /** Type of a tuple with an element appended */ - type Append[X <: Tuple, Y] <: Tuple = X match { + type Append[X <: Tuple, Y] <: NonEmptyTuple = X match { case 
EmptyTuple => Y *: EmptyTuple case x *: xs => x *: Append[xs, Y] } diff --git a/library/src/scala/annotation/MacroAnnotation.scala b/library/src/scala/annotation/MacroAnnotation.scala new file mode 100644 index 000000000000..5c39ef45f417 --- /dev/null +++ b/library/src/scala/annotation/MacroAnnotation.scala @@ -0,0 +1,212 @@ +// TODO in which package should this class be located? +package scala +package annotation + +import scala.quoted._ + +/** Base trait for macro annotation implementation. + * Macro annotations can transform definitions and add new definitions. + * + * See: `MacroAnnotation.transform` + * + * @syntax markdown + */ +@experimental +trait MacroAnnotation extends StaticAnnotation: + + /** Transform the `tree` definition and add new definitions + * + * This method takes as argument the annotated definition. + * It returns a non-empty list containing the modified version of the annotated definition. + * The new tree for the definition must use the original symbol. + * New definitions can be added to the list before or after the transformed definitions, this order + * will be retained. New definitions will not be visible from outside the macro expansion. + * + * #### Restrictions + * - All definitions in the result must have the same owner. The owner can be recovered from `Symbol.spliceOwner`. + * - Special case: an annotated top-level `def`, `val`, `var`, `lazy val` can return a `class`/`object` +definition that is owned by the package or package object. + * - Can not return a `type`. + * - Annotated top-level `class`/`object` can not return top-level `def`, `val`, `var`, `lazy val`. + * - Can not see new definition in user written code. + * + * #### Good practices + * - Make your new definitions private if you can. + * - New definitions added as class members should use a fresh name (`Symbol.freshName`) to avoid collisions. 
+ * - New top-level definitions should use a fresh name (`Symbol.freshName`) that includes the name of the annotated + * member as a prefix to avoid collisions of definitions added in other files. + * + * **IMPORTANT**: When developing and testing a macro annotation, you must enable `-Xcheck-macros` and `-Ycheck:all`. + * + * #### Example 1 + * This example shows how to modify a `def` and add a `val` next to it using a macro annotation. + * ```scala + * import scala.quoted.* + * import scala.collection.mutable + * + * class memoize extends MacroAnnotation: + * def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + * import quotes.reflect._ + * tree match + * case DefDef(name, TermParamClause(param :: Nil) :: Nil, tpt, Some(rhsTree)) => + * (param.tpt.tpe.asType, tpt.tpe.asType) match + * case ('[t], '[u]) => + * val cacheName = Symbol.freshName(name + "Cache") + * val cacheSymbol = Symbol.newVal(Symbol.spliceOwner, cacheName, TypeRepr.of[mutable.Map[t, u]], Flags.Private, Symbol.noSymbol) + * val cacheRhs = + * given Quotes = cacheSymbol.asQuotes + * '{ mutable.Map.empty[t, u] }.asTerm + * val cacheVal = ValDef(cacheSymbol, Some(cacheRhs)) + * val newRhs = + * given Quotes = tree.symbol.asQuotes + * val cacheRefExpr = Ref(cacheSymbol).asExprOf[mutable.Map[t, u]] + * val paramRefExpr = Ref(param.symbol).asExprOf[t] + * val rhsExpr = rhsTree.asExprOf[u] + * '{ $cacheRefExpr.getOrElseUpdate($paramRefExpr, $rhsExpr) }.asTerm + * val newTree = DefDef.copy(tree)(name, TermParamClause(param :: Nil) :: Nil, tpt, Some(newRhs)) + * List(cacheVal, newTree) + * case _ => + * report.error("Annotation only supported on `def` with a single argument are supported") + * List(tree) + * ``` + * with this macro annotation a user can write + * ```scala + * //{ + * class memoize extends scala.annotation.StaticAnnotation + * //} + * @memoize + * def fib(n: Int): Int = + * println(s"compute fib of $n") + * if n <= 1 then n else fib(n - 1) + fib(n 
- 2) + * ``` + * and the macro will modify the definition to create + * ```scala + * val fibCache$macro$1 = + * scala.collection.mutable.Map.empty[Int, Int] + * def fib(n: Int): Int = + * fibCache$macro$1.getOrElseUpdate( + * n, + * { + * println(s"compute fib of $n") + * if n <= 1 then n else fib(n - 1) + fib(n - 2) + * } + * ) + * ``` + * + * #### Example 2 + * This example shows how to modify a `class` using a macro annotation. + * It shows how to override inherited members and add new ones. + * ```scala + * import scala.annotation.{experimental, MacroAnnotation} + * import scala.quoted.* + * + * @experimental + * class equals extends MacroAnnotation: + * def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + * import quotes.reflect.* + * tree match + * case ClassDef(className, ctr, parents, self, body) => + * val cls = tree.symbol + * + * val constructorParameters = ctr.paramss.collect { case clause: TermParamClause => clause } + * if constructorParameters.size != 1 || constructorParameters.head.params.isEmpty then + * report.errorAndAbort("@equals class must have a single argument list with at least one argument", ctr.pos) + * def checkNotOverridden(sym: Symbol): Unit = + * if sym.overridingSymbol(cls).exists then + * report.error(s"Cannot override ${sym.name} in a @equals class") + * + * val fields = body.collect { + * case vdef: ValDef if vdef.symbol.flags.is(Flags.ParamAccessor) => + * Select(This(cls), vdef.symbol).asExpr + * } + * + * val equalsSym = Symbol.requiredMethod("java.lang.Object.equals") + * checkNotOverridden(equalsSym) + * val equalsOverrideSym = Symbol.newMethod(cls, "equals", equalsSym.info, Flags.Override, Symbol.noSymbol) + * def equalsOverrideDefBody(argss: List[List[Tree]]): Option[Term] = + * given Quotes = equalsOverrideSym.asQuotes + * cls.typeRef.asType match + * case '[c] => + * Some(equalsExpr[c](argss.head.head.asExpr, fields).asTerm) + * val equalsOverrideDef = DefDef(equalsOverrideSym, 
equalsOverrideDefBody) + * + * val hashSym = Symbol.newVal(cls, Symbol.freshName("hash"), TypeRepr.of[Int], Flags.Private | Flags.Lazy, Symbol.noSymbol) + * val hashVal = ValDef(hashSym, Some(hashCodeExpr(className, fields)(using hashSym.asQuotes).asTerm)) + * + * val hashCodeSym = Symbol.requiredMethod("java.lang.Object.hashCode") + * checkNotOverridden(hashCodeSym) + * val hashCodeOverrideSym = Symbol.newMethod(cls, "hashCode", hashCodeSym.info, Flags.Override, Symbol.noSymbol) + * val hashCodeOverrideDef = DefDef(hashCodeOverrideSym, _ => Some(Ref(hashSym))) + * + * val newBody = equalsOverrideDef :: hashVal :: hashCodeOverrideDef :: body + * List(ClassDef.copy(tree)(className, ctr, parents, self, newBody)) + * case _ => + * report.error("Annotation only supports `class`") + * List(tree) + * + * private def equalsExpr[T: Type](that: Expr[Any], thisFields: List[Expr[Any]])(using Quotes): Expr[Boolean] = + * '{ + * $that match + * case that: T @unchecked => + * ${ + * val thatFields: List[Expr[Any]] = + * import quotes.reflect.* + * thisFields.map(field => Select('{that}.asTerm, field.asTerm.symbol).asExpr) + * thisFields.zip(thatFields) + * .map { case (thisField, thatField) => '{ $thisField == $thatField } } + * .reduce { case (pred1, pred2) => '{ $pred1 && $pred2 } } + * } + * case _ => false + * } + * + * private def hashCodeExpr(className: String, thisFields: List[Expr[Any]])(using Quotes): Expr[Int] = + * '{ + * var acc: Int = ${ Expr(scala.runtime.Statics.mix(-889275714, className.hashCode)) } + * ${ + * Expr.block( + * thisFields.map { + * case '{ $field: Boolean } => '{ if $field then 1231 else 1237 } + * case '{ $field: Byte } => '{ $field.toInt } + * case '{ $field: Char } => '{ $field.toInt } + * case '{ $field: Short } => '{ $field.toInt } + * case '{ $field: Int } => field + * case '{ $field: Long } => '{ scala.runtime.Statics.longHash($field) } + * case '{ $field: Double } => '{ scala.runtime.Statics.doubleHash($field) } + * case '{ $field: Float } 
=> '{ scala.runtime.Statics.floatHash($field) } + * case '{ $field: Null } => '{ 0 } + * case '{ $field: Unit } => '{ 0 } + * case field => '{ scala.runtime.Statics.anyHash($field) } + * }.map(hash => '{ acc = scala.runtime.Statics.mix(acc, $hash) }), + * '{ scala.runtime.Statics.finalizeHash(acc, ${Expr(thisFields.size)}) } + * ) + * } + * } + * ``` + * with this macro annotation a user can write + * ```scala + * //{ + * class equals extends scala.annotation.StaticAnnotation + * //} + * @equals class User(val name: String, val id: Int) + * ``` + * and the macro will modify the class definition to generate the following code + * ```scala + * class User(val name: String, val id: Int): + * override def equals(that: Any): Boolean = + * that match + * case that: User => this.name == that.name && this.id == that.id + * case _ => false + * private lazy val hash$macro$1: Int = + * var acc = 515782504 // scala.runtime.Statics.mix(-889275714, "User".hashCode) + * acc = scala.runtime.Statics.mix(acc, scala.runtime.Statics.anyHash(name)) + * acc = scala.runtime.Statics.mix(acc, id) + * scala.runtime.Statics.finalizeHash(acc, 2) + * override def hashCode(): Int = hash$macro$1 + * ``` + * + * @param Quotes Implicit instance of Quotes used for tree reflection + * @param tree Tree that will be transformed + * + * @syntax markdown + */ + def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] diff --git a/library/src/scala/annotation/allowConversions.scala b/library/src/scala/annotation/allowConversions.scala new file mode 100644 index 000000000000..9d752ee26d21 --- /dev/null +++ b/library/src/scala/annotation/allowConversions.scala @@ -0,0 +1,10 @@ +package scala.annotation +import annotation.experimental + +/** An annotation on a parameter type that allows implicit conversions + * for its arguments. Intended for use by Scala 2, to annotate Scala 2 + * libraries. Scala 3 uses the `into` modifier on the parameter + * type instead. 
+ */ +@experimental +class allowConversions extends scala.annotation.StaticAnnotation diff --git a/library/src/scala/annotation/unchecked/uncheckedCapabilityLeaks.scala b/library/src/scala/annotation/unchecked/uncheckedCapabilityLeaks.scala new file mode 100644 index 000000000000..477ac6d742f7 --- /dev/null +++ b/library/src/scala/annotation/unchecked/uncheckedCapabilityLeaks.scala @@ -0,0 +1,12 @@ +package scala.annotation +package unchecked + +/** An annotation for mutable variables that are allowed to capture + * the root capability `cap`. Allowing this is not capture safe since + * it can cause leakage of capabilities from local scopes by assigning + * values retaining such capabilities to the annotated variable in + * an outer scope. + */ +class uncheckedCaptures extends StaticAnnotation + + diff --git a/library/src/scala/caps.scala b/library/src/scala/caps.scala index 21b2f7a4dece..866e5dbd18cd 100644 --- a/library/src/scala/caps.scala +++ b/library/src/scala/caps.scala @@ -4,15 +4,45 @@ import annotation.experimental @experimental object caps: - /** The universal capture reference */ + /** The universal capture reference (deprecated) */ + @deprecated("Use `cap` instead") val `*`: Any = () - /** If argument is of type `cs T`, converts to type `box cs T`. This - * avoids the error that would be raised when boxing `*`. + /** The universal capture reference */ + val cap: Any = () + + object unsafe: + + extension [T](x: T) + /** If argument is of type `cs T`, converts to type `box cs T`. This + * avoids the error that would be raised when boxing `*`. + */ + @deprecated(since = "3.3") + def unsafeBox: T = x + + /** If argument is of type `box cs T`, converts to type `cs T`. This + * avoids the error that would be raised when unboxing `*`. + */ + @deprecated(since = "3.3") + def unsafeUnbox: T = x + + extension [T, U](f: T => U) + /** If argument is of type `box cs T`, converts to type `cs T`. This + * avoids the error that would be raised when unboxing `*`. 
+ */ + @deprecated(since = "3.3") + def unsafeBoxFunArg: T => U = f + + end unsafe + + /** An annotation that expresses the sealed modifier on a type parameter + * Should not be directly referred to in source */ - extension [T](x: T) def unsafeBox: T = x + @deprecated("The Sealed annotation should not be directly used in source code.\nUse the `sealed` modifier on type parameters instead.") + class Sealed extends annotation.Annotation - /** If argument is of type `box cs T`, converts to type `cs T`. This - * avoids the error that would be raised when unboxing `*`. + /** Mixing in this trait forces a trait or class to be pure, i.e. + * have no capabilities retained in its self type. */ - extension [T](x: T) def unsafeUnbox: T = x + trait Pure: + this: Pure => diff --git a/library/src/scala/deriving/Mirror.scala b/library/src/scala/deriving/Mirror.scala index 5de219dfe5c4..57453a516567 100644 --- a/library/src/scala/deriving/Mirror.scala +++ b/library/src/scala/deriving/Mirror.scala @@ -52,7 +52,6 @@ object Mirror { extension [T](p: ProductOf[T]) /** Create a new instance of type `T` with elements taken from product `a`. 
*/ - @annotation.experimental def fromProductTyped[A <: scala.Product, Elems <: p.MirroredElemTypes](a: A)(using m: ProductOf[A] { type MirroredElemTypes = Elems }): T = p.fromProduct(a) diff --git a/library/src/scala/quoted/Expr.scala b/library/src/scala/quoted/Expr.scala index 996fe3ff8da2..8243e7dc4a4b 100644 --- a/library/src/scala/quoted/Expr.scala +++ b/library/src/scala/quoted/Expr.scala @@ -103,7 +103,7 @@ object Expr { case 20 => ofTupleFromSeq20(seq) case 21 => ofTupleFromSeq21(seq) case 22 => ofTupleFromSeq22(seq) - case _ => '{ Tuple.fromIArray(IArray(${Varargs(seq)}: _*)) } + case _ => ofTupleFromSeqXXL(seq) } } @@ -214,6 +214,18 @@ object Expr { case Seq('{ $x1: t1 }, '{ $x2: t2 }, '{ $x3: t3 }, '{ $x4: t4 }, '{ $x5: t5 }, '{ $x6: t6 }, '{ $x7: t7 }, '{ $x8: t8 }, '{ $x9: t9 }, '{ $x10: t10 }, '{ $x11: t11 }, '{ $x12: t12 }, '{ $x13: t13 }, '{ $x14: t14 }, '{ $x15: t15 }, '{ $x16: t16 }, '{ $x17: t17 }, '{ $x18: t18 }, '{ $x19: t19 }, '{ $x20: t20 }, '{ $x21: t21 }, '{ $x22: t22 }) => '{ Tuple22($x1, $x2, $x3, $x4, $x5, $x6, $x7, $x8, $x9, $x10, $x11, $x12, $x13, $x14, $x15, $x16, $x17, $x18, $x19, $x20, $x21, $x22) } + private def ofTupleFromSeqXXL(seq: Seq[Expr[Any]])(using Quotes): Expr[Tuple] = + val tupleTpe = tupleTypeFromSeq(seq) + tupleTpe.asType match + case '[tpe] => + '{ Tuple.fromIArray(IArray(${Varargs(seq)}*)).asInstanceOf[tpe & Tuple] } + + private def tupleTypeFromSeq(seq: Seq[Expr[Any]])(using Quotes): quotes.reflect.TypeRepr = + import quotes.reflect.* + val consRef = Symbol.classSymbol("scala.*:").typeRef + seq.foldLeft(TypeRepr.of[EmptyTuple]) { (ts, expr) => + AppliedType(consRef, expr.asTerm.tpe :: ts :: Nil) + } /** Given a tuple of the form `(Expr[A1], ..., Expr[An])`, outputs a tuple `Expr[(A1, ..., An)]`. 
*/ def ofTuple[T <: Tuple: Tuple.IsMappedBy[Expr]: Type](tup: T)(using Quotes): Expr[Tuple.InverseMap[T, Expr]] = { diff --git a/library/src/scala/quoted/Quotes.scala b/library/src/scala/quoted/Quotes.scala index 3e2863f2260b..b6e5a12da2d8 100644 --- a/library/src/scala/quoted/Quotes.scala +++ b/library/src/scala/quoted/Quotes.scala @@ -1,6 +1,7 @@ package scala.quoted import scala.annotation.experimental +import scala.annotation.implicitNotFound import scala.reflect.TypeTest /** Current Quotes in scope @@ -14,14 +15,32 @@ import scala.reflect.TypeTest * } * ``` */ -transparent inline def quotes(using inline q: Quotes): q.type = q +transparent inline def quotes(using q: Quotes): q.type = q /** Quotation context provided by a macro expansion or in the scope of `scala.quoted.staging.run`. * Used to perform all operations on quoted `Expr` or `Type`. * * It contains the low-level Typed AST API metaprogramming API. * This API does not have the static type guarantees that `Expr` and `Type` provide. + * `Quotes` are generated from an enclosing `${ ... }` or `scala.staging.run`. For example: + * ```scala sc:nocompile + * import scala.quoted._ + * inline def myMacro: Expr[T] = + * ${ /* (quotes: Quotes) ?=> */ myExpr } + * def myExpr(using Quotes): Expr[T] = + * '{ f(${ /* (quotes: Quotes) ?=> */ myOtherExpr }) } + * } + * def myOtherExpr(using Quotes): Expr[U] = '{ ... } + * ``` */ + +@implicitNotFound("""explain=Maybe this method is missing a `(using Quotes)` parameter. + +Maybe that splice `$ { ... }` is missing? +Given instances of `Quotes` are generated from an enclosing splice `$ { ... }` (or `scala.staging.run` call). +A splice can be thought as a method with the following signature. 
+ def $[T](body: Quotes ?=> Expr[T]): T +""") trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => // Extension methods for `Expr[T]` @@ -467,9 +486,33 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => * otherwise the can be `Term` containing the `New` applied to the parameters of the extended class. * @param body List of members of the class. The members must align with the members of `cls`. */ + // TODO add selfOpt: Option[ValDef]? @experimental def apply(cls: Symbol, parents: List[Tree /* Term | TypeTree */], body: List[Statement]): ClassDef def copy(original: Tree)(name: String, constr: DefDef, parents: List[Tree /* Term | TypeTree */], selfOpt: Option[ValDef], body: List[Statement]): ClassDef def unapply(cdef: ClassDef): (String, DefDef, List[Tree /* Term | TypeTree */], Option[ValDef], List[Statement]) + + + /** Create the ValDef and ClassDef of a module (equivalent to an `object` declaration in source code). + * + * Equivalent to + * ``` + * def module(module: Symbol, parents: List[Tree], body: List[Statement]): (ValDef, ClassDef) = + * val modCls = module.moduleClass + * val modClassDef = ClassDef(modCls, parents, body) + * val modValDef = ValDef(module, Some(Apply(Select(New(TypeIdent(modCls)), cls.primaryConstructor), Nil))) + * List(modValDef, modClassDef) + * ``` + * + * @param module the module symbol (created using `Symbol.newModule`) + * @param parents parents of the module class + * @param body body of the module class + * @return The module lazy val definition and module class definition. + * These should be added one after the other (in that order) in the body of a class or statements of a block. + * + * @syntax markdown + */ + // TODO add selfOpt: Option[ValDef]? 
+ @experimental def module(module: Symbol, parents: List[Tree /* Term | TypeTree */], body: List[Statement]): (ValDef, ClassDef) } /** Makes extension methods on `ClassDef` available without any imports */ @@ -1386,9 +1429,9 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => * ) * ``` * - * @param owner: owner of the generated `meth` symbol - * @param tpe: Type of the definition - * @param rhsFn: Function that receives the `meth` symbol and the a list of references to the `params` + * @param owner owner of the generated `meth` symbol + * @param tpe Type of the definition + * @param rhsFn Function that receives the `meth` symbol and the a list of references to the `params` */ def apply(owner: Symbol, tpe: MethodType, rhsFn: (Symbol, List[Tree]) => Tree): Block } @@ -1721,7 +1764,7 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => /** Returns a type tree reference to the symbol * - * @param sym The type symbol for which we are creating a type tree reference. + * @param typeSymbol The type symbol for which we are creating a type tree reference. */ def ref(typeSymbol: Symbol): TypeTree } @@ -2350,7 +2393,16 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => /** Is this a given parameter clause `(using X1, ..., Xn)` or `(using x1: X1, ..., xn: Xn)` */ def isGiven: Boolean /** Is this a erased parameter clause `(erased x1: X1, ..., xn: Xn)` */ + // TODO:deprecate in 3.4 and stabilize `erasedArgs` and `hasErasedArgs`. 
+ // @deprecated("Use `hasErasedArgs`","3.4") def isErased: Boolean + + /** List of `erased` flags for each parameter of the clause */ + @experimental + def erasedArgs: List[Boolean] + /** Whether the clause has any erased parameters */ + @experimental + def hasErasedArgs: Boolean end TermParamClauseMethods /** A type parameter clause `[X1, ..., Xn]` */ @@ -2626,7 +2678,7 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => */ def isContextFunctionType: Boolean - /** Is this type an erased function type? + /** Is this type a function type with erased parameters? * * @see `isFunctionType` */ @@ -3119,9 +3171,19 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => /** Extension methods of `MethodType` */ trait MethodTypeMethods: extension (self: MethodType) - /** Is this the type of given parameter clause `(implicit X1, ..., Xn)`, `(given X1, ..., Xn)` or `(given x1: X1, ..., xn: Xn)` */ + /** Is this the type of using parameter clause `(implicit X1, ..., Xn)`, `(using X1, ..., Xn)` or `(using x1: X1, ..., xn: Xn)` */ def isImplicit: Boolean + /** Is this the type of erased parameter clause `(erased x1: X1, ..., xn: Xn)` */ + // TODO:deprecate in 3.4 and stabilize `erasedParams` and `hasErasedParams`. + // @deprecated("Use `hasErasedParams`","3.4") def isErased: Boolean + + /** List of `erased` flags for each parameters of the clause */ + @experimental + def erasedParams: List[Boolean] + /** Whether the clause has any erased parameters */ + @experimental + def hasErasedParams: Boolean def param(idx: Int): TypeRepr end extension end MethodTypeMethods @@ -3638,8 +3700,67 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => * @note As a macro can only splice code into the point at which it is expanded, all generated symbols must be * direct or indirect children of the reflection context's owner. 
*/ + // TODO: add flags and privateWithin @experimental def newClass(parent: Symbol, name: String, parents: List[TypeRepr], decls: Symbol => List[Symbol], selfType: Option[TypeRepr]): Symbol + /** Generates a new module symbol with an associated module class symbol, + * this is equivalent to an `object` declaration in source code. + * This method returns the module symbol. The module class can be accessed calling `moduleClass` on this symbol. + * + * Example usage: + * ```scala + * //{ + * given Quotes = ??? + * import quotes.reflect._ + * //} + * val moduleName: String = Symbol.freshName("MyModule") + * val parents = List(TypeTree.of[Object]) + * def decls(cls: Symbol): List[Symbol] = + * List(Symbol.newMethod(cls, "run", MethodType(Nil)(_ => Nil, _ => TypeRepr.of[Unit]), Flags.EmptyFlags, Symbol.noSymbol)) + * + * val mod = Symbol.newModule(Symbol.spliceOwner, moduleName, Flags.EmptyFlags, Flags.EmptyFlags, parents.map(_.tpe), decls, Symbol.noSymbol) + * val cls = mod.moduleClass + * val runSym = cls.declaredMethod("run").head + * + * val runDef = DefDef(runSym, _ => Some('{ println("run") }.asTerm)) + * val modDef = ClassDef.module(mod, parents, body = List(runDef)) + * + * val callRun = Apply(Select(Ref(mod), runSym), Nil) + * + * Block(modDef.toList, callRun) + * ``` + * constructs the equivalent to + * ```scala + * //{ + * given Quotes = ??? + * import quotes.reflect._ + * //} + * '{ + * object MyModule$macro$1 extends Object: + * def run(): Unit = println("run") + * MyModule$macro$1.run() + * } + * ``` + * + * @param parent The owner of the class + * @param name The name of the class + * @param modFlags extra flags with which the module symbol should be constructed + * @param clsFlags extra flags with which the module class symbol should be constructed + * @param parents The parent classes of the class. The first parent must not be a trait. 
+ * @param decls A function that takes the symbol of the module class as input and return the symbols of its declared members + * @param privateWithin the symbol within which this new method symbol should be private. May be noSymbol. + * + * This symbol starts without an accompanying definition. + * It is the meta-programmer's responsibility to provide exactly one corresponding definition by passing + * this symbol to `ClassDef.module`. + * + * @note As a macro can only splice code into the point at which it is expanded, all generated symbols must be + * direct or indirect children of the reflection context's owner. + * + * @syntax markdown + */ + @experimental def newModule(owner: Symbol, name: String, modFlags: Flags, clsFlags: Flags, parents: List[TypeRepr], decls: Symbol => List[Symbol], privateWithin: Symbol): Symbol + /** Generates a new method symbol with the given parent, name and type. * * To define a member method of a class, use the `newMethod` within the `decls` function of `newClass`. @@ -3664,9 +3785,10 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => * @param parent The owner of the method * @param name The name of the method * @param tpe The type of the method (MethodType, PolyType, ByNameType) - * @param flags extra flags to with which the symbol should be constructed + * @param flags extra flags to with which the symbol should be constructed. `Method` flag will be added. Can be `Private | Protected | Override | Deferred | Final | Method | Implicit | Given | Local | JavaStatic` * @param privateWithin the symbol within which this new method symbol should be private. May be noSymbol. */ + // Keep: `flags` doc aligned with QuotesImpl's `validMethodFlags` def newMethod(parent: Symbol, name: String, tpe: TypeRepr, flags: Flags, privateWithin: Symbol): Symbol /** Generates a new val/var/lazy val symbol with the given parent, name and type. 
@@ -3675,16 +3797,17 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => * It is the meta-programmer's responsibility to provide exactly one corresponding definition by passing * this symbol to the ValDef constructor. * - * Note: Also see reflect.let + * Note: Also see ValDef.let * * @param parent The owner of the val/var/lazy val * @param name The name of the val/var/lazy val * @param tpe The type of the val/var/lazy val - * @param flags extra flags to with which the symbol should be constructed + * @param flags extra flags to with which the symbol should be constructed. Can be `Private | Protected | Override | Deferred | Final | Param | Implicit | Lazy | Mutable | Local | ParamAccessor | Module | Package | Case | CaseAccessor | Given | Enum | JavaStatic` * @param privateWithin the symbol within which this new method symbol should be private. May be noSymbol. * @note As a macro can only splice code into the point at which it is expanded, all generated symbols must be * direct or indirect children of the reflection context's owner. */ + // Keep: `flags` doc aligned with QuotesImpl's `validValFlags` def newVal(parent: Symbol, name: String, tpe: TypeRepr, flags: Flags, privateWithin: Symbol): Symbol /** Generates a pattern bind symbol with the given parent, name and type. @@ -3695,15 +3818,28 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => * * @param parent The owner of the binding * @param name The name of the binding - * @param flags extra flags to with which the symbol should be constructed + * @param flags extra flags to with which the symbol should be constructed. `Case` flag will be added. Can be `Case` * @param tpe The type of the binding * @note As a macro can only splice code into the point at which it is expanded, all generated symbols must be * direct or indirect children of the reflection context's owner. 
*/ + // Keep: `flags` doc aligned with QuotesImpl's `validBindFlags` def newBind(parent: Symbol, name: String, flags: Flags, tpe: TypeRepr): Symbol /** Definition not available */ def noSymbol: Symbol + + /** A fresh name for class or member symbol names. + * + * Fresh names are constructed using the following format `prefix + "$macro$" + freshIndex`. + * The `freshIndex` are unique within the current source file. + * + * Examples: See `scala.annotation.MacroAnnotation` + * + * @param prefix Prefix of the fresh name + */ + @experimental + def freshName(prefix: String): String } /** Makes extension methods on `Symbol` available without any imports */ @@ -3734,6 +3870,10 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => /** The full name of this symbol up to the root package */ def fullName: String + /** Type of the definition */ + @experimental + def info: TypeRepr + /** The position of this symbol */ def pos: Option[Position] @@ -3879,17 +4019,17 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => def declaredTypes: List[Symbol] /** Type member with the given name directly declared in the class */ - @deprecated("Use typeMember", "3.1.0") + @deprecated("Use declaredType or typeMember", "3.1.0") def memberType(name: String): Symbol - /** Type member with the given name directly declared in the class */ + /** Type member with the given name declared or inherited in the class */ def typeMember(name: String): Symbol /** Type member directly declared in the class */ - @deprecated("Use typeMembers", "3.1.0") + @deprecated("Use declaredTypes or typeMembers", "3.1.0") def memberTypes: List[Symbol] - /** Type member directly declared in the class */ + /** Type member directly declared or inherited in the class */ def typeMembers: List[Symbol] /** All members directly declared in the class */ @@ -4157,6 +4297,10 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => */ def FunctionClass(arity: Int, isImplicit: 
Boolean = false, isErased: Boolean = false): Symbol + /** The `scala.runtime.ErasedFunction` built-in trait. */ + @experimental + def ErasedFunctionClass: Symbol + /** Function-like object that maps arity to symbols for classes `scala.TupleX`. * - 0th element is `NoSymbol` * - 1st element is `NoSymbol` @@ -4201,7 +4345,7 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => // FLAGS // /////////////// - /** FlagSet of a Symbol */ + /** Flags of a Symbol */ type Flags /** Module object of `type Flags` */ @@ -4213,6 +4357,13 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => /** Is this symbol `abstract` */ def Abstract: Flags + /** Is this an abstract override method? + * + * This corresponds to a definition declared as "abstract override def" in the source. + * See https://stackoverflow.com/questions/23645172/why-is-abstract-override-required-not-override-alone-in-subtrait for examples. + */ + @experimental def AbsOverride: Flags + /** Is this generated by Scala compiler. * Corresponds to ACC_SYNTHETIC in the JVM. */ @@ -4278,6 +4429,9 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => /** Is implemented as a Java static */ def JavaStatic: Flags + /** Is this an annotation defined in Java */ + @experimental def JavaAnnotation: Flags + /** Is this symbol `lazy` */ def Lazy: Flags @@ -4336,7 +4490,7 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => def StableRealizable: Flags /** Is this symbol marked as static. 
Mapped to static Java member */ - def Static: Flags + @deprecated("Use JavaStatic instead", "3.3.0") def Static: Flags /** Is this symbol to be tagged Java Synthetic */ def Synthetic: Flags @@ -4370,6 +4524,7 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => end extension } + /////////////// // POSITIONS // /////////////// @@ -4748,7 +4903,7 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => case self: ValDef => self } val body = tree.body.map(transformStatement(_)(tree.symbol)) - ClassDef.copy(tree)(tree.name, constructor.asInstanceOf[DefDef], parents, self, body) // cast as workaround for lampepfl/dotty#14821. TODO remove when referenceVersion >= 3.2.0-RC1 + ClassDef.copy(tree)(tree.name, constructor, parents, self, body) case tree: Import => Import.copy(tree)(transformTerm(tree.expr)(owner), tree.selectors) case tree: Export => diff --git a/library/src/scala/quoted/runtime/QuoteMatching.scala b/library/src/scala/quoted/runtime/QuoteMatching.scala index 2a76143e9868..c95ffe87b5dc 100644 --- a/library/src/scala/quoted/runtime/QuoteMatching.scala +++ b/library/src/scala/quoted/runtime/QuoteMatching.scala @@ -17,7 +17,7 @@ trait QuoteMatching: * - `ExprMatch.unapply('{ f(0, myInt) })('{ f(patternHole[Int], patternHole[Int]) }, _)` * will return `Some(Tuple2('{0}, '{ myInt }))` * - `ExprMatch.unapply('{ f(0, "abc") })('{ f(0, patternHole[Int]) }, _)` - * will return `None` due to the missmatch of types in the hole + * will return `None` due to the mismatch of types in the hole * * Holes: * - scala.quoted.runtime.Patterns.patternHole[T]: hole that matches an expression `x` of type `Expr[U]` @@ -27,7 +27,7 @@ trait QuoteMatching: * @param pattern `Expr[Any]` containing the pattern tree * @return None if it did not match, `Some(tup)` if it matched where `tup` contains `Expr[Ti]`` */ - def unapply[TypeBindings <: Tuple, Tup <: Tuple](scrutinee: Expr[Any])(using pattern: Expr[Any]): Option[Tup] + def unapply[TypeBindings, Tup 
<: Tuple](scrutinee: Expr[Any])(using pattern: Expr[Any]): Option[Tup] } val TypeMatch: TypeMatchModule @@ -40,5 +40,10 @@ trait QuoteMatching: * @param pattern `Type[?]` containing the pattern tree * @return None if it did not match, `Some(tup)` if it matched where `tup` contains `Type[Ti]`` */ - def unapply[TypeBindings <: Tuple, Tup <: Tuple](scrutinee: Type[?])(using pattern: Type[?]): Option[Tup] + def unapply[TypeBindings, Tup <: Tuple](scrutinee: Type[?])(using pattern: Type[?]): Option[Tup] } + +object QuoteMatching: + type KList + type KCons[+H <: AnyKind, +T <: KList] <: KList + type KNil <: KList diff --git a/library/src/scala/runtime/ErasedFunction.scala b/library/src/scala/runtime/ErasedFunction.scala new file mode 100644 index 000000000000..7e9211bba75a --- /dev/null +++ b/library/src/scala/runtime/ErasedFunction.scala @@ -0,0 +1,11 @@ +package scala.runtime + +import scala.annotation.experimental + +/** Marker trait for function types with erased parameters. + * + * This trait will be refined with an `apply` method with erased parameters: + * ErasedFunction { def apply([erased] x_1: P_1, ..., [erased] x_N: P_N): R } + * This type will be erased to FunctionL, where L = N - count(erased). 
+ */ +@experimental trait ErasedFunction diff --git a/library/src/scala/runtime/LazyVals.scala b/library/src/scala/runtime/LazyVals.scala index 0bb78aee94ad..0edbe0e748f4 100644 --- a/library/src/scala/runtime/LazyVals.scala +++ b/library/src/scala/runtime/LazyVals.scala @@ -9,19 +9,21 @@ import scala.annotation.* */ object LazyVals { @nowarn - private[this] val unsafe: sun.misc.Unsafe = - classOf[sun.misc.Unsafe].getDeclaredFields.nn.find { field => - field.nn.getType == classOf[sun.misc.Unsafe] && { - field.nn.setAccessible(true) - true - } - } - .map(_.nn.get(null).asInstanceOf[sun.misc.Unsafe]) - .getOrElse { - throw new ExceptionInInitializerError { - new IllegalStateException("Can't find instance of sun.misc.Unsafe") - } - } + private[this] val unsafe: sun.misc.Unsafe = { + def throwInitializationException() = + throw new ExceptionInInitializerError( + new IllegalStateException("Can't find instance of sun.misc.Unsafe") + ) + try + val unsafeField = classOf[sun.misc.Unsafe].getDeclaredField("theUnsafe").nn + if unsafeField.getType == classOf[sun.misc.Unsafe] then + unsafeField.setAccessible(true) + unsafeField.get(null).asInstanceOf[sun.misc.Unsafe] + else + throwInitializationException() + catch case _: NoSuchFieldException => + throwInitializationException() + } private[this] val base: Int = { val processors = java.lang.Runtime.getRuntime.nn.availableProcessors() @@ -43,28 +45,25 @@ object LazyVals { /* ------------- Start of public API ------------- */ - @experimental - sealed trait LazyValControlState + // This trait extends Serializable to fix #16806 that caused a race condition + sealed trait LazyValControlState extends Serializable /** * Used to indicate the state of a lazy val that is being * evaluated and of which other threads await the result. 
*/ - @experimental final class Waiting extends CountDownLatch(1) with LazyValControlState /** * Used to indicate the state of a lazy val that is currently being * evaluated with no other thread awaiting its result. */ - @experimental object Evaluating extends LazyValControlState /** * Used to indicate the state of a lazy val that has been evaluated to * `null`. */ - @experimental object NullValue extends LazyValControlState final val BITS_PER_LAZY_VAL = 2L @@ -84,7 +83,6 @@ object LazyVals { unsafe.compareAndSwapLong(t, offset, e, n) } - @experimental def objCAS(t: Object, offset: Long, exp: Object, n: Object): Boolean = { if (debug) println(s"objCAS($t, $exp, $n)") @@ -145,7 +143,6 @@ object LazyVals { r } - @experimental def getStaticFieldOffset(field: java.lang.reflect.Field): Long = { @nowarn val r = unsafe.staticFieldOffset(field) diff --git a/library/src/scala/runtime/stdLibPatches/Predef.scala b/library/src/scala/runtime/stdLibPatches/Predef.scala index 3b7d009ff6f3..09feaf11c31d 100644 --- a/library/src/scala/runtime/stdLibPatches/Predef.scala +++ b/library/src/scala/runtime/stdLibPatches/Predef.scala @@ -31,7 +31,7 @@ object Predef: * @tparam T the type of the value to be summoned * @return the given value typed: the provided type parameter */ - transparent inline def summon[T](using inline x: T): x.type = x + transparent inline def summon[T](using x: T): x.type = x // Extension methods for working with explicit nulls diff --git a/library/src/scala/runtime/stdLibPatches/language.scala b/library/src/scala/runtime/stdLibPatches/language.scala index 5c01f66ffd46..091e75fa06e1 100644 --- a/library/src/scala/runtime/stdLibPatches/language.scala +++ b/library/src/scala/runtime/stdLibPatches/language.scala @@ -51,6 +51,7 @@ object language: /** Experimental support for using indentation for arguments */ @compileTimeOnly("`fewerBraces` can only be used at compile time in import statements") + @deprecated("`fewerBraces` is now standard, no language import is 
needed", since = "3.3") object fewerBraces /** Experimental support for typechecked exception capabilities @@ -60,6 +61,22 @@ object language: @compileTimeOnly("`saferExceptions` can only be used at compile time in import statements") object saferExceptions + /** Adds support for clause interleaving: + * Methods can now have as many type clauses as they like, this allows to have type bounds depend on terms: `def f(x: Int)[A <: x.type]: A` + * + * @see [[http://dotty.epfl.ch/docs/reference/other-new-features/explicit-nulls.html]] + */ + @compileTimeOnly("`clauseInterleaving` can only be used at compile time in import statements") + object clauseInterleaving + + /** Adds support for relaxed imports of extension methods. + * Extension methods with the same name can be imported from several places. + * + * @see [[http://dotty.epfl.ch/docs/reference/contextual/extension-methods]] + */ + @compileTimeOnly("`relaxedExtensionImports` can only be used at compile time in import statements") + object relaxedExtensionImports + /** Experimental support for pure function type syntax * * @see [[https://dotty.epfl.ch/docs/reference/experimental/purefuns]] @@ -73,6 +90,14 @@ object language: */ @compileTimeOnly("`captureChecking` can only be used at compile time in import statements") object captureChecking + + /** Experimental support for automatic conversions of arguments, without requiring + * a langauge import `import scala.language.implicitConversions`. + * + * @see [[https://dotty.epfl.ch/docs/reference/experimental/into-modifier]] + */ + @compileTimeOnly("`into` can only be used at compile time in import statements") + object into end experimental /** The deprecated object contains features that are no longer officially suypported in Scala. @@ -192,7 +217,6 @@ object language: @compileTimeOnly("`3.2` can only be used at compile time in import statements") object `3.2` -/* This can be added when we go to 3.3 /** Set source version to 3.3-migration. 
* * @see [[https://docs.scala-lang.org/scala3/guides/migration/compatibility-intro.html]] @@ -206,5 +230,5 @@ object language: */ @compileTimeOnly("`3.3` can only be used at compile time in import statements") object `3.3` -*/ + end language diff --git a/library/src/scala/util/NotGiven.scala b/library/src/scala/util/NotGiven.scala index 99cc903d4426..973e709042cb 100644 --- a/library/src/scala/util/NotGiven.scala +++ b/library/src/scala/util/NotGiven.scala @@ -31,11 +31,13 @@ trait LowPriorityNotGiven { } object NotGiven extends LowPriorityNotGiven { + private val cachedValue = new NotGiven[Nothing]() + /** A value of type `NotGiven` to signal a successful search for `NotGiven[C]` (i.e. a failing * search for `C`). A reference to this value will be explicitly constructed by Dotty's * implicit search algorithm */ - def value: NotGiven[Nothing] = new NotGiven[Nothing]() + def value: NotGiven[Nothing] = cachedValue /** One of two ambiguous methods used to emulate negation in Scala 2 */ given amb1[T](using ev: T): NotGiven[T] = ??? diff --git a/library/src/scala/util/boundary.scala b/library/src/scala/util/boundary.scala new file mode 100644 index 000000000000..2edd754bbb93 --- /dev/null +++ b/library/src/scala/util/boundary.scala @@ -0,0 +1,64 @@ +package scala.util +import scala.annotation.implicitNotFound + +/** A boundary that can be exited by `break` calls. + * `boundary` and `break` represent a unified and superior alternative for the + * `scala.util.control.NonLocalReturns` and `scala.util.control.Breaks` APIs. + * The main differences are: + * + * - Unified names: `boundary` to establish a scope, `break` to leave it. + * `break` can optionally return a value. + * - Integration with exceptions. `break`s are logically non-fatal exceptions. + * The `Break` exception class extends `RuntimeException` and is optimized so + * that stack trace generation is suppressed. 
+ * - Better performance: breaks to enclosing scopes in the same method can + * be rewritten to jumps. + * + * Example usage: + * + * import scala.util.boundary, boundary.break + * + * def firstIndex[T](xs: List[T], elem: T): Int = + * boundary: + * for (x, i) <- xs.zipWithIndex do + * if x == elem then break(i) + * -1 + */ +object boundary: + + /** User code should call `break.apply` instead of throwing this exception + * directly. + */ + final class Break[T] private[boundary](val label: Label[T], val value: T) + extends RuntimeException( + /*message*/ null, /*cause*/ null, /*enableSuppression=*/ false, /*writableStackTrace*/ false) + + /** Labels are targets indicating which boundary will be exited by a `break`. + */ + @implicitNotFound("explain=A Label is generated from an enclosing `scala.util.boundary` call.\nMaybe that boundary is missing?") + final class Label[-T] + + /** Abort current computation and instead return `value` as the value of + * the enclosing `boundary` call that created `label`. + */ + def break[T](value: T)(using label: Label[T]): Nothing = + throw Break(label, value) + + /** Abort current computation and instead continue after the `boundary` call that + * created `label`. + */ + def break()(using label: Label[Unit]): Nothing = + throw Break(label, ()) + + /** Run `body` with freshly generated label as implicit argument. Catch any + * breaks associated with that label and return their results instead of + * `body`'s result. 
+ */ + inline def apply[T](inline body: Label[T] ?=> T): T = + val local = Label[T]() + try body(using local) + catch case ex: Break[T] @unchecked => + if ex.label eq local then ex.value + else throw ex + +end boundary diff --git a/library/src/scala/util/control/NonLocalReturns.scala b/library/src/scala/util/control/NonLocalReturns.scala index c32e0ff16457..ad4dc05f36ac 100644 --- a/library/src/scala/util/control/NonLocalReturns.scala +++ b/library/src/scala/util/control/NonLocalReturns.scala @@ -7,8 +7,19 @@ package scala.util.control * import scala.util.control.NonLocalReturns.* * * returning { ... throwReturn(x) ... } + * + * This API has been deprecated. Its functionality is better served by + * + * - `scala.util.boundary` in place of `returning` + * - `scala.util.break` in place of `throwReturn` + * + * The new abstractions work with plain `RuntimeExceptions` and are more + * performant, since returns within the scope of the same method can be + * rewritten by the compiler to jumps. */ +@deprecated("Use scala.util.boundary instead", "3.3") object NonLocalReturns { + @deprecated("Use scala.util.boundary.Break instead", "3.3") class ReturnThrowable[T] extends ControlThrowable { private var myResult: T = _ def throwReturn(result: T): Nothing = { @@ -19,10 +30,12 @@ object NonLocalReturns { } /** Performs a nonlocal return by throwing an exception. */ + @deprecated("Use scala.util.boundary.break instead", "3.3") def throwReturn[T](result: T)(using returner: ReturnThrowable[? >: T]): Nothing = returner.throwReturn(result) /** Enable nonlocal returns in `op`. 
*/ + @deprecated("Use scala.util.boundary instead", "3.3") def returning[T](op: ReturnThrowable[T] ?=> T): T = { val returner = new ReturnThrowable[T] try op(using returner) diff --git a/project/Build.scala b/project/Build.scala index 5fab2b80229a..f3ec6bb54548 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -80,9 +80,9 @@ object DottyJSPlugin extends AutoPlugin { object Build { import ScaladocConfigs._ - val referenceVersion = "3.2.1" + val referenceVersion = "3.3.0" - val baseVersion = "3.2.2" + val baseVersion = "3.3.1" // Versions used by the vscode extension to create a new project // This should be the latest published releases. @@ -98,7 +98,7 @@ object Build { * set to 3.1.3. If it is going to be 3.1.0, it must be set to the latest * 3.0.x release. */ - val previousDottyVersion = "3.2.1" + val previousDottyVersion = "3.3.0" object CompatMode { final val BinaryCompatible = 0 @@ -360,6 +360,7 @@ object Build { // Settings used when compiling dotty with a non-bootstrapped dotty lazy val commonBootstrappedSettings = commonDottySettings ++ NoBloopExport.settings ++ Seq( + // To enable support of scaladoc and language-server projects you need to change this to true and use sbt as your build server bspEnabled := false, (Compile / unmanagedSourceDirectories) += baseDirectory.value / "src-bootstrapped", @@ -489,7 +490,8 @@ object Build { settings(commonJavaSettings). settings(commonMiMaSettings). 
settings( - versionScheme := Some("semver-spec") + versionScheme := Some("semver-spec"), + mimaBinaryIssueFilters ++= MiMaFilters.Interfaces ) /** Find an artifact with the given `name` in `classpath` */ @@ -545,7 +547,7 @@ object Build { // get libraries onboard libraryDependencies ++= Seq( - "org.scala-lang.modules" % "scala-asm" % "9.3.0-scala-1", // used by the backend + "org.scala-lang.modules" % "scala-asm" % "9.5.0-scala-1", // used by the backend Dependencies.oldCompilerInterface, // we stick to the old version to avoid deprecation warnings "org.jline" % "jline-reader" % "3.19.0", // used by the REPL "org.jline" % "jline-terminal" % "3.19.0", @@ -607,7 +609,7 @@ object Build { if (args.contains("--help")) { println( s""" - |usage: testCompilation [--help] [--from-tasty] [--update-checkfiles] [] + |usage: testCompilation [--help] [--from-tasty] [--update-checkfiles] [--failed] [] | |By default runs tests in dotty.tools.dotc.*CompilationTests and dotty.tools.dotc.coverage.*, |excluding tests tagged with dotty.SlowTests. 
@@ -615,6 +617,7 @@ object Build { | --help show this message | --from-tasty runs tests in dotty.tools.dotc.FromTastyTests | --update-checkfiles override the checkfiles that did not match with the current output + | --failed re-run only failed tests | substring of the path of the tests file | """.stripMargin @@ -623,11 +626,13 @@ object Build { } else { val updateCheckfile = args.contains("--update-checkfiles") + val rerunFailed = args.contains("--failed") val fromTasty = args.contains("--from-tasty") - val args1 = if (updateCheckfile | fromTasty) args.filter(x => x != "--update-checkfiles" && x != "--from-tasty") else args + val args1 = if (updateCheckfile | fromTasty | rerunFailed) args.filter(x => x != "--update-checkfiles" && x != "--from-tasty" && x != "--failed") else args val test = if (fromTasty) "dotty.tools.dotc.FromTastyTests" else "dotty.tools.dotc.*CompilationTests dotty.tools.dotc.coverage.*" val cmd = s" $test -- --exclude-categories=dotty.SlowTests" + (if (updateCheckfile) " -Ddotty.tests.updateCheckfiles=TRUE" else "") + + (if (rerunFailed) " -Ddotty.tests.rerunFailed=TRUE" else "") + (if (args1.nonEmpty) " -Ddotty.tests.filter=" + args1.mkString(" ") else "") (Test / testOnly).toTask(cmd) } @@ -839,6 +844,7 @@ object Build { "-sourcepath", (Compile / sourceDirectories).value.map(_.getAbsolutePath).distinct.mkString(File.pathSeparator), "-Yexplicit-nulls", ), + (Compile / doc / scalacOptions) ++= ScaladocConfigs.DefaultGenerationSettings.value.settings ) lazy val `scala3-library` = project.in(file("library")).asDottyLibrary(NonBootstrapped) @@ -920,7 +926,6 @@ object Build { lazy val `stdlib-bootstrapped` = project.in(file("stdlib-bootstrapped")). withCommonSettings(Bootstrapped). dependsOn(dottyCompiler(Bootstrapped) % "provided; compile->runtime; test->test"). - dependsOn(`scala3-tasty-inspector` % "test->test"). settings(commonBootstrappedSettings). 
settings( moduleName := "scala-library", @@ -1051,15 +1056,13 @@ object Build { // with the bootstrapped library on the classpath. lazy val `scala3-sbt-bridge-tests` = project.in(file("sbt-bridge/test")). dependsOn(dottyCompiler(Bootstrapped) % Test). + dependsOn(`scala3-sbt-bridge`). settings(commonBootstrappedSettings). settings( Compile / sources := Seq(), Test / scalaSource := baseDirectory.value, Test / javaSource := baseDirectory.value, - - // Tests disabled until zinc-api-info cross-compiles with 2.13, - // alternatively we could just copy in sources the part of zinc-api-info we need. - Test / sources := Seq() + libraryDependencies += ("org.scala-sbt" %% "zinc-apiinfo" % "1.8.0" % Test).cross(CrossVersion.for3Use2_13) ) lazy val `scala3-language-server` = project.in(file("language-server")). @@ -1127,6 +1130,7 @@ object Build { enablePlugins(DottyJSPlugin). dependsOn(`scala3-library-bootstrappedJS`). settings( + bspEnabled := false, scalacOptions --= Seq("-Xfatal-warnings", "-deprecation"), // Required to run Scala.js tests. 
@@ -1189,6 +1193,9 @@ object Build { "isFullOpt" -> (stage == FullOptStage), "compliantAsInstanceOfs" -> (sems.asInstanceOfs == CheckedBehavior.Compliant), "compliantArrayIndexOutOfBounds" -> (sems.arrayIndexOutOfBounds == CheckedBehavior.Compliant), + "compliantArrayStores" -> (sems.arrayStores == CheckedBehavior.Compliant), + "compliantNegativeArraySizes" -> (sems.negativeArraySizes == CheckedBehavior.Compliant), + "compliantStringIndexOutOfBounds" -> (sems.stringIndexOutOfBounds == CheckedBehavior.Compliant), "compliantModuleInit" -> (sems.moduleInit == CheckedBehavior.Compliant), "strictFloats" -> sems.strictFloats, "productionMode" -> sems.productionMode, @@ -1211,6 +1218,18 @@ object Build { org.scalajs.jsenv.Input.Script(f) +: (Test / jsEnvInput).value }, + Test / unmanagedSourceDirectories ++= { + val linkerConfig = scalaJSStage.value match { + case FastOptStage => (Test / fastLinkJS / scalaJSLinkerConfig).value + case FullOptStage => (Test / fullLinkJS / scalaJSLinkerConfig).value + } + + if (linkerConfig.moduleKind != ModuleKind.NoModule && !linkerConfig.closureCompiler) + Seq(baseDirectory.value / "test-require-multi-modules") + else + Nil + }, + (Compile / managedSources) ++= { val dir = fetchScalaJSSource.value ( @@ -1265,6 +1284,14 @@ object Build { ) }, + /* For some reason, in Scala 3, the implementation of IterableDefaultTest + * resolves to `scala.collection.ArrayOps.ArrayIterator`, whose `next()` + * method is not compliant when called past the last element on Scala.js. + * It relies on catching an `ArrayIndexOutOfBoundsException`. + * We have to ignore it here. 
+ */ + Test / testOptions := Seq(Tests.Filter(_ != "org.scalajs.testsuite.javalib.lang.IterableDefaultTest")), + Test / managedResources ++= { val testDir = fetchScalaJSSource.value / "test-suite/js/src/test" @@ -1300,6 +1327,7 @@ object Build { Seq( "-Ddotty.tests.classes.dottyLibraryJS=" + dottyLibraryJSJar, + "-Ddotty.tests.classes.scalaJSJavalib=" + findArtifactPath(externalJSDeps, "scalajs-javalib"), "-Ddotty.tests.classes.scalaJSLibrary=" + findArtifactPath(externalJSDeps, "scalajs-library_2.13"), ) }, @@ -1820,9 +1848,10 @@ object Build { settings(disableDocSetting). settings( versionScheme := Some("semver-spec"), - if (mode == Bootstrapped) { - commonMiMaSettings - } else { + if (mode == Bootstrapped) Def.settings( + commonMiMaSettings, + mimaBinaryIssueFilters ++= MiMaFilters.TastyCore, + ) else { Nil } ) @@ -1877,8 +1906,7 @@ object ScaladocConfigs { ) } - lazy val DefaultGenerationConfig = Def.task { - def distLocation = (dist / pack).value + lazy val DefaultGenerationSettings = Def.task { def projectVersion = version.value def socialLinks = SocialLinks(List( "github::https://github.com/lampepfl/dotty", @@ -1919,6 +1947,11 @@ object ScaladocConfigs { ) } + lazy val DefaultGenerationConfig = Def.task { + def distLocation = (dist / Compile / pack).value + DefaultGenerationSettings.value + } + lazy val Scaladoc = Def.task { DefaultGenerationConfig.value .add(UseJavacp(true)) diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 3708ec528c79..54bc6ecadfe0 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -4,27 +4,30 @@ import sbt._ * to ensure the same version of the dependency is used in all projects */ object Dependencies { - private val jacksonVersion = "2.13.3" + private val jacksonVersion = "2.15.1" val `jackson-databind` = "com.fasterxml.jackson.core" % "jackson-databind" % jacksonVersion val `jackson-dataformat-yaml` = "com.fasterxml.jackson.dataformat" % "jackson-dataformat-yaml" % jacksonVersion - 
private val flexmarkVersion = "0.42.12" + // Freeze on 0.62.x as 0.64.0 requires Java 11 + private val flexmarkVersion = "0.62.2" val flexmarkDeps = Seq( "com.vladsch.flexmark" % "flexmark" % flexmarkVersion, - "com.vladsch.flexmark" % "flexmark-html-parser" % flexmarkVersion, + "com.vladsch.flexmark" % "flexmark-util-ast" % flexmarkVersion, + "com.vladsch.flexmark" % "flexmark-util-data" % flexmarkVersion, + "com.vladsch.flexmark" % "flexmark-util-html" % flexmarkVersion, "com.vladsch.flexmark" % "flexmark-ext-anchorlink" % flexmarkVersion, "com.vladsch.flexmark" % "flexmark-ext-autolink" % flexmarkVersion, "com.vladsch.flexmark" % "flexmark-ext-emoji" % flexmarkVersion, "com.vladsch.flexmark" % "flexmark-ext-gfm-strikethrough" % flexmarkVersion, - "com.vladsch.flexmark" % "flexmark-ext-gfm-tables" % flexmarkVersion, "com.vladsch.flexmark" % "flexmark-ext-gfm-tasklist" % flexmarkVersion, "com.vladsch.flexmark" % "flexmark-ext-wikilink" % flexmarkVersion, + "com.vladsch.flexmark" % "flexmark-ext-tables" % flexmarkVersion, "com.vladsch.flexmark" % "flexmark-ext-yaml-front-matter" % flexmarkVersion, ) - val newCompilerInterface = "org.scala-sbt" % "compiler-interface" % "1.7.1" + val newCompilerInterface = "org.scala-sbt" % "compiler-interface" % "1.8.0" val oldCompilerInterface = "org.scala-sbt" % "compiler-interface" % "1.3.5" } diff --git a/project/DocumentationWebsite.scala b/project/DocumentationWebsite.scala index ec32144ac0a5..5f8e499af62f 100644 --- a/project/DocumentationWebsite.scala +++ b/project/DocumentationWebsite.scala @@ -1,4 +1,5 @@ import java.io.File +import java.net.URI import java.nio.file.Paths import sbt._ import Build._ @@ -42,13 +43,13 @@ object DocumentationWebsite { import _root_.scala.concurrent._ import _root_.scala.concurrent.duration.Duration import ExecutionContext.Implicits.global - val inkuireVersion = "1.0.0-M3" + val inkuireVersion = "v1.0.0-M7" val inkuireLink = 
s"https://github.com/VirtusLab/Inkuire/releases/download/$inkuireVersion/inkuire.js" val inkuireDestinationFile = baseDest / "dotty_res" / "scripts" / "inkuire.js" sbt.IO.touch(inkuireDestinationFile) def tryFetch(retries: Int, timeout: Duration): Unit = { - val downloadProcess = (new java.net.URL(inkuireLink) #> inkuireDestinationFile).run() + val downloadProcess = (new URI(inkuireLink).toURL #> inkuireDestinationFile).run() val result: Future[Int] = Future(blocking(downloadProcess.exitValue())) try { Await.result(result, timeout) match { diff --git a/project/MiMaFilters.scala b/project/MiMaFilters.scala index 81510d22d2c2..112a5601615c 100644 --- a/project/MiMaFilters.scala +++ b/project/MiMaFilters.scala @@ -3,21 +3,20 @@ import com.typesafe.tools.mima.core._ object MiMaFilters { val Library: Seq[ProblemFilter] = Seq( - ProblemFilters.exclude[MissingClassProblem]("scala.annotation.internal.MappedAlternative"), + ProblemFilters.exclude[MissingClassProblem]("scala.annotation.unchecked.uncheckedCaptures"), - ProblemFilters.exclude[DirectMissingMethodProblem]("scala.runtime.LazyVals.getStaticFieldOffset"), - ProblemFilters.exclude[DirectMissingMethodProblem]("scala.runtime.LazyVals.objCAS"), - ProblemFilters.exclude[MissingClassProblem]("scala.runtime.LazyVals$LazyValControlState"), - ProblemFilters.exclude[MissingClassProblem]("scala.runtime.LazyVals$Evaluating$"), - ProblemFilters.exclude[MissingClassProblem]("scala.runtime.LazyVals$NullValue$"), - ProblemFilters.exclude[MissingClassProblem]("scala.runtime.LazyVals$Waiting"), - ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.LazyVals.Evaluating"), - ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.LazyVals.NullValue"), + // Scala.js only: new runtime support class in 3.2.3; not available to users + ProblemFilters.exclude[MissingClassProblem]("scala.scalajs.runtime.AnonFunctionXXL"), - 
ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language#experimental.pureFunctions"), - ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language#experimental.captureChecking"), - ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$experimental$pureFunctions$"), - ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$experimental$captureChecking$"), - ProblemFilters.exclude[MissingClassProblem]("scala.caps"), + // New experimental features in 3.3.X + ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language#experimental.clauseInterleaving"), + ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$experimental$clauseInterleaving$"), + ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language#experimental.relaxedExtensionImports"), + ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$experimental$relaxedExtensionImports$"), + // end of New experimental features in 3.3.X + ) + val TastyCore: Seq[ProblemFilter] = Seq( + ) + val Interfaces: Seq[ProblemFilter] = Seq( ) } diff --git a/project/ScaladocGeneration.scala b/project/ScaladocGeneration.scala index c6c4393c071f..fd972311da1d 100644 --- a/project/ScaladocGeneration.scala +++ b/project/ScaladocGeneration.scala @@ -141,6 +141,7 @@ object ScaladocGeneration { def remove[T <: Arg[_]: ClassTag]: GenerationConfig def withTargets(targets: Seq[String]): GenerationConfig def serialize: String + def settings: Seq[String] } object GenerationConfig { @@ -173,6 +174,9 @@ object ScaladocGeneration { ++ targets ).mkString(" ") + override def settings: Seq[String] = + args.map(_.serialize) ++ targets + private def argsWithout[T <: Arg[_]]( implicit tag: ClassTag[T] ): (Option[T], Seq[Arg[_]]) = args.foldLeft[(Option[T], Seq[Arg[_]])]((None, Seq.empty)) { diff --git a/project/build.properties b/project/build.properties 
index 22af2628c413..46e43a97ed86 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.7.1 +sbt.version=1.8.2 diff --git a/project/build.sbt b/project/build.sbt index e19492c42022..188dfa5c6702 100644 --- a/project/build.sbt +++ b/project/build.sbt @@ -1,7 +1,4 @@ // Used by VersionUtil to get gitHash and commitDate libraryDependencies += "org.eclipse.jgit" % "org.eclipse.jgit" % "4.11.0.201803080745-r" - -Compile / unmanagedSourceDirectories += - baseDirectory.value / "../language-server/src/dotty/tools/languageserver/config" libraryDependencies += Dependencies.`jackson-databind` diff --git a/project/plugins.sbt b/project/plugins.sbt index b6bc5f1184b6..ccbcdeed22fc 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -2,16 +2,20 @@ // // e.g. addSbtPlugin("com.github.mpeltonen" % "sbt-idea" % "1.1.0") -addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.10.1") +// some plugins haven't moved to scala-xml 2.x yet +libraryDependencySchemes += + "org.scala-lang.modules" %% "scala-xml" % VersionScheme.Always -addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.10") +addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.12.0") -addSbtPlugin("com.jsuereth" % "sbt-pgp" % "2.0.0") +addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.20") -addSbtPlugin("org.xerial.sbt" % "sbt-pack" % "0.13") +addSbtPlugin("com.github.sbt" % "sbt-pgp" % "2.2.1") + +addSbtPlugin("org.xerial.sbt" % "sbt-pack" % "0.17") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.3") -addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.9.0") +addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.11.0") -addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.0") +addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.2") diff --git a/project/scripts/bisect.scala b/project/scripts/bisect.scala new file mode 100755 index 000000000000..2e554a885c79 --- /dev/null +++ b/project/scripts/bisect.scala @@ -0,0 +1,248 @@ +/* +This script will bisect a problem with the 
compiler based on success/failure of the validation script passed as an argument. +It starts with a fast bisection on released nightly builds. +Then it will bisect the commits between the last nightly that worked and the first nightly that failed. +Look at the `usageMessage` below for more details. +*/ + + +import sys.process._ +import scala.io.Source +import java.io.File +import java.nio.file.attribute.PosixFilePermissions +import java.nio.charset.StandardCharsets +import java.nio.file.Files + +val usageMessage = """ + |Usage: + | > scala-cli project/scripts/bisect.scala -- [] + | + |The should be one of: + |* compile ... + |* run ... + |* + | + |The arguments for 'compile' and 'run' should be paths to the source file(s) and optionally additional options passed directly to scala-cli. + | + |A custom validation script should be executable and accept a single parameter, which will be the scala version to validate. + |Look at bisect-cli-example.sh and bisect-expect-example.exp for reference. + |If you want to use one of the example scripts - use a copy of the file instead of modifying it in place because that might mess up the checkout. + | + |The optional may be any combination of: + |* --dry-run + | Don't try to bisect - just make sure the validation command works correctly + | + |* --releases + | Bisect only releases from the given range (defaults to all releases). + | The range format is ..., where both and are optional, e.g. + | * 3.1.0-RC1-bin-20210827-427d313-NIGHTLY..3.2.1-RC1-bin-20220716-bb9c8ff-NIGHTLY + | * 3.2.1-RC1-bin-20220620-de3a82c-NIGHTLY.. + | * ..3.3.0-RC1-bin-20221124-e25362d-NIGHTLY + | The ranges are treated as inclusive. + | + |* --bootstrapped + | Publish locally and test a bootstrapped compiler rather than a nonboostrapped one. + | + |* --should-fail + | Expect the validation command to fail rather that succeed. This can be used e.g. to find out when some illegal code started to compile. 
+ | + |Warning: The bisect script should not be run multiple times in parallel because of a potential race condition while publishing artifacts locally. + +""".stripMargin + +@main def run(args: String*): Unit = + val scriptOptions = + try ScriptOptions.fromArgs(args) + catch + case _ => + sys.error(s"Wrong script parameters.\n${usageMessage}") + + val validationScript = scriptOptions.validationCommand.validationScript + val releases = Releases.fromRange(scriptOptions.releasesRange) + val releaseBisect = ReleaseBisect(validationScript, shouldFail = scriptOptions.shouldFail, releases) + + releaseBisect.verifyEdgeReleases() + + if (!scriptOptions.dryRun) then + val (lastGoodRelease, firstBadRelease) = releaseBisect.bisectedGoodAndBadReleases() + println(s"Last good release: ${lastGoodRelease.version}") + println(s"First bad release: ${firstBadRelease.version}") + println("\nFinished bisecting releases\n") + + val commitBisect = CommitBisect(validationScript, shouldFail = scriptOptions.shouldFail, bootstrapped = scriptOptions.bootstrapped, lastGoodRelease.hash, firstBadRelease.hash) + commitBisect.bisect() + + +case class ScriptOptions(validationCommand: ValidationCommand, dryRun: Boolean, bootstrapped: Boolean, releasesRange: ReleasesRange, shouldFail: Boolean) +object ScriptOptions: + def fromArgs(args: Seq[String]) = + val defaultOptions = ScriptOptions( + validationCommand = null, + dryRun = false, + bootstrapped = false, + ReleasesRange(first = None, last = None), + shouldFail = false + ) + parseArgs(args, defaultOptions) + + private def parseArgs(args: Seq[String], options: ScriptOptions): ScriptOptions = + args match + case "--dry-run" :: argsRest => parseArgs(argsRest, options.copy(dryRun = true)) + case "--bootstrapped" :: argsRest => parseArgs(argsRest, options.copy(bootstrapped = true)) + case "--releases" :: argsRest => + val range = ReleasesRange.tryParse(argsRest.head).get + parseArgs(argsRest.tail, options.copy(releasesRange = range)) + case 
"--should-fail" :: argsRest => parseArgs(argsRest, options.copy(shouldFail = true)) + case _ => + val command = ValidationCommand.fromArgs(args) + options.copy(validationCommand = command) + +enum ValidationCommand: + case Compile(args: Seq[String]) + case Run(args: Seq[String]) + case CustomValidationScript(scriptFile: File) + + def validationScript: File = this match + case Compile(args) => + ValidationScript.tmpScalaCliScript(command = "compile", args) + case Run(args) => + ValidationScript.tmpScalaCliScript(command = "run", args) + case CustomValidationScript(scriptFile) => + ValidationScript.copiedFrom(scriptFile) + +object ValidationCommand: + def fromArgs(args: Seq[String]) = args match + case Seq("compile", commandArgs*) => Compile(commandArgs) + case Seq("run", commandArgs*) => Run(commandArgs) + case Seq(path) => CustomValidationScript(new File(path)) + + +object ValidationScript: + def copiedFrom(file: File): File = + val fileContent = scala.io.Source.fromFile(file).mkString + tmpScript(fileContent) + + def tmpScalaCliScript(command: String, args: Seq[String]): File = tmpScript(s""" + |#!/usr/bin/env bash + |scala-cli ${command} -S "$$1" --server=false ${args.mkString(" ")} + |""".stripMargin + ) + + private def tmpScript(content: String): File = + val executableAttr = PosixFilePermissions.asFileAttribute(PosixFilePermissions.fromString("rwxr-xr-x")) + val tmpPath = Files.createTempFile("scala-bisect-validator", "", executableAttr) + val tmpFile = tmpPath.toFile + + print(s"Bisecting with validation script: ${tmpPath.toAbsolutePath}\n") + print("#####################################\n") + print(s"${content}\n\n") + print("#####################################\n\n") + + tmpFile.deleteOnExit() + Files.write(tmpPath, content.getBytes(StandardCharsets.UTF_8)) + tmpFile + + +case class ReleasesRange(first: Option[String], last: Option[String]): + def filter(releases: Seq[Release]) = + def releaseIndex(version: String): Int = + val index = 
releases.indexWhere(_.version == version) + assert(index > 0, s"${version} matches no nightly compiler release") + index + + val startIdx = first.map(releaseIndex(_)).getOrElse(0) + val endIdx = last.map(releaseIndex(_) + 1).getOrElse(releases.length) + val filtered = releases.slice(startIdx, endIdx).toVector + assert(filtered.nonEmpty, "No matching releases") + filtered + +object ReleasesRange: + def all = ReleasesRange(None, None) + def tryParse(range: String): Option[ReleasesRange] = range match + case s"${first}...${last}" => Some(ReleasesRange( + Some(first).filter(_.nonEmpty), + Some(last).filter(_.nonEmpty) + )) + case _ => None + +class Releases(val releases: Vector[Release]) + +object Releases: + lazy val allReleases: Vector[Release] = + val re = raw"""(?<=title=")(.+-bin-\d{8}-\w{7}-NIGHTLY)(?=/")""".r + val html = Source.fromURL("https://repo1.maven.org/maven2/org/scala-lang/scala3-compiler_3/") + re.findAllIn(html.mkString).map(Release.apply).toVector + + def fromRange(range: ReleasesRange): Vector[Release] = range.filter(allReleases) + +case class Release(version: String): + private val re = raw".+-bin-(\d{8})-(\w{7})-NIGHTLY".r + def date: String = + version match + case re(date, _) => date + case _ => sys.error(s"Could not extract date from release name: $version") + def hash: String = + version match + case re(_, hash) => hash + case _ => sys.error(s"Could not extract hash from release name: $version") + + override def toString: String = version + + +class ReleaseBisect(validationScript: File, shouldFail: Boolean, allReleases: Vector[Release]): + assert(allReleases.length > 1, "Need at least 2 releases to bisect") + + private val isGoodReleaseCache = collection.mutable.Map.empty[Release, Boolean] + + def verifyEdgeReleases(): Unit = + println(s"Verifying the first release: ${allReleases.head.version}") + assert(isGoodRelease(allReleases.head), s"The evaluation script unexpectedly failed for the first checked release") + println(s"Verifying the last 
release: ${allReleases.last.version}") + assert(!isGoodRelease(allReleases.last), s"The evaluation script unexpectedly succeeded for the last checked release") + + def bisectedGoodAndBadReleases(): (Release, Release) = + val firstBadRelease = bisect(allReleases) + assert(!isGoodRelease(firstBadRelease), s"Bisection error: the 'first bad release' ${firstBadRelease.version} is not a bad release") + val lastGoodRelease = firstBadRelease.previous + assert(isGoodRelease(lastGoodRelease), s"Bisection error: the 'last good release' ${lastGoodRelease.version} is not a good release") + (lastGoodRelease, firstBadRelease) + + extension (release: Release) private def previous: Release = + val idx = allReleases.indexOf(release) + allReleases(idx - 1) + + private def bisect(releases: Vector[Release]): Release = + if releases.length == 2 then + if isGoodRelease(releases.head) then releases.last + else releases.head + else + val mid = releases(releases.length / 2) + if isGoodRelease(mid) then bisect(releases.drop(releases.length / 2)) + else bisect(releases.take(releases.length / 2 + 1)) + + private def isGoodRelease(release: Release): Boolean = + isGoodReleaseCache.getOrElseUpdate(release, { + println(s"Testing ${release.version}") + val result = Seq(validationScript.getAbsolutePath, release.version).! 
+ val isGood = if(shouldFail) result != 0 else result == 0 // invert the process status if failure was expected + println(s"Test result: ${release.version} is a ${if isGood then "good" else "bad"} release\n") + isGood + }) + +class CommitBisect(validationScript: File, shouldFail: Boolean, bootstrapped: Boolean, lastGoodHash: String, fistBadHash: String): + def bisect(): Unit = + println(s"Starting bisecting commits $lastGoodHash..$fistBadHash\n") + val scala3CompilerProject = if bootstrapped then "scala3-compiler-bootstrapped" else "scala3-compiler" + val scala3Project = if bootstrapped then "scala3-bootstrapped" else "scala3" + val validationCommandStatusModifier = if shouldFail then "! " else "" // invert the process status if failure was expected + val bisectRunScript = s""" + |scalaVersion=$$(sbt "print ${scala3CompilerProject}/version" | tail -n1) + |rm -r out + |sbt "clean; ${scala3Project}/publishLocal" + |${validationCommandStatusModifier}${validationScript.getAbsolutePath} "$$scalaVersion" + """.stripMargin + "git bisect start".! + s"git bisect bad $fistBadHash".! + s"git bisect good $lastGoodHash".! + Seq("git", "bisect", "run", "sh", "-c", bisectRunScript).! + s"git bisect reset".! 
diff --git a/project/scripts/cmdScaladocTests b/project/scripts/cmdScaladocTests index 5d33e3bd7b37..2168e3e8e334 100755 --- a/project/scripts/cmdScaladocTests +++ b/project/scripts/cmdScaladocTests @@ -17,7 +17,7 @@ DOTTY_BOOTSTRAPPED_VERSION_COMMAND="$SBT \"eval println(Build.dottyVersion)\"" DOTTY_BOOTSTRAPPED_VERSION=$(eval $DOTTY_BOOTSTRAPPED_VERSION_COMMAND | tail -n 2 | head -n 1) SOURCE_LINKS_REPOSITORY="lampepfl/dotty" -SOURCE_LINKS_VERSION="$DOTTY_BOOTSTRAPPED_VERSION" +SOURCE_LINKS_VERSION="${GITHUB_SHA:-$DOTTY_BOOTSTRAPPED_VERSION}" "$SBT" "scaladoc/generateTestcasesDocumentation" > "$tmp" 2>&1 || echo "generated testcases project with sbt" dist/target/pack/bin/scaladoc \ @@ -37,9 +37,9 @@ dist/target/pack/bin/scaladoc \ "-snippet-compiler:scaladoc-testcases/docs=compile" \ "-comment-syntax:scaladoc-testcases/src/example/comment-md=markdown,scaladoc-testcases/src/example/comment-wiki=wiki" \ -siteroot scaladoc-testcases/docs \ - -project-footer "Copyright (c) 2002-2022, LAMP/EPFL" \ + -project-footer "Copyright (c) 2002-2023, LAMP/EPFL" \ -default-template static-site-main \ -author -groups -revision main -project-version "${DOTTY_BOOTSTRAPPED_VERSION}" \ - "-quick-links:Learn::https://docs.scala-lang.org/,Install::https://www.scala-lang.org/download/,Playground::https://scastie.scala-lang.org,Find A Library::https://index.scala-lang.org,Community::https://www.scala-lang.org/community/,Blog::https://www.scala-lang.org/blog/" \ + "-quick-links:Learn::https://docs.scala-lang.org/,Install::https://www.scala-lang.org/download/,Playground::https://scastie.scala-lang.org,Find A Library::https://index.scala-lang.org,Community::https://www.scala-lang.org/community/,Blog::https://www.scala-lang.org/blog/," \ out/bootstrap/scaladoc-testcases/scala-"${DOTTY_NONBOOTSTRAPPED_VERSION}"/classes > "$tmp" 2>&1 || echo "generated testcases project with scripts" diff -rq "$OUT1" "scaladoc/output/testcases" diff --git a/project/scripts/dottyCompileBisect.scala 
b/project/scripts/dottyCompileBisect.scala deleted file mode 100644 index fc61e63bdb78..000000000000 --- a/project/scripts/dottyCompileBisect.scala +++ /dev/null @@ -1,73 +0,0 @@ -// Usage -// > scala-cli project/scripts/dottyCompileBisect.scala -- File1.scala File2.scala -// -// This script will bisect the compilation failure starting with a fast bisection on released nightly builds. -// Then it will bisect the commits between the last nightly that worked and the first nightly that failed. - - -import sys.process._ -import scala.io.Source -import Releases.Release - -@main def dottyCompileBisect(files: String*): Unit = - val releaseBisect = ReleaseBisect(files.toList) - val fistBadRelease = releaseBisect.bisect(Releases.allReleases) - println("\nFinished bisecting releases\n") - fistBadRelease.previous match - case Some(lastGoodRelease) => - println(s"Last good release: $lastGoodRelease\nFirst bad release: $fistBadRelease\n") - val commitBisect = CommitBisect(files.toList) - commitBisect.bisect(lastGoodRelease.hash, fistBadRelease.hash) - case None => - println(s"No good release found") - -class ReleaseBisect(files: List[String]): - - def bisect(releases: Vector[Release]): Release = - assert(releases.length > 1, "Need at least 2 releases to bisect") - if releases.length == 2 then - if isGoodRelease(releases.head) then releases.last - else releases.head - else - val mid = releases(releases.length / 2) - if isGoodRelease(mid) then bisect(releases.drop(releases.length / 2)) - else bisect(releases.take(releases.length / 2 + 1)) - - private def isGoodRelease(release: Release): Boolean = - println(s"Testing ${release.version}") - val res = s"""scala-cli compile ${files.mkString(" ")} -S "${release.version}"""".! 
- val isGood = res == 0 - println(s"Test result: ${release.version} is a ${if isGood then "good" else "bad"} release\n") - isGood - -object Releases: - lazy val allReleases: Vector[Release] = - val re = raw"(?<=title=$")(.+-bin-\d{8}-\w{7}-NIGHTLY)(?=/$")".r - val html = Source.fromURL("https://repo1.maven.org/maven2/org/scala-lang/scala3-compiler_3/") - re.findAllIn(html.mkString).map(Release.apply).toVector - - case class Release(version: String): - private val re = raw".+-bin-(\d{8})-(\w{7})-NIGHTLY".r - def date: String = - version match - case re(date, _) => date - case _ => sys.error(s"Could not extract date from version $version") - def hash: String = - version match - case re(_, hash) => hash - case _ => sys.error(s"Could not extract hash from version $version") - - def previous: Option[Release] = - val idx = allReleases.indexOf(this) - if idx == 0 then None - else Some(allReleases(idx - 1)) - - override def toString: String = version - -class CommitBisect(files: List[String]): - def bisect(lastGoodHash: String, fistBadHash: String): Unit = - println(s"Starting bisecting commits $lastGoodHash..$fistBadHash\n") - "git bisect start".! - s"git bisect bad $fistBadHash".! - s"git bisect good $lastGoodHash".! - s"git bisect run sh project/scripts/dottyCompileBisect.sh ${files.mkString(" ")}".! 
diff --git a/project/scripts/dottyCompileBisect.sh b/project/scripts/dottyCompileBisect.sh deleted file mode 100644 index 1cead7a8aefd..000000000000 --- a/project/scripts/dottyCompileBisect.sh +++ /dev/null @@ -1,16 +0,0 @@ -# Usage -# > git bisect start -# > git bisect bad -# > git bisect good -# > git bisect run project/scripts/dottyCompileBisect.sh -# -# Note: Use dottyCompileBisect.scala for faster bisection over commits that spans several days - -files=$@ -shift - -rm -r out -mkdir out -mkdir out/bisect - -sbt "clean; scalac -d out/bisect $files" diff --git a/project/scripts/examples/bisect-cli-example.sh b/project/scripts/examples/bisect-cli-example.sh new file mode 100755 index 000000000000..6eb010cbe8bc --- /dev/null +++ b/project/scripts/examples/bisect-cli-example.sh @@ -0,0 +1,6 @@ +#!/usr/bin/env bash + +# Don't use this example script modified in place as it might disappear from the repo during a checkout. +# Instead copy it to a different location first. + +scala-cli compile -S "$1" --server=false file1.scala file2.scala diff --git a/project/scripts/examples/bisect-expect-example.exp b/project/scripts/examples/bisect-expect-example.exp new file mode 100755 index 000000000000..4c094c373d30 --- /dev/null +++ b/project/scripts/examples/bisect-expect-example.exp @@ -0,0 +1,17 @@ +#!/usr/local/bin/expect -f + +# Don't use this example script modified in place as it might disappear from the repo during a checkout. +# Instead copy it to a different location first. 
+ +set scalaVersion [lindex $argv 0] ;# Get the script argument + +set timeout 30 ;# Give scala-cli some time to download the compiler +spawn scala-cli repl -S "$scalaVersion" --server=false ;# Start the REPL +expect "scala>" ;# REPL has started +set timeout 5 +send -- "Seq.empty.len\t" ;# Tab pressed to trigger code completion +expect { + "length" { exit 0 } ;# Exit with success if the expected string appeared somewhere in stdout +} + +exit 1 ;# Otherwise fail - the timeout was exceeded or the REPL crashed diff --git a/sbt-bridge/src/dotty/tools/xsbt/DelegatingReporter.java b/sbt-bridge/src/dotty/tools/xsbt/DelegatingReporter.java index 20cdfb720538..25b934000144 100644 --- a/sbt-bridge/src/dotty/tools/xsbt/DelegatingReporter.java +++ b/sbt-bridge/src/dotty/tools/xsbt/DelegatingReporter.java @@ -39,13 +39,16 @@ public void doReport(Diagnostic dia, Context ctx) { StringBuilder rendered = new StringBuilder(); rendered.append(messageAndPos(dia, ctx)); Message message = dia.msg(); + StringBuilder messageBuilder = new StringBuilder(); + messageBuilder.append(message.message()); String diagnosticCode = String.valueOf(message.errorId().errorNumber()); boolean shouldExplain = Diagnostic.shouldExplain(dia, ctx); if (shouldExplain && !message.explanation().isEmpty()) { rendered.append(explanation(message, ctx)); + messageBuilder.append(System.lineSeparator()).append(explanation(message, ctx)); } - delegate.log(new Problem(position, message.msg(), severity, rendered.toString(), diagnosticCode)); + delegate.log(new Problem(position, messageBuilder.toString(), severity, rendered.toString(), diagnosticCode)); } private static Severity severityOf(int level) { diff --git a/sbt-bridge/src/dotty/tools/xsbt/Problem.java b/sbt-bridge/src/dotty/tools/xsbt/Problem.java index b88b2637d314..29d64cc26c4a 100644 --- a/sbt-bridge/src/dotty/tools/xsbt/Problem.java +++ b/sbt-bridge/src/dotty/tools/xsbt/Problem.java @@ -41,12 +41,19 @@ public Optional rendered() { } public Optional 
diagnosticCode() { - // NOTE: It's important for compatibility that we only construct a - // DiagnosticCode here to maintain compatibility with older versions of - // zinc while using this newer version of the compiler. If we would - // contstruct it earlier, you'd end up with ClassNotFoundExceptions for - // DiagnosticCode. - return Optional.of(new DiagnosticCode(_diagnosticCode, Optional.empty())); + // We don't forward the code if it's -1 since some tools will assume that this is actually + // the diagnostic code and show it or attempt to use it. This will ensure tools consuming + // this don't all have to be adding checks for -1. + if (_diagnosticCode == "-1") { + return Optional.empty(); + } else { + // NOTE: It's important for compatibility that we only construct a + // DiagnosticCode here to maintain compatibility with older versions of + // zinc while using this newer version of the compiler. If we would + // contstruct it earlier, you'd end up with ClassNotFoundExceptions for + // DiagnosticCode. 
+ return Optional.of(new DiagnosticCode(_diagnosticCode, Optional.empty())); + } } @Override diff --git a/sbt-bridge/test/xsbt/ExtractAPISpecification.scala b/sbt-bridge/test/xsbt/ExtractAPISpecification.scala index dfbcbf2181a2..e85cf8989b0f 100644 --- a/sbt-bridge/test/xsbt/ExtractAPISpecification.scala +++ b/sbt-bridge/test/xsbt/ExtractAPISpecification.scala @@ -147,9 +147,8 @@ class ExtractAPISpecification { |""".stripMargin val compilerForTesting = new ScalaCompilerForUnitTesting val apis = - compilerForTesting.extractApisFromSrcs(reuseCompilerInstance = false)(List(src1, src2), - List(src2)) - val _ :: src2Api1 :: src2Api2 :: Nil = apis.toList + compilerForTesting.extractApisFromSrcs(List(src1, src2), List(src2)) + val _ :: src2Api1 :: src2Api2 :: Nil = apis.toList: @unchecked val namerApi1 = selectNamer(src2Api1) val namerApi2 = selectNamer(src2Api2) assertTrue(SameAPI(namerApi1, namerApi2)) @@ -202,7 +201,7 @@ class ExtractAPISpecification { val srcC8 = "class C8 { self => }" val compilerForTesting = new ScalaCompilerForUnitTesting val apis = compilerForTesting - .extractApisFromSrcs(reuseCompilerInstance = true)( + .extractApisFromSrcs( List(srcX, srcY, srcC1, srcC2, srcC3, srcC4, srcC5, srcC6, srcC8) ) .map(_.head) diff --git a/sbt-bridge/test/xsbt/ExtractUsedNamesSpecification.scala b/sbt-bridge/test/xsbt/ExtractUsedNamesSpecification.scala index ee50b3717213..819bedec3cbc 100644 --- a/sbt-bridge/test/xsbt/ExtractUsedNamesSpecification.scala +++ b/sbt-bridge/test/xsbt/ExtractUsedNamesSpecification.scala @@ -79,10 +79,10 @@ class ExtractUsedNamesSpecification { val usedNames = compilerForTesting.extractUsedNamesFromSrc(srcA, srcB, srcC, srcD) val scalaVersion = scala.util.Properties.versionNumberString val namesA = standardNames ++ Set("Nothing", "Any") - val namesAX = standardNames ++ objectStandardNames ++ Set("x", "T", "A", "Nothing", "Any", "scala") + val namesAX = standardNames ++ Set("x", "T", "A", "Nothing", "Any") val namesB = Set("A", "Int", 
"A;init;", "Unit") - val namesC = objectStandardNames ++ Set("B;init;", "B", "Unit") - val namesD = standardNames ++ objectStandardNames ++ Set("C", "X", "foo", "Int", "T") + val namesC = Set("B;init;", "B", "Unit") + val namesD = standardNames ++ Set("C", "X", "foo", "Int", "T") assertEquals(namesA, usedNames("A")) assertEquals(namesAX, usedNames("A.X")) assertEquals(namesB, usedNames("B")) @@ -131,13 +131,13 @@ class ExtractUsedNamesSpecification { val compilerForTesting = new ScalaCompilerForUnitTesting val usedNames = compilerForTesting.extractUsedNamesFromSrc(src1, src2) val expectedNames_lista = - standardNames ++ objectStandardNames ++ Set("B", "lista", "List", "A") + standardNames ++ Set("B", "lista", "List", "A") val expectedNames_at = - standardNames ++ objectStandardNames ++ Set("B", "at", "A", "T", "X0", "X1") + standardNames ++ Set("B", "at", "A", "T", "X0", "X1") val expectedNames_as = - standardNames ++ objectStandardNames ++ Set("B", "as", "S", "Y") + standardNames ++ Set("B", "as", "S", "Y") val expectedNames_foo = - standardNames ++ objectStandardNames ++ + standardNames ++ Set("B", "foo", "M", @@ -146,7 +146,7 @@ class ExtractUsedNamesSpecification { "???", "Nothing") val expectedNames_bar = - standardNames ++ objectStandardNames ++ + standardNames ++ Set("B", "bar", "P1", @@ -174,7 +174,7 @@ class ExtractUsedNamesSpecification { |""".stripMargin val compilerForTesting = new ScalaCompilerForUnitTesting val usedNames = compilerForTesting.extractUsedNamesFromSrc(srcFoo, srcBar) - val expectedNames = standardNames ++ objectStandardNames ++ Set("Outer", "TypeInner", "Inner", "Int") + val expectedNames = standardNames ++ Set("Outer", "TypeInner", "Inner", "Int") assertEquals(expectedNames, usedNames("Bar")) } @@ -227,7 +227,7 @@ class ExtractUsedNamesSpecification { def findPatMatUsages(in: String): Set[String] = { val compilerForTesting = new ScalaCompilerForUnitTesting val (_, callback) = - compilerForTesting.compileSrcs(List(List(sealedClass, in)), 
reuseCompilerInstance = false) + compilerForTesting.compileSrcs(List(List(sealedClass, in))) val clientNames = callback.usedNamesAndScopes.view.filterKeys(!_.startsWith("base.")) val names: Set[String] = clientNames.flatMap { @@ -309,9 +309,4 @@ class ExtractUsedNamesSpecification { // the return type of the default constructor is Unit "Unit" ) - - private val objectStandardNames = Set( - // all Dotty objects extend scala.Serializable - "scala", "Serializable" - ) } diff --git a/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala b/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala index e81d58a07744..e58f9fefd92d 100644 --- a/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala +++ b/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala @@ -1,7 +1,7 @@ /** Adapted from https://github.com/sbt/sbt/blob/0.13/compile/interface/src/test/scala/xsbt/ScalaCompilerForUnitTesting.scala */ package xsbt -import xsbti.compile.SingleOutput +import xsbti.compile.{CompileProgress, SingleOutput} import java.io.File import xsbti._ import sbt.io.IO @@ -9,6 +9,8 @@ import xsbti.api.{ ClassLike, Def, DependencyContext } import DependencyContext._ import xsbt.api.SameAPI import sbt.internal.util.ConsoleLogger +import dotty.tools.io.PlainFile.toPlainFile +import dotty.tools.xsbt.CompilerBridge import TestCallback.ExtractedClassDependencies @@ -32,8 +34,8 @@ class ScalaCompilerForUnitTesting { * Compiles given source code using Scala compiler and returns API representation * extracted by ExtractAPI class. 
*/ - def extractApisFromSrcs(reuseCompilerInstance: Boolean)(srcs: List[String]*): Seq[Seq[ClassLike]] = { - val (tempSrcFiles, analysisCallback) = compileSrcs(srcs.toList, reuseCompilerInstance) + def extractApisFromSrcs(srcs: List[String]*): Seq[Seq[ClassLike]] = { + val (tempSrcFiles, analysisCallback) = compileSrcs(srcs.toList) tempSrcFiles.map(analysisCallback.apis) } @@ -91,7 +93,7 @@ class ScalaCompilerForUnitTesting { * file system-independent way of testing dependencies between source code "files". */ def extractDependenciesFromSrcs(srcs: List[List[String]]): ExtractedClassDependencies = { - val (_, testCallback) = compileSrcs(srcs, reuseCompilerInstance = true) + val (_, testCallback) = compileSrcs(srcs) val memberRefDeps = testCallback.classDependencies collect { case (target, src, DependencyByMemberRef) => (src, target) @@ -117,50 +119,47 @@ class ScalaCompilerForUnitTesting { * useful to compile macros, which cannot be used in the same compilation run that * defines them. * - * The `reuseCompilerInstance` parameter controls whether the same Scala compiler instance - * is reused between compiling source groups. Separate compiler instances can be used to - * test stability of API representation (with respect to pickling) or to test handling of - * binary dependencies. - * * The sequence of temporary files corresponding to passed snippets and analysis * callback is returned as a result. 
*/ - def compileSrcs(groupedSrcs: List[List[String]], - reuseCompilerInstance: Boolean): (Seq[File], TestCallback) = { - // withTemporaryDirectory { temp => - { + def compileSrcs(groupedSrcs: List[List[String]]): (Seq[File], TestCallback) = { val temp = IO.createTemporaryDirectory val analysisCallback = new TestCallback val classesDir = new File(temp, "classes") classesDir.mkdir() - lazy val commonCompilerInstanceAndCtx = prepareCompiler(classesDir, analysisCallback, classesDir.toString) + val bridge = new CompilerBridge val files = for ((compilationUnit, unitId) <- groupedSrcs.zipWithIndex) yield { - // use a separate instance of the compiler for each group of sources to - // have an ability to test for bugs in instability between source and pickled - // representation of types - val (compiler, ctx) = if (reuseCompilerInstance) commonCompilerInstanceAndCtx else - prepareCompiler(classesDir, analysisCallback, classesDir.toString) - val run = compiler.newRun(ctx) - val srcFiles = compilationUnit.toSeq.zipWithIndex map { - case (src, i) => + val srcFiles = compilationUnit.toSeq.zipWithIndex.map { + (src, i) => val fileName = s"Test-$unitId-$i.scala" prepareSrcFile(temp, fileName, src) } - val srcFilePaths = srcFiles.map(srcFile => srcFile.getAbsolutePath).toList - run.compile(srcFilePaths) + val virtualSrcFiles = srcFiles.map(file => TestVirtualFile(file.toPath)).toArray + val classesDirPath = classesDir.getAbsolutePath.toString + val output = new SingleOutput: + def getOutputDirectory() = classesDir + + bridge.run( + virtualSrcFiles.toArray, + new TestDependencyChanges, + Array("-Yforce-sbt-phases", "-classpath", classesDirPath, "-usejavacp", "-d", classesDirPath), + output, + analysisCallback, + new TestReporter, + new CompileProgress {}, + new TestLogger + ) - // srcFilePaths.foreach(f => new File(f).delete) srcFiles } (files.flatten.toSeq, analysisCallback) - } } def compileSrcs(srcs: String*): (Seq[File], TestCallback) = { - compileSrcs(List(srcs.toList), 
reuseCompilerInstance = true) + compileSrcs(List(srcs.toList)) } private def prepareSrcFile(baseDir: File, fileName: String, src: String): File = { @@ -168,28 +167,5 @@ class ScalaCompilerForUnitTesting { IO.write(srcFile, src) srcFile } - - private def prepareCompiler(outputDir: File, analysisCallback: AnalysisCallback, classpath: String = ".") = { - val args = Array.empty[String] - - import dotty.tools.dotc.{Compiler, Driver} - import dotty.tools.dotc.core.Contexts._ - - val driver = new TestDriver - val ctx = (new ContextBase).initialCtx.fresh.setSbtCallback(analysisCallback) - driver.getCompiler(Array("-classpath", classpath, "-usejavacp", "-d", outputDir.getAbsolutePath), ctx) - } - - private object ConsoleReporter extends Reporter { - def reset(): Unit = () - def hasErrors: Boolean = false - def hasWarnings: Boolean = false - def printWarnings(): Unit = () - def problems(): Array[xsbti.Problem] = Array.empty - def log(problem: xsbti.Problem): Unit = println(problem.message) - def comment(pos: Position, msg: String): Unit = () - def printSummary(): Unit = () - } - } diff --git a/sbt-bridge/test/xsbt/TestDependencyChanges.scala b/sbt-bridge/test/xsbt/TestDependencyChanges.scala new file mode 100644 index 000000000000..f31a314ba036 --- /dev/null +++ b/sbt-bridge/test/xsbt/TestDependencyChanges.scala @@ -0,0 +1,9 @@ +package xsbt + +import xsbti.compile.* + +class TestDependencyChanges extends DependencyChanges: + def isEmpty(): Boolean = ??? + def modifiedBinaries(): Array[java.io.File] = ??? + def modifiedClasses(): Array[String] = ??? + def modifiedLibraries(): Array[xsbti.VirtualFileRef] = ??? 
diff --git a/sbt-bridge/test/xsbt/TestDriver.scala b/sbt-bridge/test/xsbt/TestDriver.scala deleted file mode 100644 index 790c14f4b912..000000000000 --- a/sbt-bridge/test/xsbt/TestDriver.scala +++ /dev/null @@ -1,13 +0,0 @@ -package xsbt - -import dotty.tools.dotc._ -import core.Contexts._ - -class TestDriver extends Driver { - override protected def sourcesRequired = false - - def getCompiler(args: Array[String], rootCtx: Context) = { - val (fileNames, ctx) = setup(args, rootCtx) - (newCompiler(ctx), ctx) - } -} diff --git a/sbt-bridge/test/xsbt/TestLogger.scala b/sbt-bridge/test/xsbt/TestLogger.scala new file mode 100644 index 000000000000..598887e3f8e6 --- /dev/null +++ b/sbt-bridge/test/xsbt/TestLogger.scala @@ -0,0 +1,12 @@ +package xsbt + +import java.util.function.Supplier + +import xsbti.* + +class TestLogger extends Logger: + override def debug(msg: Supplier[String]): Unit = () + override def error(msg: Supplier[String]): Unit = () + override def info(msg: Supplier[String]): Unit = () + override def warn(msg: Supplier[String]): Unit = () + override def trace(exception: Supplier[Throwable]): Unit = () diff --git a/sbt-bridge/test/xsbt/TestReporter.scala b/sbt-bridge/test/xsbt/TestReporter.scala new file mode 100644 index 000000000000..cab9823813a6 --- /dev/null +++ b/sbt-bridge/test/xsbt/TestReporter.scala @@ -0,0 +1,13 @@ +package xsbt + +import xsbti.* + +class TestReporter extends Reporter: + private val allProblems = collection.mutable.ListBuffer.empty[Problem] + def comment(position: Position, msg: String): Unit = () + def hasErrors(): Boolean = allProblems.exists(_.severity == Severity.Error) + def hasWarnings(): Boolean = allProblems.exists(_.severity == Severity.Warn) + def log(problem: Problem): Unit = allProblems.append(problem) + def printSummary(): Unit = () + def problems(): Array[Problem] = allProblems.toArray + def reset(): Unit = allProblems.clear() diff --git a/sbt-bridge/test/xsbt/TestVirtualFile.scala 
b/sbt-bridge/test/xsbt/TestVirtualFile.scala new file mode 100644 index 000000000000..db00038272a8 --- /dev/null +++ b/sbt-bridge/test/xsbt/TestVirtualFile.scala @@ -0,0 +1,14 @@ +package xsbt + +import xsbti.PathBasedFile +import java.nio.file.{Files, Path} +import scala.io.Source +import scala.io.Codec + +class TestVirtualFile(path: Path) extends PathBasedFile: + override def contentHash(): Long = ??? + override def input(): java.io.InputStream = Files.newInputStream(path) + override def id(): String = name() + override def name(): String = path.toFile.getName + override def names(): Array[String] = ??? + override def toPath(): Path = path diff --git a/sbt-bridge/test/xsbti/TestCallback.scala b/sbt-bridge/test/xsbti/TestCallback.scala index 3348fd2d90f3..a0919dc69bc4 100644 --- a/sbt-bridge/test/xsbti/TestCallback.scala +++ b/sbt-bridge/test/xsbti/TestCallback.scala @@ -2,7 +2,9 @@ package xsbti import java.io.File +import java.nio.file.Path import scala.collection.mutable.ArrayBuffer +import xsbti.VirtualFileRef import xsbti.api.ClassLike import xsbti.api.DependencyContext import DependencyContext._ @@ -24,12 +26,14 @@ class TestCallback extends AnalysisCallback assert(!apis.contains(source), s"startSource can be called only once per source file: $source") apis(source) = Seq.empty } + override def startSource(source: VirtualFile): Unit = ??? override def binaryDependency(binary: File, name: String, fromClassName: String, source: File, context: DependencyContext): Unit = { binaryDependencies += ((binary, name, fromClassName, source, context)) } + override def binaryDependency(binary: Path, name: String, fromClassName: String, source: VirtualFileRef, context: DependencyContext): Unit = ??? 
- def generatedNonLocalClass(source: File, + override def generatedNonLocalClass(source: File, module: File, binaryClassName: String, srcClassName: String): Unit = { @@ -37,12 +41,13 @@ class TestCallback extends AnalysisCallback classNames(source) += ((srcClassName, binaryClassName)) () } + override def generatedNonLocalClass(source: VirtualFileRef, module: Path, binaryClassName: String, srcClassName: String): Unit = ??? - def generatedLocalClass(source: File, module: File): Unit = { + override def generatedLocalClass(source: File, module: File): Unit = { products += ((source, module)) () } - + override def generatedLocalClass(source: VirtualFileRef, module: Path): Unit = ??? override def classDependency(onClassName: String, sourceClassName: String, context: DependencyContext): Unit = { if (onClassName != sourceClassName) classDependencies += ((onClassName, sourceClassName, context)) @@ -51,15 +56,23 @@ class TestCallback extends AnalysisCallback override def usedName(className: String, name: String, scopes: EnumSet[UseScope]): Unit = { usedNamesAndScopes(className) += TestUsedName(name, scopes) } + override def api(source: File, classApi: ClassLike): Unit = { apis(source) = classApi +: apis(source) } + override def api(source: VirtualFileRef, classApi: ClassLike): Unit = ??? + override def problem(category: String, pos: xsbti.Position, message: String, severity: xsbti.Severity, reported: Boolean): Unit = () override def dependencyPhaseCompleted(): Unit = () override def apiPhaseCompleted(): Unit = () override def enabled(): Boolean = true - def mainClass(source: File, className: String): Unit = () + override def mainClass(source: File, className: String): Unit = () + override def mainClass(source: VirtualFileRef, className: String): Unit = ??? + + override def classesInOutputJar(): java.util.Set[String] = ??? + override def getPickleJarPair(): java.util.Optional[xsbti.T2[Path, Path]] = ??? + override def isPickleJava(): Boolean = ??? 
} object TestCallback { @@ -78,14 +91,8 @@ object TestCallback { } private def pairsToMultiMap[A, B](pairs: collection.Seq[(A, B)]): Map[A, Set[B]] = { - import scala.collection.mutable.{ HashMap, MultiMap } - val emptyMultiMap = new HashMap[A, scala.collection.mutable.Set[B]] with MultiMap[A, B] - val multiMap = pairs.foldLeft(emptyMultiMap) { - case (acc, (key, value)) => - acc.addBinding(key, value) - } - // convert all collections to immutable variants - multiMap.toMap.view.mapValues(_.toSet).toMap.withDefaultValue(Set.empty) + pairs.groupBy(_._1).view.mapValues(values => values.map(_._2).toSet) + .toMap.withDefaultValue(Set.empty) } } } diff --git a/sbt-test/java-compat/i15288/QueryRequest.java b/sbt-test/java-compat/i15288/QueryRequest.java new file mode 100644 index 000000000000..e43487e09449 --- /dev/null +++ b/sbt-test/java-compat/i15288/QueryRequest.java @@ -0,0 +1,9 @@ +interface CopyableBuilder {} +interface ToCopyableBuilder {} + +public class QueryRequest implements ToCopyableBuilder { + public static Builder builder() { throw new UnsupportedOperationException(); } + public interface Builder extends CopyableBuilder { + void build(); + } +} diff --git a/sbt-test/java-compat/i15288/Test.scala b/sbt-test/java-compat/i15288/Test.scala new file mode 100644 index 000000000000..e03617ac4c33 --- /dev/null +++ b/sbt-test/java-compat/i15288/Test.scala @@ -0,0 +1,2 @@ +class Test: + def makeQuery = QueryRequest.builder().build() diff --git a/sbt-test/java-compat/i15288/build.sbt b/sbt-test/java-compat/i15288/build.sbt new file mode 100644 index 000000000000..63e314982c41 --- /dev/null +++ b/sbt-test/java-compat/i15288/build.sbt @@ -0,0 +1 @@ +scalaVersion := sys.props("plugin.scalaVersion") diff --git a/sbt-test/java-compat/i15288/test b/sbt-test/java-compat/i15288/test new file mode 100644 index 000000000000..ad1a8a5987ee --- /dev/null +++ b/sbt-test/java-compat/i15288/test @@ -0,0 +1,5 @@ +## This could just be a pos test checked by FromTastyTests, but +## 
ParallelTesting#compileTastyInDir does not support test with multiple files +## currently. +> compile +> doc diff --git a/sbt-test/sbt-dotty/dotty-knowledge.i17/project/build.properties b/sbt-test/sbt-dotty/dotty-knowledge.i17/project/build.properties index 22af2628c413..46e43a97ed86 100644 --- a/sbt-test/sbt-dotty/dotty-knowledge.i17/project/build.properties +++ b/sbt-test/sbt-dotty/dotty-knowledge.i17/project/build.properties @@ -1 +1 @@ -sbt.version=1.7.1 +sbt.version=1.8.2 diff --git a/sbt-test/sbt-dotty/tasty-inspector-jars/build.sbt b/sbt-test/sbt-dotty/tasty-inspector-jars/build.sbt index 59dd85290bf0..3bece1b43fa7 100644 --- a/sbt-test/sbt-dotty/tasty-inspector-jars/build.sbt +++ b/sbt-test/sbt-dotty/tasty-inspector-jars/build.sbt @@ -15,7 +15,7 @@ lazy val inspector = project .settings( scalaVersion := dottyVersion, libraryDependencies += "org.scala-lang" %% "scala3-tasty-inspector" % scalaVersion.value, - runTest := + runTest := Def.sequential( Def.task(IO.copyFile((lib/Compile/packageBin).value, jarDest)), (Compile/run).toTask(" " + jarDest.getAbsolutePath) diff --git a/sbt-test/scala2-compat/i16351/app/App.scala b/sbt-test/scala2-compat/i16351/app/App.scala new file mode 100644 index 000000000000..5c152f515ada --- /dev/null +++ b/sbt-test/scala2-compat/i16351/app/App.scala @@ -0,0 +1,8 @@ +package app + +import lib.* + +object App { + def main(args: Array[String]): Unit = + new Lib(Value("Foo"), b = 2) {} +} diff --git a/sbt-test/scala2-compat/i16351/build.sbt b/sbt-test/scala2-compat/i16351/build.sbt new file mode 100644 index 000000000000..433a5e8baddf --- /dev/null +++ b/sbt-test/scala2-compat/i16351/build.sbt @@ -0,0 +1,13 @@ +val scala3Version = sys.props("plugin.scalaVersion") +val scala2Version = sys.props("plugin.scala2Version") + +lazy val lib = project.in(file("lib")) + .settings( + scalaVersion := scala2Version + ) + +lazy val app = project.in(file("app")) + .dependsOn(lib) + .settings( + scalaVersion := scala3Version + ) diff --git 
a/sbt-test/scala2-compat/i16351/lib/lib.scala b/sbt-test/scala2-compat/i16351/lib/lib.scala new file mode 100644 index 000000000000..cfc3c6c780d9 --- /dev/null +++ b/sbt-test/scala2-compat/i16351/lib/lib.scala @@ -0,0 +1,10 @@ +// Should be compiled with 2.13 +package lib + +class Value(val value: String) + +class Lib( + value: => Value, + a: Int = 0, + b: Int +) diff --git a/sbt-test/scala2-compat/i16351/test b/sbt-test/scala2-compat/i16351/test new file mode 100644 index 000000000000..63092ffa4a03 --- /dev/null +++ b/sbt-test/scala2-compat/i16351/test @@ -0,0 +1 @@ +> app/run diff --git a/sbt-test/source-dependencies/implicit-search/changes/A1.scala b/sbt-test/source-dependencies/implicit-search/changes/A1.scala index 7aa91d096277..69c493db2131 100644 --- a/sbt-test/source-dependencies/implicit-search/changes/A1.scala +++ b/sbt-test/source-dependencies/implicit-search/changes/A1.scala @@ -1 +1 @@ -object A +object A diff --git a/sbt-test/source-dependencies/inline-rec-change-inline/B.scala b/sbt-test/source-dependencies/inline-rec-change-inline/B.scala index 61e61a620957..eaeef8d57ece 100644 --- a/sbt-test/source-dependencies/inline-rec-change-inline/B.scala +++ b/sbt-test/source-dependencies/inline-rec-change-inline/B.scala @@ -1,5 +1,5 @@ object B { - inline def inlinedAny(x: String): x.type = x + inline def inlinedAny(x: String): String = x } diff --git a/sbt-test/source-dependencies/inline-rec-change-inline/changes/B1.scala b/sbt-test/source-dependencies/inline-rec-change-inline/changes/B1.scala index 4a1c47d38572..63104570fed4 100644 --- a/sbt-test/source-dependencies/inline-rec-change-inline/changes/B1.scala +++ b/sbt-test/source-dependencies/inline-rec-change-inline/changes/B1.scala @@ -1,5 +1,5 @@ object B { - inline def inlinedAny(inline x: String): x.type = x + inline def inlinedAny(inline x: String): String = x } diff --git a/sbt-test/source-dependencies/java-static/test b/sbt-test/source-dependencies/java-static/test index 42890ca74f4d..0bb6f50169e4 
100644 --- a/sbt-test/source-dependencies/java-static/test +++ b/sbt-test/source-dependencies/java-static/test @@ -2,7 +2,7 @@ # the statics as an object without a file and so the Analyzer must know to look for the # object's linked class. # This test verifies this happens. -# The test compiles a Java class with a static field. +# The test compiles a Java class with a static field. # It then adds a Scala object that references the static field. Because the object only depends on a # static member and because the Java source is not included in the compilation (since it didn't change), # this triggers the special case above. diff --git a/sbt-test/source-dependencies/restore-classes/changes/A2.scala b/sbt-test/source-dependencies/restore-classes/changes/A2.scala index 10d738255cca..778f16ab95cc 100644 --- a/sbt-test/source-dependencies/restore-classes/changes/A2.scala +++ b/sbt-test/source-dependencies/restore-classes/changes/A2.scala @@ -2,4 +2,4 @@ object A { val x = "a" } -class C +class C diff --git a/sbt-test/source-dependencies/sam/A.scala b/sbt-test/source-dependencies/sam/A.scala new file mode 100644 index 000000000000..eb870b8332b6 --- /dev/null +++ b/sbt-test/source-dependencies/sam/A.scala @@ -0,0 +1,3 @@ +trait A { + def foo(): Int +} diff --git a/sbt-test/source-dependencies/sam/B.scala b/sbt-test/source-dependencies/sam/B.scala new file mode 100644 index 000000000000..87dfb28cdb33 --- /dev/null +++ b/sbt-test/source-dependencies/sam/B.scala @@ -0,0 +1,2 @@ +class B: + val f: A = () => 1 diff --git a/sbt-test/source-dependencies/sam/build.sbt b/sbt-test/source-dependencies/sam/build.sbt new file mode 100644 index 000000000000..63e314982c41 --- /dev/null +++ b/sbt-test/source-dependencies/sam/build.sbt @@ -0,0 +1 @@ +scalaVersion := sys.props("plugin.scalaVersion") diff --git a/sbt-test/source-dependencies/sam/changes/A.scala b/sbt-test/source-dependencies/sam/changes/A.scala new file mode 100644 index 000000000000..e9b339f2d1a4 --- /dev/null +++ 
b/sbt-test/source-dependencies/sam/changes/A.scala @@ -0,0 +1,3 @@ +trait A { + def foo(): String +} diff --git a/sbt-test/source-dependencies/sam/test b/sbt-test/source-dependencies/sam/test new file mode 100644 index 000000000000..3c4c9a0f001b --- /dev/null +++ b/sbt-test/source-dependencies/sam/test @@ -0,0 +1,7 @@ +> compile + +# change the SAM type +$ copy-file changes/A.scala A.scala + +# Both A.scala and B.scala should be recompiled, producing a compile error +-> compile diff --git a/scaladoc-testcases/src/example/typeAndObjects/binaryops.scala b/scaladoc-testcases/src/example/typeAndObjects/binaryops.scala index 890d39732ca8..6474d04f91ef 100644 --- a/scaladoc-testcases/src/example/typeAndObjects/binaryops.scala +++ b/scaladoc-testcases/src/example/typeAndObjects/binaryops.scala @@ -5,7 +5,7 @@ sealed trait Expr object Expr{ case class BinaryOp(offset: Int, lhs: Expr, op: BinaryOp.Op, rhs: Expr) extends Expr - + object BinaryOp{ sealed trait Op case object `<<` extends Op diff --git a/scaladoc-testcases/src/tests/contextBounds.scala b/scaladoc-testcases/src/tests/contextBounds.scala index 1925f7f40994..794af0b8b8f8 100644 --- a/scaladoc-testcases/src/tests/contextBounds.scala +++ b/scaladoc-testcases/src/tests/contextBounds.scala @@ -25,15 +25,15 @@ class A: def a[T <: String | Int : ([T] =>> T match { case String => A case Int => B })](t: T): T = t - def falsePositive[T](evidence$1: ClassTag[T]): Int + def falsePositive[T](evidence$1: ClassTag[T]): Int = 1 // Scala spec stats that behaviour of names with `$` is undefined. 
// Scaladoc documents definition below as `def falsePositive2[T: ClassTag]: Int` // that is equivalent of methods below - // def falsePositive2[T](implicit evidence$3: ClassTag[T]): Int + // def falsePositive2[T](implicit evidence$3: ClassTag[T]): Int // = 1 class Outer[A]: - def falsePositiveInner[T](implicit evidence$3: ClassTag[A]): Int + def falsePositiveInner[T](implicit evidence$3: ClassTag[A]): Int = 1 \ No newline at end of file diff --git a/scaladoc-testcases/src/tests/extendsCall.scala b/scaladoc-testcases/src/tests/extendsCall.scala new file mode 100644 index 000000000000..b90af8162e15 --- /dev/null +++ b/scaladoc-testcases/src/tests/extendsCall.scala @@ -0,0 +1,6 @@ +package tests +package extendsCall + +class Impl() extends Base(Seq.empty, c = "-") //expected: class Impl() extends Base + +class Base(val a: Seq[String], val b: String = "", val c: String = "") //expected: class Base(val a: Seq[String], val b: String, val c: String) diff --git a/scaladoc-testcases/src/tests/extensionParams.scala b/scaladoc-testcases/src/tests/extensionParams.scala index 231a8a1fefbf..0e2225d8aa3c 100644 --- a/scaladoc-testcases/src/tests/extensionParams.scala +++ b/scaladoc-testcases/src/tests/extensionParams.scala @@ -1,22 +1,73 @@ package tests.extensionParams +trait Animal + extension [A](thiz: A) - def toTuple2[B](that: B): (A, B) = thiz -> that + def toTuple2[B](that: B): (A, B) + = thiz -> that extension [A](a: A)(using Int) - def f[B](b: B): (A, B) = ??? + def f1[B](b: B): (A, B) + = ??? -extension [A](a: A)(using Int) - def ff(b: A): (A, A) = ??? +extension [A](a: A)(using String) + def f2(b: A): (A, A) + = ??? -extension [A](a: A)(using Int) - def fff(using String)(b: A): (A, A) = ??? +extension [A](a: A)(using Number) + def f3(using String)(b: A): (A, A) + = ??? extension (a: Char)(using Int) - def ffff(using String)(b: Int): Unit = ??? + def f4(using String)(b: Int): Unit + = ??? + +extension (a: String)(using Int) + def f5[B](using String)(b: B): Unit + = ??? 
+ +extension [A <: List[Char]](a: Int)(using Int) + def f6[B](b: B): (A, B) + = ??? + +extension [A <: List[Char]](using String)(using Unit)(a: A)(using Int)(using Number) + def f7[B, C](b: B)(c: C): (A, B) + = ??? + +extension [A <: List[Char]](using String)(using Number)(a: A)(using Int)(using Unit) + def f8(b: Any)(c: Any): Any + = ??? + +extension [A <: List[Char]](using Unit)(using String)(a: A)(using Int)(using Number) + def f9[B, C](using Int)(b: B)(c: C): (A, B) + = ??? + +extension [A <: List[Char]](using Number)(using Unit)(a: A)(using Int)(using String) + def f10(using Int)(b: Any)(c: Any): Any + = ??? + + def f12(using Int)(b: A)(c: String): Number + = ??? + +extension (using String)(using Unit)(a: Animal)(using Int)(using Number) + def f11(b: Any)(c: Any): Any + = ??? + +extension (a: Int) + def f13(): Any + = ??? + +extension (using Unit)(a: Int) + def f14(): Any + = ??? + +import scala.language.experimental.clauseInterleaving + +extension (using String)(using Int)(a: Animal)(using Unit)(using Number) + def f16(b: Any)[T](c: T): T + = ??? + def f17[D](b: D)[T](c: T): T + = ??? + -extension (a: Char)(using Int) - def fffff[B](using String)(b: B): Unit = ??? -extension [A <: List[Char]](a: A)(using Int) - def ffffff[B](b: B): (A, B) = ??? diff --git a/scaladoc-testcases/src/tests/methodsAndConstructors.scala b/scaladoc-testcases/src/tests/methodsAndConstructors.scala index b8925c593b4c..132d35035b30 100644 --- a/scaladoc-testcases/src/tests/methodsAndConstructors.scala +++ b/scaladoc-testcases/src/tests/methodsAndConstructors.scala @@ -60,3 +60,8 @@ class Methods: def withImplicitParam2(v: String)(implicit ab: Double, a: Int, b: String): String = ??? + import scala.language.experimental.clauseInterleaving + + def clauseInterleaving[T](x: T)[U](y: U)(using (T, U)): (T, U) + = ??? 
+ diff --git a/scaladoc-testcases/src/tests/nonScala3Parent.scala b/scaladoc-testcases/src/tests/nonScala3Parent.scala new file mode 100644 index 000000000000..91183d25b583 --- /dev/null +++ b/scaladoc-testcases/src/tests/nonScala3Parent.scala @@ -0,0 +1,13 @@ +package tests +package nonScala3Parent + +import javax.swing.JPanel +import javax.swing.JFrame + +// https://github.com/lampepfl/dotty/issues/15927 + +trait Foo1 extends Numeric[Any] +trait Foo2 extends JPanel +trait Foo3 extends JFrame +trait Foo4 extends Ordering[Any] +trait Foo5 extends Enumeration diff --git a/scaladoc-testcases/src/tests/snippetComments.scala b/scaladoc-testcases/src/tests/snippetComments.scala index 39b15648103e..9f54b8a465f1 100644 --- a/scaladoc-testcases/src/tests/snippetComments.scala +++ b/scaladoc-testcases/src/tests/snippetComments.scala @@ -3,7 +3,7 @@ package tests.snippetComments /** * This is my codeblock - * + * * ``` * //{{ * import xd @@ -20,7 +20,7 @@ package tests.snippetComments * val y = 2 // comment in the same line * // comment in new line * val z = 3 - * + * * //{{ * val hideMe = 7 * //}} diff --git a/scaladoc/README.md b/scaladoc/README.md index 5f7560372976..774543996c7a 100644 --- a/scaladoc/README.md +++ b/scaladoc/README.md @@ -40,7 +40,7 @@ the documentation won't work completely if you don't. ## CLI and SBT Documentation The preferred way to use scaladoc is calling it from sbt `Compile/doc` task or to use CLI interface provided inside `dotty/bin/scaladoc` bash script. 
-More information about specific scaladoc flags you can find inside [Usage docs](https://dotty.epfl.ch/docs/usage/scaladoc/settings.html) +More information about specific scaladoc flags you can find inside [Usage docs](https://docs.scala-lang.org/scala3/guides/scaladoc/settings.html) ## Developing diff --git a/scaladoc/resources/dotty_res/scripts/ux.js b/scaladoc/resources/dotty_res/scripts/ux.js index 304f2af9e129..0ead006af84d 100644 --- a/scaladoc/resources/dotty_res/scripts/ux.js +++ b/scaladoc/resources/dotty_res/scripts/ux.js @@ -309,6 +309,10 @@ document var selected = document.getElementById(location.hash.substring(1)); if (selected) { selected.classList.toggle("expand"); + selected.classList.toggle("expanded"); + const btn = selected.querySelector(".icon-button"); + btn.classList.toggle("expand"); + btn.classList.toggle("expanded"); } } } diff --git a/scaladoc/resources/dotty_res/styles/scalastyle.css b/scaladoc/resources/dotty_res/styles/scalastyle.css index a14af7f7ae2d..3efcfddd370a 100644 --- a/scaladoc/resources/dotty_res/styles/scalastyle.css +++ b/scaladoc/resources/dotty_res/styles/scalastyle.css @@ -962,8 +962,7 @@ footer .socials { color: var(--type); } -.signature *[t="t"] { - /* Types with links */ +.signature *[t="t"] { /* Types with links */ color: var(--type-link); } diff --git a/scaladoc/resources/dotty_res/styles/theme/components/api-member.css b/scaladoc/resources/dotty_res/styles/theme/components/api-member.css index c1a491815201..47b64c304a70 100644 --- a/scaladoc/resources/dotty_res/styles/theme/components/api-member.css +++ b/scaladoc/resources/dotty_res/styles/theme/components/api-member.css @@ -42,6 +42,10 @@ margin-block-end: 0; } +.documentableElement .doc img { + max-width: 100%; +} + .documentableElement .annotations { display: none; } diff --git a/scaladoc/resources/dotty_res/styles/theme/layout/container.css b/scaladoc/resources/dotty_res/styles/theme/layout/container.css index 849235e2fa82..53ede0e3dfff 100644 --- 
a/scaladoc/resources/dotty_res/styles/theme/layout/container.css +++ b/scaladoc/resources/dotty_res/styles/theme/layout/container.css @@ -19,6 +19,11 @@ p { --header-height: calc(8 * var(--base-spacing)); } +.site-container img{ + max-width: 100%; + height: auto; +} + /* Scrollbar */ ::-webkit-scrollbar { diff --git a/scaladoc/resources/dotty_res/styles/theme/layout/content.css b/scaladoc/resources/dotty_res/styles/theme/layout/content.css index 0ba37d0752b0..39a7b053752d 100644 --- a/scaladoc/resources/dotty_res/styles/theme/layout/content.css +++ b/scaladoc/resources/dotty_res/styles/theme/layout/content.css @@ -8,6 +8,19 @@ scroll-behavior: smooth; } +/* blog footer */ +.blog-author { + color: gray; +} + +.blog-author img#author-img { + width: auto; + height: auto; + max-width:100px; + max-height:100px; + border-radius: 50%; +} + #content { display: flex; flex-flow: row; @@ -91,6 +104,7 @@ @media (max-width: 768px) { #content { + padding-top: calc(10 * var(--base-spacing)); padding-bottom: calc(6 * var(--base-spacing)); } @@ -143,9 +157,9 @@ } #content h2 { - color: var(--text-primary); - margin-block-start: calc(6* var(--base-spacing)); - margin-block-end: calc(3* var(--base-spacing)); + color: var(--text-primary); + margin-block-start: calc(6* var(--base-spacing)); + margin-block-end: calc(3* var(--base-spacing)); } #content .cover > h2 { @@ -166,7 +180,7 @@ /* content first paragraph */ .first-p { - margin-block-start: calc(2* var(--base-spacing)); + margin-block-start: calc(2* var(--base-spacing)); } #content .first-p { @@ -226,18 +240,32 @@ /* content link */ #content a { - color: var(--text-primary); - text-decoration: underline solid 1px; - -webkit-text-decoration-line: underline; /* Safari */ - text-decoration-line: underline; - text-underline-offset: 2px; - transition: text-decoration-color .2s ease-in-out; + color: var(--text-primary); + text-decoration: underline solid 1px; + -webkit-text-decoration-line: underline; /* Safari */ + 
text-decoration-line: underline; + text-underline-offset: 2px; + transition: text-decoration-color .2s ease-in-out; } #content a:hover { text-decoration-color: transparent; } +#content a.anchor { + color: transparent; + margin-left: -23px; + padding-right: 3px; + transition: color .4s ease-out; +} + +#content a.anchor::before { + content: "\f0c1"; + font-family: "Font Awesome 5 Free"; + font-weight: 900; + font-size: 20px; +} + #content .cover-header { margin-block-end: calc(2 * var(--base-spacing)); } diff --git a/scaladoc/resources/dotty_res/styles/theme/layout/header.css b/scaladoc/resources/dotty_res/styles/theme/layout/header.css index 034f9ed43087..85e6b0240899 100644 --- a/scaladoc/resources/dotty_res/styles/theme/layout/header.css +++ b/scaladoc/resources/dotty_res/styles/theme/layout/header.css @@ -63,6 +63,14 @@ align-items: center; } +.logo-container .project-logo { + max-width: 40px; +} + +.logo-container .project-logo img { + max-width: 100%; +} + #mobile-menu-toggle { display: none; } @@ -72,7 +80,7 @@ overflow: hidden; white-space: nowrap; text-overflow: ellipsis; - width: calc(9 * var(--base-spacing)); + width: auto; } .single { @@ -89,6 +97,7 @@ #mobile-menu-toggle { margin-left: calc(3 * var(--base-spacing)); display: block; + padding: 16px; } .header-container-right .text-button { @@ -102,6 +111,10 @@ #search-toggle { display: none; } + + .projectVersion{ + max-width: calc(12 * var(--base-spacing)); + } } diff --git a/scaladoc/resources/dotty_res/styles/theme/layout/mobileMenu.css b/scaladoc/resources/dotty_res/styles/theme/layout/mobileMenu.css index a7c08eedb4be..6fa692ab4662 100644 --- a/scaladoc/resources/dotty_res/styles/theme/layout/mobileMenu.css +++ b/scaladoc/resources/dotty_res/styles/theme/layout/mobileMenu.css @@ -154,6 +154,8 @@ #mobile-menu-close { margin-left: auto; + width: 48px; + height: 48px; } #mobile-menu-close:disabled { diff --git a/scaladoc/src/dotty/tools/scaladoc/DocContext.scala 
b/scaladoc/src/dotty/tools/scaladoc/DocContext.scala index 7f208daff29a..acc93ccb332f 100644 --- a/scaladoc/src/dotty/tools/scaladoc/DocContext.scala +++ b/scaladoc/src/dotty/tools/scaladoc/DocContext.scala @@ -1,11 +1,9 @@ package dotty.tools.scaladoc import java.io.File -import java.nio.file.Files import java.nio.file.Path import java.nio.file.Paths -import scala.jdk.CollectionConverters._ import dotty.tools.scaladoc.site.StaticSiteContext import dotty.tools.dotc.core.Contexts._ import dotty.tools.dotc.util.SourceFile @@ -13,9 +11,6 @@ import dotty.tools.dotc.util.SourcePosition import dotty.tools.dotc.util.Spans import java.io.ByteArrayOutputStream import java.io.PrintStream -import scala.io.Codec -import java.net.URL -import scala.util.Try import scala.collection.mutable import dotty.tools.scaladoc.util.Check.checkJekyllIncompatPath diff --git a/scaladoc/src/dotty/tools/scaladoc/ExternalDocLink.scala b/scaladoc/src/dotty/tools/scaladoc/ExternalDocLink.scala index 97e0d309d6b8..536d759388f3 100644 --- a/scaladoc/src/dotty/tools/scaladoc/ExternalDocLink.scala +++ b/scaladoc/src/dotty/tools/scaladoc/ExternalDocLink.scala @@ -1,6 +1,6 @@ package dotty.tools.scaladoc -import java.net.URL +import java.net.{URI, URL} import scala.util.matching._ import scala.util.{ Try, Success, Failure } @@ -25,10 +25,12 @@ object ExternalDocLink: case Failure(e) => fail(mapping, s"Unable to parse $descr. 
Exception $e occured") } + private def stripIndex(url: String): String = url.stripSuffix("index.html").stripSuffix("/") + "/" + def parseLegacy(mapping: String): Either[String, ExternalDocLink] = mapping.split("#").toList match case path :: apiUrl :: Nil => for { - url <- tryParse(mapping, "url")(URL(apiUrl)) + url <- tryParse(mapping, "url")(URI(stripIndex(apiUrl)).toURL) } yield ExternalDocLink( List(s"${Regex.quote(path)}.*".r), url, @@ -40,7 +42,7 @@ object ExternalDocLink: def parse(mapping: String): Either[String, ExternalDocLink] = def parsePackageList(elements: List[String]) = elements match - case List(urlStr) => tryParse(mapping, "packageList")(Some(URL(urlStr))) + case List(urlStr) => tryParse(mapping, "packageList")(Some(URI(urlStr).toURL)) case Nil => Right(None) case other => fail(mapping, s"Provided multiple package lists: $other") @@ -55,7 +57,7 @@ object ExternalDocLink: case regexStr :: docToolStr :: urlStr :: rest => for { regex <- tryParse(mapping, "regex")(regexStr.r) - url <- tryParse(mapping, "url")(URL(urlStr)) + url <- tryParse(mapping, "url")(URI(stripIndex(urlStr)).toURL) doctool <- doctoolByName(docToolStr) packageList <- parsePackageList(rest) } yield ExternalDocLink( diff --git a/scaladoc/src/dotty/tools/scaladoc/Main.scala b/scaladoc/src/dotty/tools/scaladoc/Main.scala index da35e63561fd..36b8b1daf4c4 100644 --- a/scaladoc/src/dotty/tools/scaladoc/Main.scala +++ b/scaladoc/src/dotty/tools/scaladoc/Main.scala @@ -1,16 +1,6 @@ package dotty.tools.scaladoc -import java.util.ServiceLoader -import java.io.File -import java.util.jar._ -import scala.jdk.CollectionConverters._ -import collection.immutable.ArraySeq - -import java.nio.file.Files - -import dotty.tools.dotc.config.Settings._ -import dotty.tools.dotc.config.CommonScalaSettings -import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Contexts.ContextBase /** Main class for the doctool when used from cli. 
*/ class Main: diff --git a/scaladoc/src/dotty/tools/scaladoc/Scaladoc.scala b/scaladoc/src/dotty/tools/scaladoc/Scaladoc.scala index da34e97efdf5..fa02e87548e6 100644 --- a/scaladoc/src/dotty/tools/scaladoc/Scaladoc.scala +++ b/scaladoc/src/dotty/tools/scaladoc/Scaladoc.scala @@ -1,20 +1,15 @@ package dotty.tools.scaladoc -import java.util.ServiceLoader import java.io.File import java.io.FileWriter -import java.util.jar._ -import scala.jdk.CollectionConverters._ -import collection.immutable.ArraySeq +import java.nio.file.Paths -import java.nio.file.{ Files, Paths } +import collection.immutable.ArraySeq import dotty.tools.dotc.config.Settings._ import dotty.tools.dotc.config.{ CommonScalaSettings, AllScalaSettings } import dotty.tools.dotc.reporting.Reporter import dotty.tools.dotc.core.Contexts._ - -import dotty.tools.scaladoc.Inkuire import dotty.tools.scaladoc.Inkuire._ object Scaladoc: diff --git a/scaladoc/src/dotty/tools/scaladoc/ScaladocCommand.scala b/scaladoc/src/dotty/tools/scaladoc/ScaladocCommand.scala index b91b8307208b..8b438a27f33e 100644 --- a/scaladoc/src/dotty/tools/scaladoc/ScaladocCommand.scala +++ b/scaladoc/src/dotty/tools/scaladoc/ScaladocCommand.scala @@ -1,20 +1,9 @@ package dotty.tools.scaladoc -import java.util.ServiceLoader -import java.io.File -import java.util.jar._ -import scala.jdk.CollectionConverters._ -import collection.immutable.ArraySeq - -import java.nio.file.Files - import dotty.tools.dotc.config.Settings._ -import dotty.tools.dotc.config.CommonScalaSettings -import dotty.tools.scaladoc.Scaladoc._ -import dotty.tools.dotc.config.Settings.Setting.value import dotty.tools.dotc.config.Properties._ import dotty.tools.dotc.config.CliCommand -import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Contexts.Context object ScaladocCommand extends CliCommand: type ConcreteSettings = ScaladocSettings diff --git a/scaladoc/src/dotty/tools/scaladoc/ScaladocSettings.scala 
b/scaladoc/src/dotty/tools/scaladoc/ScaladocSettings.scala index ee7c6cd4980f..96e7854b45cf 100644 --- a/scaladoc/src/dotty/tools/scaladoc/ScaladocSettings.scala +++ b/scaladoc/src/dotty/tools/scaladoc/ScaladocSettings.scala @@ -1,20 +1,7 @@ package dotty.tools.scaladoc -import java.util.ServiceLoader -import java.io.File -import java.util.jar._ -import scala.jdk.CollectionConverters._ -import collection.immutable.ArraySeq - -import java.nio.file.Files - import dotty.tools.dotc.config.Settings._ import dotty.tools.dotc.config.AllScalaSettings -import dotty.tools.scaladoc.Scaladoc._ -import dotty.tools.dotc.config.Settings.Setting.value -import dotty.tools.dotc.config.Properties._ -import dotty.tools.dotc.config.CliCommand -import dotty.tools.dotc.core.Contexts._ class ScaladocSettings extends SettingGroup with AllScalaSettings: val unsupportedSettings = Seq( diff --git a/scaladoc/src/dotty/tools/scaladoc/SocialLinks.scala b/scaladoc/src/dotty/tools/scaladoc/SocialLinks.scala index f4fe674dbcb4..a07029d06c50 100644 --- a/scaladoc/src/dotty/tools/scaladoc/SocialLinks.scala +++ b/scaladoc/src/dotty/tools/scaladoc/SocialLinks.scala @@ -1,9 +1,5 @@ package dotty.tools.scaladoc -import java.nio.file.Path -import java.nio.file.Paths -import dotty.tools.dotc.core.Contexts.Context - enum SocialLinks(val url: String, val className: String): case Github(ghUrl: String) extends SocialLinks(ghUrl, "gh") case Twitter(tUrl: String) extends SocialLinks(tUrl, "twitter") diff --git a/scaladoc/src/dotty/tools/scaladoc/SourceLinks.scala b/scaladoc/src/dotty/tools/scaladoc/SourceLinks.scala index a9e300040fb8..b3732bcbc946 100644 --- a/scaladoc/src/dotty/tools/scaladoc/SourceLinks.scala +++ b/scaladoc/src/dotty/tools/scaladoc/SourceLinks.scala @@ -2,7 +2,6 @@ package dotty.tools.scaladoc import java.nio.file.Path import java.nio.file.Paths -import dotty.tools.dotc.core.Contexts.Context import scala.util.matching.Regex def pathToString(p: Path) = diff --git 
a/scaladoc/src/dotty/tools/scaladoc/api.scala b/scaladoc/src/dotty/tools/scaladoc/api.scala index 90a03658c90e..5af55f76a211 100644 --- a/scaladoc/src/dotty/tools/scaladoc/api.scala +++ b/scaladoc/src/dotty/tools/scaladoc/api.scala @@ -44,24 +44,24 @@ enum Modifier(val name: String, val prefix: Boolean): case Transparent extends Modifier("transparent", true) case Infix extends Modifier("infix", true) -case class ExtensionTarget(name: String, typeParams: Seq[TypeParameter], argsLists: Seq[ParametersList], signature: Signature, dri: DRI, position: Long) +case class ExtensionTarget(name: String, typeParams: Seq[TypeParameter], argsLists: Seq[TermParameterList], signature: Signature, dri: DRI, position: Long) case class ImplicitConversion(from: DRI, to: DRI) trait ImplicitConversionProvider { def conversion: Option[ImplicitConversion] } trait Classlike: def typeParams: Seq[TypeParameter] = Seq.empty - def argsLists: Seq[ParametersList] = Seq.empty + def argsLists: Seq[TermParameterList] = Seq.empty enum Kind(val name: String): case RootPackage extends Kind("") case Package extends Kind("package") - case Class(override val typeParams: Seq[TypeParameter], override val argsLists: Seq[ParametersList]) + case Class(override val typeParams: Seq[TypeParameter], override val argsLists: Seq[TermParameterList]) extends Kind("class") with Classlike case Object extends Kind("object") with Classlike - case Trait(override val typeParams: Seq[TypeParameter], override val argsLists: Seq[ParametersList]) + case Trait(override val typeParams: Seq[TypeParameter], override val argsLists: Seq[TermParameterList]) extends Kind("trait") with Classlike - case Enum(override val typeParams: Seq[TypeParameter], override val argsLists: Seq[ParametersList]) extends Kind("enum") with Classlike + case Enum(override val typeParams: Seq[TypeParameter], override val argsLists: Seq[TermParameterList]) extends Kind("enum") with Classlike case EnumCase(kind: Object.type | Kind.Type | Val.type | Class) 
extends Kind("case") - case Def(typeParams: Seq[TypeParameter], argsLists: Seq[ParametersList]) + case Def(paramLists: Seq[Either[TermParameterList,TypeParameterList]]) extends Kind("def") case Extension(on: ExtensionTarget, m: Kind.Def) extends Kind("def") case Constructor(base: Kind.Def) extends Kind("def") @@ -97,12 +97,12 @@ object Annotation: case class LinkParameter(name: Option[String] = None, dri: DRI, value: String) extends AnnotationParameter case class UnresolvedParameter(name: Option[String] = None, unresolvedText: String) extends AnnotationParameter -case class ParametersList( - parameters: Seq[Parameter], +case class TermParameterList( + parameters: Seq[TermParameter], modifiers: String ) -case class Parameter( +case class TermParameter( annotations: Seq[Annotation], modifiers: String, name: Option[String], @@ -112,6 +112,8 @@ case class Parameter( isGrouped: Boolean = false ) +type TypeParameterList = Seq[TypeParameter] + case class TypeParameter( annotations: Seq[Annotation], variance: "" | "+" | "-", diff --git a/scaladoc/src/dotty/tools/scaladoc/compat.scala b/scaladoc/src/dotty/tools/scaladoc/compat.scala index fc660d97cb5d..d2095b9cc98c 100644 --- a/scaladoc/src/dotty/tools/scaladoc/compat.scala +++ b/scaladoc/src/dotty/tools/scaladoc/compat.scala @@ -3,7 +3,6 @@ package dotty.tools.scaladoc import java.util.stream.Stream // comment out - wrong error! 
import java.util.stream.Collectors import java.util.Collections -import java.nio.file.Path import com.vladsch.flexmark.util.ast.{Node => MdNode} import dotty.tools.scaladoc.tasty.comments.wiki.WikiDocElement import scala.jdk.CollectionConverters._ @@ -37,4 +36,4 @@ extension [V](jlist: JList[V]) extension [V](jset: JSet[V]) def ++ (other: JSet[V]): JSet[V] = - Stream.of(jset, other).flatMap(_.stream).collect(Collectors.toSet()) \ No newline at end of file + Stream.of(jset, other).flatMap(_.stream).collect(Collectors.toSet()) diff --git a/scaladoc/src/dotty/tools/scaladoc/parsers/WikiCodeBlockParser.scala b/scaladoc/src/dotty/tools/scaladoc/parsers/WikiCodeBlockParser.scala index 865d78193886..4201cae4e2e6 100644 --- a/scaladoc/src/dotty/tools/scaladoc/parsers/WikiCodeBlockParser.scala +++ b/scaladoc/src/dotty/tools/scaladoc/parsers/WikiCodeBlockParser.scala @@ -6,13 +6,14 @@ import com.vladsch.flexmark.parser.core._ import com.vladsch.flexmark.parser.block._ import com.vladsch.flexmark.util.ast.Block import com.vladsch.flexmark.util.ast.BlockContent -import com.vladsch.flexmark.util.options.DataHolder +import com.vladsch.flexmark.util.data.DataHolder import com.vladsch.flexmark.util.sequence.BasedSequence import com.vladsch.flexmark.util.sequence.SegmentedSequence import java.{util => ju} import ju.regex.Matcher import ju.regex.Pattern +import scala.jdk.CollectionConverters._ /** Copied from FencedCodeBlockParser. 
*/ @@ -21,8 +22,11 @@ object WikiCodeBlockParser { private val CLOSING_FENCE = Pattern.compile("^(\\}{3})(?=[ \t]*$)$") class Factory extends CustomBlockParserFactory { + override def apply(options: DataHolder): BlockParserFactory = + new WikiCodeBlockParser.BlockFactory(options) + override def getAfterDependents = - new ju.HashSet[Class[_ <: CustomBlockParserFactory]](ju.Arrays.asList( + new ju.HashSet[Class[?]](ju.Arrays.asList( classOf[BlockQuoteParser.Factory], classOf[HeadingParser.Factory], //FencedCodeBlockParser.Factory.class, @@ -33,7 +37,7 @@ object WikiCodeBlockParser { )) override def getBeforeDependents = - new ju.HashSet[Class[_ <: CustomBlockParserFactory]](ju.Arrays.asList( + new ju.HashSet[Class[?]](ju.Arrays.asList( //BlockQuoteParser.Factory.class, //HeadingParser.Factory.class, //FencedCodeBlockParser.Factory.class, @@ -44,9 +48,6 @@ object WikiCodeBlockParser { )) override def affectsGlobalScope = false - - override def create(options: DataHolder) = - new WikiCodeBlockParser.BlockFactory(options) } private[WikiCodeBlockParser] class BlockFactory (val options: DataHolder) @@ -83,7 +84,7 @@ class WikiCodeBlockParser( final private val block = new FencedCodeBlock() private var content = new BlockContent - private val codeContentBlock = options.get(Parser.FENCED_CODE_CONTENT_BLOCK) + private val codeContentBlock = Parser.FENCED_CODE_CONTENT_BLOCK.get(options) def getBlock: Block = block def getFenceIndent: Int = fenceIndent @@ -141,7 +142,7 @@ class WikiCodeBlockParser( codeBlock.setCharsFromContent block.appendChild(codeBlock) } else { - val codeBlock = new Text(SegmentedSequence.of(segments)) + val codeBlock = new Text(SegmentedSequence.create(segments.asScala.toSeq:_*)) block.appendChild(codeBlock) } } diff --git a/scaladoc/src/dotty/tools/scaladoc/renderers/HtmlRenderer.scala b/scaladoc/src/dotty/tools/scaladoc/renderers/HtmlRenderer.scala index 719033959b47..93b86ce0bc51 100644 --- 
a/scaladoc/src/dotty/tools/scaladoc/renderers/HtmlRenderer.scala +++ b/scaladoc/src/dotty/tools/scaladoc/renderers/HtmlRenderer.scala @@ -2,18 +2,9 @@ package dotty.tools.scaladoc package renderers import util.HTML._ -import scala.jdk.CollectionConverters._ -import java.net.URI -import java.net.URL import dotty.tools.scaladoc.site._ -import scala.util.Try import org.jsoup.Jsoup -import java.nio.file.Paths -import java.nio.file.Path import java.nio.file.Files -import java.nio.file.FileVisitOption -import java.io.File -import dotty.tools.scaladoc.staticFileSymbolUUID class HtmlRenderer(rootPackage: Member, members: Map[DRI, Member])(using ctx: DocContext) extends Renderer(rootPackage, members, extension = "html"): @@ -41,7 +32,7 @@ class HtmlRenderer(rootPackage: Member, members: Map[DRI, Member])(using ctx: Do case _ => Nil) :+ (Attr("data-pathToRoot") := pathToRoot(page.link.dri)) - html(attrs: _*)( + val htmlTag = html(attrs: _*)( head((mkHead(page) :+ docHead):_*), body( if !page.hasFrame then docBody @@ -49,6 +40,10 @@ class HtmlRenderer(rootPackage: Member, members: Map[DRI, Member])(using ctx: Do ) ) + val doctypeTag = s"" + val finalTag = raw(doctypeTag + htmlTag.toString) + finalTag + override def render(): Unit = val renderedResources = renderResources() super.render() @@ -203,11 +198,11 @@ class HtmlRenderer(rootPackage: Member, members: Map[DRI, Member])(using ctx: Do } val darkProjectLogoElem = - darkProjectLogo.flatMap { + darkProjectLogo.orElse(projectLogo).flatMap { case Resource.File(path, _) => Some(span(id := "dark-project-logo", cls := "project-logo")(img(src := resolveRoot(link.dri, path)))) case _ => None - }.orElse(projectLogoElem) + } val parentsHtml = val innerTags = parents.flatMap[TagArg](b => Seq( diff --git a/scaladoc/src/dotty/tools/scaladoc/renderers/Locations.scala b/scaladoc/src/dotty/tools/scaladoc/renderers/Locations.scala index deb676e812c8..689234cdd29c 100644 --- a/scaladoc/src/dotty/tools/scaladoc/renderers/Locations.scala +++ 
b/scaladoc/src/dotty/tools/scaladoc/renderers/Locations.scala @@ -1,18 +1,10 @@ package dotty.tools.scaladoc package renderers -import util.HTML._ import scala.jdk.CollectionConverters._ import java.net.URI -import java.net.URL import dotty.tools.scaladoc.site._ -import scala.util.Try -import org.jsoup.Jsoup import java.nio.file.Paths -import java.nio.file.Path -import java.nio.file.Files -import java.io.File -import scala.util.matching._ import dotty.tools.scaladoc.util.Escape._ val UnresolvedLocationLink = "#" diff --git a/scaladoc/src/dotty/tools/scaladoc/renderers/MarkdownRenderer.scala b/scaladoc/src/dotty/tools/scaladoc/renderers/MarkdownRenderer.scala index 12d41ac86218..6f20276e907e 100644 --- a/scaladoc/src/dotty/tools/scaladoc/renderers/MarkdownRenderer.scala +++ b/scaladoc/src/dotty/tools/scaladoc/renderers/MarkdownRenderer.scala @@ -2,17 +2,6 @@ package dotty.tools.scaladoc package renderers import util.HTML._ -import scala.jdk.CollectionConverters._ -import java.net.URI -import java.net.URL -import dotty.tools.scaladoc.site._ -import scala.util.Try -import org.jsoup.Jsoup -import java.nio.file.Paths -import java.nio.file.Path -import java.nio.file.Files -import java.nio.file.FileVisitOption -import java.io.File class MarkdownRenderer(rootPackage: Member, members: Map[DRI, Member])(using ctx: DocContext) extends Renderer(rootPackage, members, extension = "md"): diff --git a/scaladoc/src/dotty/tools/scaladoc/renderers/MemberRenderer.scala b/scaladoc/src/dotty/tools/scaladoc/renderers/MemberRenderer.scala index e50d87e99837..996b422b44fd 100644 --- a/scaladoc/src/dotty/tools/scaladoc/renderers/MemberRenderer.scala +++ b/scaladoc/src/dotty/tools/scaladoc/renderers/MemberRenderer.scala @@ -7,9 +7,6 @@ import util.HTML.{div, *} import scala.jdk.CollectionConverters.* import dotty.tools.scaladoc.translators.FilterAttributes -import dotty.tools.scaladoc.tasty.comments.markdown.DocFlexmarkRenderer -import com.vladsch.flexmark.util.ast.Node as MdNode -import 
dotty.tools.scaladoc.tasty.comments.wiki.WikiDocElement import org.jsoup.Jsoup import translators.* @@ -72,7 +69,7 @@ class MemberRenderer(signatureRenderer: SignatureRenderer)(using DocContext) ext def source(m: Member): Seq[AppliedTag] = summon[DocContext].sourceLinks.pathTo(m).fold(Nil){ link => - tableRow("Source", a(href := link)(m.sources.fold("(source)")(_.path.getFileName().toString()))) + tableRow("Source", a(href := link, target := "_blank")(m.sources.fold("(source)")(_.path.getFileName().toString()))) } def deprecation(m: Member): Seq[AppliedTag] = m.deprecated.fold(Nil){ a => @@ -409,14 +406,14 @@ class MemberRenderer(signatureRenderer: SignatureRenderer)(using DocContext) ext case (Some(on), members) => val typeSig = SignatureBuilder() .keyword("extension ") - .generics(on.typeParams) + .typeParamList(on.typeParams) .content val argsSig = SignatureBuilder() - .functionParameters(on.argsLists) + .functionTermParameters(on.argsLists) .content - val sig = typeSig ++ Signature(Plain(s"(${on.name}: ")) ++ on.signature ++ Signature(Plain(")")) ++ argsSig - MGroup(span(cls := "groupHeader")(sig.map(renderElement(_))), members.sortBy(_.name).toSeq, on.name) - }.toSeq + val sig = typeSig ++ argsSig + MGroup(span(cls := "groupHeader")(sig.map(renderElement(_))), members.sortBy(_.name).toSeq, on.name) -> on.position + }.toSeq.sortBy(_._2).map(_._1) div(cls := "membersList expand")( renderTabs( diff --git a/scaladoc/src/dotty/tools/scaladoc/renderers/Renderer.scala b/scaladoc/src/dotty/tools/scaladoc/renderers/Renderer.scala index dc2157131e0b..1a43ea8648a8 100644 --- a/scaladoc/src/dotty/tools/scaladoc/renderers/Renderer.scala +++ b/scaladoc/src/dotty/tools/scaladoc/renderers/Renderer.scala @@ -2,18 +2,11 @@ package dotty.tools.scaladoc package renderers import util.HTML._ -import scala.jdk.CollectionConverters._ import collection.mutable.ListBuffer -import java.net.URI -import java.net.URL import dotty.tools.scaladoc.site._ -import scala.util.Try -import 
org.jsoup.Jsoup import java.nio.file.Paths import java.nio.file.Path import java.nio.file.Files -import java.nio.file.FileVisitOption -import java.io.File case class Page(link: Link, content: Member | ResolvedTemplate | String, children: Seq[Page], hidden: Boolean = false): def withNewChildren(newChildren: Seq[Page]) = copy(children = children ++ newChildren) diff --git a/scaladoc/src/dotty/tools/scaladoc/renderers/Resources.scala b/scaladoc/src/dotty/tools/scaladoc/renderers/Resources.scala index d6cd701225ba..b84c07b4bade 100644 --- a/scaladoc/src/dotty/tools/scaladoc/renderers/Resources.scala +++ b/scaladoc/src/dotty/tools/scaladoc/renderers/Resources.scala @@ -2,16 +2,10 @@ package dotty.tools.scaladoc package renderers import util.HTML._ -import scala.jdk.CollectionConverters._ -import java.net.URI -import java.net.URL -import dotty.tools.scaladoc.site._ -import scala.util.Try -import org.jsoup.Jsoup +import java.net.{URI, URL} import java.nio.file.Paths import java.nio.file.Path import java.nio.file.Files -import java.io.File import dotty.tools.scaladoc.translators.FilterAttributes import util._ import translators._ @@ -177,12 +171,19 @@ trait Resources(using ctx: DocContext) extends Locations, Writer: def extensionTarget(member: Member): String = member.kind match - case Kind.Extension(on, _) => flattenToText(on.signature) + case Kind.Extension(on, _) => + val typeSig = SignatureBuilder() + .keyword("extension ") + .typeParamList(on.typeParams) + .content + val argsSig = SignatureBuilder() + .functionTermParameters(on.argsLists) + .content + flattenToText(typeSig ++ argsSig) case _ => "" def docPartRenderPlain(d: DocPart): String = import dotty.tools.scaladoc.tasty.comments.wiki._ - import com.vladsch.flexmark.util.ast.{Node => MdNode} def renderPlain(wd: WikiDocElement): String = wd match case Paragraph(text) => renderPlain(text) @@ -564,4 +565,4 @@ trait Resources(using ctx: DocContext) extends Locations, Writer: case Resource.URL(url) => Nil case 
Resource.URLToCopy(url, dest) => - Seq(copy(new URL(url).openStream(), dest)) + Seq(copy(URI(url).toURL.openStream(), dest)) diff --git a/scaladoc/src/dotty/tools/scaladoc/renderers/SiteRenderer.scala b/scaladoc/src/dotty/tools/scaladoc/renderers/SiteRenderer.scala index ef80b4f2d327..ef7c06416e27 100644 --- a/scaladoc/src/dotty/tools/scaladoc/renderers/SiteRenderer.scala +++ b/scaladoc/src/dotty/tools/scaladoc/renderers/SiteRenderer.scala @@ -3,8 +3,7 @@ package renderers import util.HTML._ import scala.jdk.CollectionConverters._ -import java.net.URI -import java.net.URL +import java.net.{URI, URL} import dotty.tools.scaladoc.site._ import scala.util.Try import org.jsoup.Jsoup @@ -40,7 +39,7 @@ trait SiteRenderer(using DocContext) extends Locations: def processLocalLink(str: String): String = val staticSiteRootPath = content.ctx.root.toPath.toAbsolutePath - def asValidURL: Option[String] = Try(URL(str)).toOption.map(_ => str) + def asValidURL: Option[String] = Try(URI(str).toURL).toOption.map(_ => str) def asAsset: Option[String] = Option.when( Files.exists(staticSiteRootPath.resolve("_assets").resolve(str.stripPrefix("/"))) )( diff --git a/scaladoc/src/dotty/tools/scaladoc/site/BlogParser.scala b/scaladoc/src/dotty/tools/scaladoc/site/BlogParser.scala new file mode 100644 index 000000000000..68e709a339b2 --- /dev/null +++ b/scaladoc/src/dotty/tools/scaladoc/site/BlogParser.scala @@ -0,0 +1,26 @@ +package dotty.tools.scaladoc.site + +import com.fasterxml.jackson.databind.ObjectMapper +import com.fasterxml.jackson.dataformat.yaml.YAMLFactory +import com.fasterxml.jackson.databind.DeserializationFeature +import java.io.File +import scala.beans.{BooleanBeanProperty, BeanProperty} +import scala.util.Try + +case class BlogConfig( + @BeanProperty input: String, + @BeanProperty output: String, + @BooleanBeanProperty hidden: Boolean +): + def this() = this(null, null, false) + +object BlogParser: + def readYml(content: File | String): BlogConfig = + val mapper = 
ObjectMapper(YAMLFactory()) + .findAndRegisterModules() + + content match + case f: File => + val ymlFile = f.toPath.resolve("blog.yml").toFile + if ymlFile.exists then mapper.readValue(ymlFile, classOf[BlogConfig]) else new BlogConfig + case s: String => Try(mapper.readValue(s, classOf[BlogConfig])).getOrElse(new BlogConfig) diff --git a/scaladoc/src/dotty/tools/scaladoc/site/FlexmarkSectionWrapper.scala b/scaladoc/src/dotty/tools/scaladoc/site/FlexmarkSectionWrapper.scala index 12e93505ab59..ec0bd241602a 100644 --- a/scaladoc/src/dotty/tools/scaladoc/site/FlexmarkSectionWrapper.scala +++ b/scaladoc/src/dotty/tools/scaladoc/site/FlexmarkSectionWrapper.scala @@ -4,7 +4,6 @@ package site import com.vladsch.flexmark.util.{ast => mdu, sequence} import com.vladsch.flexmark.{ast => mda} import com.vladsch.flexmark.formatter.Formatter -import com.vladsch.flexmark.util.options.MutableDataSet import scala.jdk.CollectionConverters._ import dotty.tools.scaladoc.tasty.comments.markdown.Section diff --git a/scaladoc/src/dotty/tools/scaladoc/site/StaticSiteContext.scala b/scaladoc/src/dotty/tools/scaladoc/site/StaticSiteContext.scala index de3f511c8e67..7a90a462cba0 100644 --- a/scaladoc/src/dotty/tools/scaladoc/site/StaticSiteContext.scala +++ b/scaladoc/src/dotty/tools/scaladoc/site/StaticSiteContext.scala @@ -23,6 +23,10 @@ class StaticSiteContext( val docsPath = root.toPath.resolve("_docs") val blogPath = root.toPath.resolve("_blog") + def resolveNewBlogPath(stringPath: String): Path = + if stringPath.nonEmpty then root.toPath.resolve(stringPath) + else blogPath + def relativize(path: Path): Path = if args.apiSubdirectory then docsPath.relativize(path) diff --git a/scaladoc/src/dotty/tools/scaladoc/site/StaticSiteLoader.scala b/scaladoc/src/dotty/tools/scaladoc/site/StaticSiteLoader.scala index c9ace108c9b2..489720cc5936 100644 --- a/scaladoc/src/dotty/tools/scaladoc/site/StaticSiteLoader.scala +++ b/scaladoc/src/dotty/tools/scaladoc/site/StaticSiteLoader.scala @@ -5,6 +5,7 
@@ import java.io.File import java.nio.file.Files import java.nio.file.{ Paths, Path } import scala.io._ +import dotty.tools.scaladoc.site.BlogParser class StaticSiteLoader(val root: File, val args: Scaladoc.Args)(using StaticSiteContext, CompilerContext): val ctx: StaticSiteContext = summon[StaticSiteContext] @@ -114,10 +115,12 @@ class StaticSiteLoader(val root: File, val args: Scaladoc.Args)(using StaticSite } def loadBlog(): Option[LoadedTemplate] = { + val blogConfig = BlogParser.readYml(root) + val rootPath = Option(blogConfig.input).map(input => ctx.resolveNewBlogPath(input)).getOrElse(ctx.blogPath) + val defaultDirectory = Option(blogConfig.output).getOrElse("blog") + type Date = (String, String, String) - val rootPath = ctx.blogPath - val defaultDirectory = "blog" - if (!Files.exists(rootPath)) None + if (!Files.exists(rootPath) || blogConfig.hidden) None else { val indexPageOpt = Seq( rootPath.resolve("index.md"), diff --git a/scaladoc/src/dotty/tools/scaladoc/site/common.scala b/scaladoc/src/dotty/tools/scaladoc/site/common.scala index 6c4852961fec..0811d217537f 100644 --- a/scaladoc/src/dotty/tools/scaladoc/site/common.scala +++ b/scaladoc/src/dotty/tools/scaladoc/site/common.scala @@ -12,12 +12,13 @@ import com.vladsch.flexmark.ext.gfm.tasklist.TaskListExtension import com.vladsch.flexmark.ext.tables.TablesExtension import com.vladsch.flexmark.ext.yaml.front.matter.{AbstractYamlFrontMatterVisitor, YamlFrontMatterExtension} import com.vladsch.flexmark.parser.{Parser, ParserEmulationProfile} -import com.vladsch.flexmark.util.options.{DataHolder, MutableDataSet} import com.vladsch.flexmark.ext.wikilink.WikiLinkExtension import com.vladsch.flexmark.formatter.Formatter import com.vladsch.flexmark.html.HtmlRenderer import scala.jdk.CollectionConverters._ +import com.vladsch.flexmark.util.data.DataHolder +import com.vladsch.flexmark.util.data.MutableDataSet val docsRootDRI: DRI = DRI(location = "_docs/index", symbolUUID = staticFileSymbolUUID) val apiPageDRI: 
DRI = DRI(location = "api/index") @@ -62,11 +63,16 @@ def yamlParser(using ctx: StaticSiteContext): Parser = Parser.builder(defaultMar def loadTemplateFile(file: File, defaultTitle: Option[TemplateName] = None)(using ctx: StaticSiteContext): TemplateFile = { val lines = Files.readAllLines(file.toPath).asScala.toList - val (config, content) = if (lines.head == ConfigSeparator) { + val (config, content) = if (!lines.isEmpty && lines.head == ConfigSeparator) { // Taking the second occurrence of ConfigSeparator. // The rest may appear within the content. - val index = lines.drop(1).indexOf(ConfigSeparator) + 2 - (lines.take(index), lines.drop(index)) + val secondSeparatorIndex = lines.drop(1).indexOf(ConfigSeparator) + if secondSeparatorIndex != -1 then + (lines.take(secondSeparatorIndex + 2), lines.drop(secondSeparatorIndex + 2)) + else + // If there is no second occurrence of ConfigSeparator, we assume that the + // whole file is config. + (lines.tail, Nil) } else (Nil, lines) val configParsed = yamlParser.parse(config.mkString(LineSeparator)) diff --git a/scaladoc/src/dotty/tools/scaladoc/site/templates.scala b/scaladoc/src/dotty/tools/scaladoc/site/templates.scala index fe51bbe0614d..92e0096e5af1 100644 --- a/scaladoc/src/dotty/tools/scaladoc/site/templates.scala +++ b/scaladoc/src/dotty/tools/scaladoc/site/templates.scala @@ -11,7 +11,6 @@ import com.vladsch.flexmark.ext.gfm.tasklist.TaskListExtension import com.vladsch.flexmark.ext.tables.TablesExtension import com.vladsch.flexmark.ext.yaml.front.matter.{AbstractYamlFrontMatterVisitor, YamlFrontMatterExtension} import com.vladsch.flexmark.parser.{Parser, ParserEmulationProfile} -import com.vladsch.flexmark.util.options.{DataHolder, MutableDataSet} import com.vladsch.flexmark.html.HtmlRenderer import com.vladsch.flexmark.formatter.Formatter import liqp.Template diff --git a/scaladoc/src/dotty/tools/scaladoc/snippets/FlexmarkSnippetProcessor.scala 
b/scaladoc/src/dotty/tools/scaladoc/snippets/FlexmarkSnippetProcessor.scala index 69e7c7764985..33f0e089053a 100644 --- a/scaladoc/src/dotty/tools/scaladoc/snippets/FlexmarkSnippetProcessor.scala +++ b/scaladoc/src/dotty/tools/scaladoc/snippets/FlexmarkSnippetProcessor.scala @@ -4,7 +4,6 @@ package snippets import com.vladsch.flexmark.util.{ast => mdu, sequence} import com.vladsch.flexmark.{ast => mda} import com.vladsch.flexmark.formatter.Formatter -import com.vladsch.flexmark.util.options.MutableDataSet import scala.jdk.CollectionConverters._ import dotty.tools.scaladoc.tasty.comments.markdown.ExtendedFencedCodeBlock diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/ClassLikeSupport.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/ClassLikeSupport.scala index 7ecc4827836a..2c7017f76636 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/ClassLikeSupport.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/ClassLikeSupport.scala @@ -12,6 +12,9 @@ import NameNormalizer._ import SyntheticsSupport._ import dotty.tools.dotc.core.NameKinds +// Please use this only for things defined in the api.scala file +import dotty.tools.{scaladoc => api} + trait ClassLikeSupport: self: TastyParser => import qctx.reflect._ @@ -45,7 +48,7 @@ trait ClassLikeSupport: .filter(s => s.exists && !s.isHiddenByVisibility) .map( _.tree.asInstanceOf[DefDef]) constr.fold(Nil)( - _.termParamss.map(pList => ParametersList(pList.params.map(p => mkParameter(p, parameterModifier)), paramListModifier(pList.params))) + _.termParamss.map(pList => api.TermParameterList(pList.params.map(p => mkParameter(p, parameterModifier)), paramListModifier(pList.params))) ) if classDef.symbol.flags.is(Flags.Module) then Kind.Object @@ -61,14 +64,18 @@ trait ClassLikeSupport: signatureOnly: Boolean = false, modifiers: Seq[Modifier] = classDef.symbol.getExtraModifiers(), ): Member = - def unpackTreeToClassDef(tree: Tree): ClassDef = tree match - case tree: ClassDef => tree - case TypeDef(_, tbt: TypeBoundsTree) => 
unpackTreeToClassDef(tbt.tpe.typeSymbol.tree) - case TypeDef(_, tt: TypeTree) => unpackTreeToClassDef(tt.tpe.typeSymbol.tree) - case c: Apply => - c.symbol.owner.tree.symbol.tree match + def unpackTreeToClassDef(tree: Tree): ClassDef = + def unpackApply(a: Apply) = + a.symbol.owner.tree match case tree: ClassDef => tree - case tt: TypeTree => unpackTreeToClassDef(tt.tpe.typeSymbol.tree) + + tree match + case tree: ClassDef => tree + case TypeDef(_, tbt: TypeBoundsTree) => unpackTreeToClassDef(tbt.tpe.typeSymbol.tree) + case TypeDef(_, tt: TypeTree) => unpackTreeToClassDef(tt.tpe.typeSymbol.tree) + case c: Apply => unpackApply(c) + case Block(_, c: Apply) => unpackApply(c) + case tt: TypeTree => unpackTreeToClassDef(tt.tpe.typeSymbol.tree) def signatureWithName(s: dotty.tools.scaladoc.Signature): dotty.tools.scaladoc.Signature = s match @@ -142,11 +149,12 @@ trait ClassLikeSupport: dd.symbol.extendedSymbol.map { extSym => val memberInfo = unwrapMemberInfo(c, dd.symbol) val typeParams = dd.symbol.extendedTypeParams.map(mkTypeArgument(_, memberInfo.genericTypes)) - val termParams = dd.symbol.extendedTermParamLists.zipWithIndex.flatMap { case (paramList, index) => - memberInfo.paramLists(index) match - case EvidenceOnlyParameterList => Nil - case info: RegularParameterList => - Seq(ParametersList(paramList.params.map(mkParameter(_, memberInfo = info)), paramListModifier(paramList.params))) + val termParams = dd.symbol.extendedTermParamLists.zipWithIndex.flatMap { case (termParamList, index) => + memberInfo.termParamLists(index) match + case MemberInfo.EvidenceOnlyParameterList => None + case MemberInfo.RegularParameterList(info) => + Some(api.TermParameterList(termParamList.params.map(mkParameter(_, memberInfo = info)), paramListModifier(termParamList.params))) + case _ => assert(false, "memberInfo.termParamLists contains a type parameter list !") } val target = ExtensionTarget( extSym.symbol.normalizedName, @@ -266,7 +274,8 @@ trait ClassLikeSupport: def 
getParentsAsTreeSymbolTuples: List[(Tree, Symbol)] = if noPosClassDefs.contains(c.symbol) then Nil else for - parentTree <- c.parents if parentTree.pos.start != parentTree.pos.end // We assume here that order is correct + // TODO: add exists function to position methods in Quotes and replace the condition here for checking the JPath + parentTree <- c.parents if parentTree.pos.sourceFile.getJPath.isDefined && parentTree.pos.start != parentTree.pos.end // We assume here that order is correct parentSymbol = parentTree match case t: TypeTree => t.tpe.typeSymbol case tree if tree.symbol.isClassConstructor => tree.symbol.owner @@ -334,44 +343,67 @@ trait ClassLikeSupport: def parseMethod( c: ClassDef, methodSymbol: Symbol, - emptyParamsList: Boolean = false, paramPrefix: Symbol => String = _ => "", specificKind: (Kind.Def => Kind) = identity ): Member = val method = methodSymbol.tree.asInstanceOf[DefDef] - val paramLists: List[TermParamClause] = methodSymbol.nonExtensionTermParamLists - val genericTypes: List[TypeDef] = if (methodSymbol.isClassConstructor) Nil else methodSymbol.nonExtensionLeadingTypeParams + val paramLists = methodSymbol.nonExtensionParamLists val memberInfo = unwrapMemberInfo(c, methodSymbol) - val basicKind: Kind.Def = Kind.Def( - genericTypes.map(mkTypeArgument(_, memberInfo.genericTypes, memberInfo.contextBounds)), - paramLists.zipWithIndex.flatMap { (pList, index) => - memberInfo.paramLists(index) match - case EvidenceOnlyParameterList => Nil - case info: RegularParameterList => - Seq(ParametersList(pList.params.map( + val unshuffledMemberInfoParamLists = + if methodSymbol.isExtensionMethod && methodSymbol.isRightAssoc then + // Taken from RefinedPrinter.scala + // If you change the names of the clauses below, also change them in right-associative-extension-methods.md + val (leftTyParams, rest1) = memberInfo.paramLists.span(_.isType) + val (leadingUsing, rest2) = rest1.span(_.isUsing) + val (rightTyParams, rest3) = rest2.span(_.isType) + val 
(rightParam, rest4) = rest3.splitAt(1) + val (leftParam, rest5) = rest4.splitAt(1) + val (trailingUsing, rest6) = rest5.span(_.isUsing) + if leftParam.nonEmpty then + // leftTyParams ::: leadingUsing ::: leftParam ::: trailingUsing ::: rightTyParams ::: rightParam ::: rest6 + // because of takeRight after, this is equivalent to the following: + rightTyParams ::: rightParam ::: rest6 + else + memberInfo.paramLists // it wasn't a binary operator, after all. + else + memberInfo.paramLists + + val croppedUnshuffledMemberInfoParamLists = unshuffledMemberInfoParamLists.takeRight(paramLists.length) + + val basicDefKind: Kind.Def = Kind.Def( + paramLists.zip(croppedUnshuffledMemberInfoParamLists).flatMap{ + case (_: TermParamClause, MemberInfo.EvidenceOnlyParameterList) => Nil + case (pList: TermParamClause, MemberInfo.RegularParameterList(info)) => + Some(Left(api.TermParameterList(pList.params.map( mkParameter(_, paramPrefix, memberInfo = info)), paramListModifier(pList.params) - )) + ))) + case (TypeParamClause(genericTypeList), MemberInfo.TypeParameterList(memInfoTypes)) => + Some(Right(genericTypeList.map(mkTypeArgument(_, memInfoTypes, memberInfo.contextBounds)))) + case (_,_) => + assert(false, s"croppedUnshuffledMemberInfoParamLists and SymOps.nonExtensionParamLists disagree on whether this clause is a type or term one") } ) val methodKind = - if methodSymbol.isClassConstructor then Kind.Constructor(basicKind) - else if methodSymbol.flags.is(Flags.Implicit) then extractImplicitConversion(method.returnTpt.tpe) match - case Some(conversion) if paramLists.size == 0 || (paramLists.size == 1 && paramLists.head.params.size == 0) => - Kind.Implicit(basicKind, Some(conversion)) - case None if paramLists.size == 1 && paramLists(0).params.size == 1 => - Kind.Implicit(basicKind, Some( - ImplicitConversion( - paramLists(0).params(0).tpt.tpe.typeSymbol.dri, - method.returnTpt.tpe.typeSymbol.dri - ) - )) - case _ => - Kind.Implicit(basicKind, None) - else if 
methodSymbol.flags.is(Flags.Given) then Kind.Given(basicKind, Some(method.returnTpt.tpe.asSignature), extractImplicitConversion(method.returnTpt.tpe)) - else specificKind(basicKind) + if methodSymbol.isClassConstructor then Kind.Constructor(basicDefKind) + else if methodSymbol.flags.is(Flags.Implicit) then + val termParamLists: List[TermParamClause] = methodSymbol.nonExtensionTermParamLists + extractImplicitConversion(method.returnTpt.tpe) match + case Some(conversion) if termParamLists.size == 0 || (termParamLists.size == 1 && termParamLists.head.params.size == 0) => + Kind.Implicit(basicDefKind, Some(conversion)) + case None if termParamLists.size == 1 && termParamLists(0).params.size == 1 => + Kind.Implicit(basicDefKind, Some( + ImplicitConversion( + termParamLists(0).params(0).tpt.tpe.typeSymbol.dri, + method.returnTpt.tpe.typeSymbol.dri + ) + )) + case _ => + Kind.Implicit(basicDefKind, None) + else if methodSymbol.flags.is(Flags.Given) then Kind.Given(basicDefKind, Some(method.returnTpt.tpe.asSignature), extractImplicitConversion(method.returnTpt.tpe)) + else specificKind(basicDefKind) val origin = if !methodSymbol.isOverridden then Origin.RegularlyDefined else val overriddenSyms = methodSymbol.allOverriddenSymbols.map(_.owner) @@ -403,7 +435,7 @@ trait ClassLikeSupport: val inlinePrefix = if argument.symbol.flags.is(Flags.Inline) then "inline " else "" val nameIfNotSynthetic = Option.when(!argument.symbol.flags.is(Flags.Synthetic))(argument.symbol.normalizedName) val name = argument.symbol.normalizedName - Parameter( + api.TermParameter( argument.symbol.getAnnotations(), inlinePrefix + prefix(argument.symbol), nameIfNotSynthetic, @@ -497,7 +529,7 @@ trait ClassLikeSupport: experimental: Option[Annotation] = None ) = Member( name = symbol.normalizedName, - fullName = symbol.fullName, + fullName = symbol.normalizedFullName, dri = symbol.dri, kind = kind, visibility = symbol.getVisibility(), @@ -513,16 +545,26 @@ trait ClassLikeSupport: experimental = 
experimental ) - object EvidenceOnlyParameterList - type RegularParameterList = Map[String, TypeRepr] - type ParameterList = RegularParameterList | EvidenceOnlyParameterList.type case class MemberInfo( - genericTypes: Map[String, TypeBounds], - paramLists: List[ParameterList], + paramLists: List[MemberInfo.ParameterList], res: TypeRepr, contextBounds: Map[String, DSignature] = Map.empty, - ) + ){ + val genericTypes: Map[String, TypeBounds] = paramLists.collect{ case MemberInfo.TypeParameterList(types) => types }.headOption.getOrElse(Map()) + + val termParamLists: List[MemberInfo.ParameterList] = paramLists.filter(_.isTerm) + } + + object MemberInfo: + enum ParameterList(val isTerm: Boolean, val isUsing: Boolean): + inline def isType = !isTerm + case EvidenceOnlyParameterList extends ParameterList(isTerm = true, isUsing = false) + case RegularParameterList(m: Map[String, TypeRepr])(isUsing: Boolean) extends ParameterList(isTerm = true, isUsing) + case TypeParameterList(m: Map[String, TypeBounds]) extends ParameterList(isTerm = false, isUsing = false) + + export ParameterList.{RegularParameterList, EvidenceOnlyParameterList, TypeParameterList} + def unwrapMemberInfo(c: ClassDef, symbol: Symbol): MemberInfo = @@ -539,14 +581,15 @@ trait ClassLikeSupport: // Documenting method slightly different then its definition is withing the 'undefiend behaviour'. 
symbol.paramSymss.flatten.find(_.name == name).exists(_.flags.is(Flags.Implicit)) - def handlePolyType(polyType: PolyType): MemberInfo = - MemberInfo(polyType.paramNames.zip(polyType.paramBounds).toMap, List.empty, polyType.resType) + def handlePolyType(memberInfo: MemberInfo, polyType: PolyType): MemberInfo = + val typeParamList = MemberInfo.TypeParameterList(polyType.paramNames.zip(polyType.paramBounds).toMap) + MemberInfo(memberInfo.paramLists :+ typeParamList, polyType.resType) def handleMethodType(memberInfo: MemberInfo, methodType: MethodType): MemberInfo = val rawParams = methodType.paramNames.zip(methodType.paramTypes).toMap + val isUsing = methodType.isImplicit val (evidences, notEvidences) = rawParams.partition(e => isSyntheticEvidence(e._1)) - def findParamRefs(t: TypeRepr): Seq[ParamRef] = t match case paramRef: ParamRef => Seq(paramRef) case AppliedType(_, args) => args.flatMap(findParamRefs) @@ -573,22 +616,23 @@ trait ClassLikeSupport: val newParams = notEvidences ++ paramsThatLookLikeContextBounds - val newLists: List[ParameterList] = if newParams.isEmpty && contextBounds.nonEmpty - then memberInfo.paramLists ++ Seq(EvidenceOnlyParameterList) - else memberInfo.paramLists ++ Seq(newParams) + val termParamList = if newParams.isEmpty && contextBounds.nonEmpty + then MemberInfo.EvidenceOnlyParameterList + else MemberInfo.RegularParameterList(newParams)(isUsing) + - MemberInfo(memberInfo.genericTypes, newLists , methodType.resType, contextBounds.toMap) + MemberInfo(memberInfo.paramLists :+ termParamList, methodType.resType, contextBounds.toMap) def handleByNameType(memberInfo: MemberInfo, byNameType: ByNameType): MemberInfo = - MemberInfo(memberInfo.genericTypes, memberInfo.paramLists, byNameType.underlying) + MemberInfo(memberInfo.paramLists, byNameType.underlying) def recursivelyCalculateMemberInfo(memberInfo: MemberInfo): MemberInfo = memberInfo.res match - case p: PolyType => recursivelyCalculateMemberInfo(handlePolyType(p)) + case p: PolyType => 
recursivelyCalculateMemberInfo(handlePolyType(memberInfo, p)) case m: MethodType => recursivelyCalculateMemberInfo(handleMethodType(memberInfo, m)) case b: ByNameType => handleByNameType(memberInfo, b) case _ => memberInfo - recursivelyCalculateMemberInfo(MemberInfo(Map.empty, List.empty, baseTypeRepr)) + recursivelyCalculateMemberInfo(MemberInfo(List.empty, baseTypeRepr)) private def paramListModifier(parameters: Seq[ValDef]): String = if parameters.size > 0 then diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/InkuireSupport.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/InkuireSupport.scala index 0cdb3535c3ff..8a703cfb5d24 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/InkuireSupport.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/InkuireSupport.scala @@ -317,6 +317,8 @@ trait InkuireSupport(using DocContext) extends Resources: inner(tpe, vars) case tl @ TypeLambda(paramNames, _, resType) => Inkuire.TypeLambda(paramNames.map(Inkuire.TypeLambda.argument), inner(resType, vars)) //TODO [Inkuire] Type bounds + case pt @ PolyType(paramNames, _, resType) => + Inkuire.TypeLambda(paramNames.map(Inkuire.TypeLambda.argument), inner(resType, vars)) //TODO [Inkuire] Type bounds case r: Refinement => inner(r.info, vars) //TODO [Inkuire] Refinements case t @ AppliedType(tpe, typeList) => @@ -357,10 +359,8 @@ trait InkuireSupport(using DocContext) extends Resources: Inkuire.Type.unresolved //TODO [Inkuire] <- should be handled by Singleton case, but didn't work case MatchType(bond, sc, cases) => inner(sc, vars) - case ParamRef(TypeLambda(names, _, _), i) => - Inkuire.TypeLambda.argument(names(i)) - case ParamRef(m: MethodType, i) => - inner(m.paramTypes(i), vars) + case ParamRef(binder: LambdaType, i) => + Inkuire.TypeLambda.argument(binder.paramNames(i)) case RecursiveType(tp) => inner(tp, vars) case m@MethodType(_, typeList, resType) => diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/NameNormalizer.scala 
b/scaladoc/src/dotty/tools/scaladoc/tasty/NameNormalizer.scala index 687ad6ecbf44..196c3e056b36 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/NameNormalizer.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/NameNormalizer.scala @@ -17,6 +17,18 @@ object NameNormalizer { val escaped = escapedName(constructorNormalizedName) escaped } + + def ownerNameChain: List[String] = { + import reflect.* + if s.isNoSymbol then List.empty + else if s == defn.EmptyPackageClass then List.empty + else if s == defn.RootPackage then List.empty + else if s == defn.RootClass then List.empty + else s.owner.ownerNameChain :+ s.normalizedName + } + + def normalizedFullName: String = + s.ownerNameChain.mkString(".") private val ignoredKeywords: Set[String] = Set("this") diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/Scaladoc2AnchorCreator.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/Scaladoc2AnchorCreator.scala index ba59f77495b1..3c34a1c9bba9 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/Scaladoc2AnchorCreator.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/Scaladoc2AnchorCreator.scala @@ -7,10 +7,66 @@ import scala.util.matching.Regex object Scaladoc2AnchorCreator: - def getScaladoc2Type(using Quotes)(t: reflect.Tree) = - import reflect.* - val regex = t match - case d: DefDef => "def" - case t: TypeDef => "type" - case v: ValDef => "val|var" - t.show(using Printer.TreeShortCode).split(regex, 2)(1).replace(" ","") + def getScaladoc2Type(using Quotes)(sym: quotes.reflect.Symbol) = signatureAnchor(sym) + + /** Creates the signature anchor + * + * - `X` for a `type X ...` + * - `x:X` for a `val x: X` + * - `f[U1,...](x1:T1,...)(impliciti1:U1,impliciti2:U2,...)...:R` for a `def f[U1, ...](x1: T1, ...)(implicit i1: U1, i2: U2...)...: R` + * + * Types are printed without their paths. No spaces are printed in the output. 
+ */ + private def signatureAnchor(using Quotes)(sym: quotes.reflect.Symbol): String = + import quotes.reflect.* + def signatureType(tp: quotes.reflect.TypeRepr): String = + tp match + case mt @ MethodType(paramNames, paramTypes, res) => + val implicitPrefix = if mt.isImplicit then "implicit" else "" + val closeClause = res match + case _: MethodOrPoly => ")" + case _ => "):" + paramNames.zip(paramTypes.map(signatureType)) + .map((name, tpe) => s"$implicitPrefix$name:$tpe") + .mkString("(", ",", closeClause) + signatureType(res) + case PolyType(paramNames, paramBounds, res) => + val closeClause = res match + case _: MethodOrPoly => "]" + case _ => "]:" + paramNames.zip(paramBounds.map(signatureType)) + .map((name, tpe) => s"$name$tpe") + .mkString("[", ",", closeClause) + signatureType(res) + case TypeLambda(paramNames, paramBounds, res) => + paramNames.zip(paramBounds.map(signatureType)) + .map((name, tpe) => s"$name$tpe") + .mkString("[", ",", "]") + "=>" + signatureType(res) + case ByNameType(tp) => + ":" + signatureType(tp) + case TypeBounds(low, hi) => + val lowBound = if low =:= defn.NothingClass.typeRef then "" else ">:" + signatureType(low) + val hiBound = if low =:= defn.AnyClass.typeRef then "" else "<:" + signatureType(hi) + lowBound + hiBound + case tp: ParamRef => + tp.binder match + case binder: MethodType => binder.paramNames(tp.paramNum) + ".type" + case binder: PolyType => binder.paramNames(tp.paramNum) + case binder: LambdaType => binder.paramNames(tp.paramNum) + case AppliedType(tycon, args) => + args.map { + case tp: TypeBounds => "_" + signatureType(tp) + case tp => signatureType(tp) + }.mkString(signatureType(tycon) + "[", ",", "]") + case tp: AnnotatedType => + signatureType(tp.underlying) + "@" + tp.annotation.symbol.owner.name + case tp: ThisType => + signatureType(tp.tref) + ".this" + case tp: TypeRef => + tp.name + case tp => + // TODO handle other cases without using show (show does not have a stable representation) + tp.show(using 
Printer.TypeReprShortCode).replace(" ","") + + sym match + case sym if sym.isType => sym.name + case sym if sym.flags.is(Flags.Method) => sym.name + signatureType(sym.info) + case sym => sym.name + ":" + signatureType(sym.info) diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/SymOps.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/SymOps.scala index ca3dac7e12f8..5bc1b98a7fff 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/SymOps.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/SymOps.scala @@ -143,62 +143,67 @@ object SymOps: import reflect._ sym.flags.is(Flags.Artifact) - def isLeftAssoc: Boolean = !sym.name.endsWith(":") + def isRightAssoc: Boolean = sym.name.endsWith(":") + + def isLeftAssoc: Boolean = !sym.isRightAssoc def extendedSymbol: Option[reflect.ValDef] = import reflect.* - Option.when(sym.isExtensionMethod){ - val termParamss = sym.tree.asInstanceOf[DefDef].termParamss - if sym.isLeftAssoc || termParamss.size == 1 then termParamss(0).params(0) - else termParamss(1).params(0) + if sym.isExtensionMethod then + sym.extendedTermParamLists.find(param => !param.isImplicit && !param.isGiven).flatMap(_.params.headOption) + else None + + def splitExtensionParamList: (List[reflect.ParamClause], List[reflect.ParamClause]) = + import reflect.* + + def getPositionStartOption(pos: Option[Position]): Option[Int] = pos.flatMap { + case dotty.tools.dotc.util.NoSourcePosition => None + case pos: Position => Some(pos.start) + } + + def comparePositionStarts(posA: Option[Position], posB: Option[Position]): Option[Boolean] = + for { + startA <- getPositionStartOption(posA) + startB <- getPositionStartOption(posB) + } yield startA < startB + + sym.tree match + case tree: DefDef => + tree.paramss.partition(_.params.headOption.flatMap(param => + comparePositionStarts(param.symbol.pos, tree.symbol.pos)).getOrElse(false) + ) + case _ => Nil -> Nil + + def extendedParamLists: List[reflect.ParamClause] = sym.splitExtensionParamList._1 + + def extendedTypeParamLists: 
List[reflect.TypeParamClause] = + sym.extendedParamLists.collect { + case typeClause: reflect.TypeParamClause => typeClause } def extendedTypeParams: List[reflect.TypeDef] = - import reflect.* - val method = sym.tree.asInstanceOf[DefDef] - method.leadingTypeParams + sym.extendedTypeParamLists.headOption.map(_.params).getOrElse(List()) def extendedTermParamLists: List[reflect.TermParamClause] = - import reflect.* - if sym.nonExtensionLeadingTypeParams.nonEmpty then - sym.nonExtensionParamLists.takeWhile { - case _: TypeParamClause => false - case _ => true - }.collect { - case tpc: TermParamClause => tpc - } - else - List.empty + sym.extendedParamLists.collect { + case tpc: reflect.TermParamClause => tpc + } + + def nonExtensionParamLists: List[reflect.ParamClause] = + sym.splitExtensionParamList._2 def nonExtensionTermParamLists: List[reflect.TermParamClause] = - import reflect.* - if sym.nonExtensionLeadingTypeParams.nonEmpty then - sym.nonExtensionParamLists.dropWhile { - case _: TypeParamClause => false - case _ => true - }.drop(1).collect { - case tpc: TermParamClause => tpc - } - else - sym.nonExtensionParamLists.collect { - case tpc: TermParamClause => tpc - } + sym.nonExtensionParamLists.collect { + case tpc: reflect.TermParamClause => tpc + } - def nonExtensionParamLists: List[reflect.ParamClause] = - import reflect.* - val method = sym.tree.asInstanceOf[DefDef] - if sym.isExtensionMethod then - val params = method.paramss - val toDrop = if method.leadingTypeParams.nonEmpty then 2 else 1 - if sym.isLeftAssoc || params.size == 1 then params.drop(toDrop) - else params.head :: params.tail.drop(toDrop) - else method.paramss + def nonExtensionTypeParamLists: List[reflect.TypeParamClause] = + sym.nonExtensionParamLists.collect { + case typeClause: reflect.TypeParamClause => typeClause + } def nonExtensionLeadingTypeParams: List[reflect.TypeDef] = - import reflect.* - sym.nonExtensionParamLists.collectFirst { - case TypeParamClause(params) => params - 
}.toList.flatten + sym.nonExtensionTypeParamLists.headOption.map(_.params).getOrElse(List()) end extension @@ -230,7 +235,7 @@ class SymOpsWithLinkCache: def constructPathForScaladoc2: String = val l = escapeUrl(location.mkString("/")) val scaladoc2Anchor = if anchor.isDefined then { - "#" + getScaladoc2Type(sym.tree) + "#" + getScaladoc2Type(sym) } else "" docURL + l + extension + scaladoc2Anchor diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/SyntheticSupport.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/SyntheticSupport.scala index dabc6468d4c9..b33d5f61faac 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/SyntheticSupport.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/SyntheticSupport.scala @@ -49,7 +49,7 @@ object SyntheticsSupport: c.symbol.typeRef.baseClasses.map(b => b -> c.symbol.typeRef.baseType(b)).tail def typeForClass(using Quotes)(c: reflect.ClassDef): reflect.TypeRepr = - c.symbol.typeRef.appliedTo(c.symbol.typeMembers.filter(_.isTypeParam).map(_.typeRef)) + c.symbol.typeRef.appliedTo(c.symbol.declaredTypes.filter(_.isTypeParam).map(_.typeRef)) /* We need there to filter out symbols with certain flagsets, because these symbols come from compiler and TASTY can't handle them well. They are valdefs that describe case companion objects and cases from enum. 
diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/TastyParser.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/TastyParser.scala index f8be9e766fa8..cd1bed42f485 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/TastyParser.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/TastyParser.scala @@ -228,6 +228,6 @@ case class TastyParser( try Traverser.traverseTree(root)(Symbol.spliceOwner) catch case e: Throwable => println(s"Problem parsing ${root.pos}, documentation may not be generated.") - e.printStackTrace() + // e.printStackTrace() docs.result() diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/TypesSupport.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/TypesSupport.scala index 0cf2669407c8..c94eda9409b2 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/TypesSupport.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/TypesSupport.scala @@ -290,10 +290,10 @@ trait TypesSupport: } inner(sc) ++ keyword(" match ").l ++ plain("{\n").l ++ casesTexts ++ plain(spaces + "}").l - case ParamRef(TypeLambda(names, _, _), i) => tpe(names.apply(i)).l - case ParamRef(m: MethodType, i) => tpe(m.paramNames(i)).l ++ plain(".type").l + case ParamRef(binder: LambdaType, i) => tpe(binder.paramNames(i)).l + case RecursiveType(tp) => inner(tp) case MatchCase(pattern, rhs) => diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/Comments.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/Comments.scala index 66844f5049d3..ff4405d3ec71 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/Comments.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/Comments.scala @@ -7,7 +7,6 @@ import scala.util.Try import com.vladsch.flexmark.util.{ast => mdu, sequence} import com.vladsch.flexmark.{ast => mda} import com.vladsch.flexmark.formatter.Formatter -import com.vladsch.flexmark.util.options.MutableDataSet import com.vladsch.flexmark.util.sequence.BasedSequence import scala.quoted._ diff --git 
a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/MarkdownParser.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/MarkdownParser.scala index f5dd0ea88528..edf9051c0ed7 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/MarkdownParser.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/MarkdownParser.scala @@ -9,7 +9,6 @@ import com.vladsch.flexmark.formatter.Formatter import com.vladsch.flexmark.parser.Parser import com.vladsch.flexmark.util.sequence.CharSubSequence import com.vladsch.flexmark.parser.ParserEmulationProfile -import com.vladsch.flexmark.ext.gfm.tables.TablesExtension import com.vladsch.flexmark.ext.gfm.strikethrough.StrikethroughExtension import com.vladsch.flexmark.ext.gfm.tasklist.TaskListExtension import com.vladsch.flexmark.ext.emoji.EmojiExtension @@ -17,10 +16,12 @@ import com.vladsch.flexmark.ext.autolink.AutolinkExtension import com.vladsch.flexmark.ext.anchorlink.AnchorLinkExtension import com.vladsch.flexmark.ext.yaml.front.matter.YamlFrontMatterExtension import com.vladsch.flexmark.ext.wikilink.WikiLinkExtension -import com.vladsch.flexmark.util.options.{ DataHolder, MutableDataSet } -import com.vladsch.flexmark.util.builder.Extension import scala.jdk.CollectionConverters._ +import com.vladsch.flexmark.util.misc.Extension +import com.vladsch.flexmark.ext.tables.TablesExtension +import com.vladsch.flexmark.util.data.MutableDataSet +import com.vladsch.flexmark.util.data.DataHolder object MarkdownParser { diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/DocFlexmarkExtension.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/DocFlexmarkExtension.scala index ad5533d634ad..d797eaed7fbf 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/DocFlexmarkExtension.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/DocFlexmarkExtension.scala @@ -15,6 +15,9 @@ import com.vladsch.flexmark._ import dotty.tools.scaladoc.snippets._ import 
scala.jdk.CollectionConverters._ +import com.vladsch.flexmark.util.data.MutableDataHolder +import com.vladsch.flexmark.util.data.DataHolder +import com.vladsch.flexmark.html.renderer.NodeRenderingHandler.CustomNodeRenderer class DocLinkNode( val target: DocLink, @@ -40,7 +43,7 @@ class DocFlexmarkParser(resolveLink: String => DocLink) extends Parser.ParserExt class Factory extends LinkRefProcessorFactory: override def getBracketNestingLevel(options: DataHolder) = 1 override def getWantExclamationPrefix(options: DataHolder) = false - override def create(doc: Document): LinkRefProcessor = + override def apply(doc: Document): LinkRefProcessor = new WikiLinkLinkRefProcessor(doc): override def createNode(nodeChars: BasedSequence): Node = val chars = nodeChars.toString.substring(2, nodeChars.length - 2) @@ -75,7 +78,7 @@ case class DocFlexmarkRenderer(renderLink: (DocLink, String) => String) ) object Factory extends NodeRendererFactory: - override def create(options: DataHolder): NodeRenderer = Render + override def apply(options: DataHolder): NodeRenderer = Render def extend(htmlRendererBuilder: HtmlRenderer.Builder, tpe: String): Unit = htmlRendererBuilder.nodeRendererFactory(Factory) diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/SectionRenderingExtension.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/SectionRenderingExtension.scala index 1fa1a604c85a..421c7eaab76f 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/SectionRenderingExtension.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/SectionRenderingExtension.scala @@ -3,17 +3,23 @@ package tasty.comments.markdown import com.vladsch.flexmark.html.* import com.vladsch.flexmark.html.renderer.* +import com.vladsch.flexmark.html.renderer.NodeRenderingHandler.CustomNodeRenderer import com.vladsch.flexmark.parser.* import com.vladsch.flexmark.ext.wikilink.* import com.vladsch.flexmark.ext.wikilink.internal.WikiLinkLinkRefProcessor 
import com.vladsch.flexmark.util.ast.* import com.vladsch.flexmark.util.options.* import com.vladsch.flexmark.util.sequence.BasedSequence -import com.vladsch.flexmark.util.html.{AttributeImpl, Attributes} import com.vladsch.flexmark.* import com.vladsch.flexmark.ast.FencedCodeBlock import scala.collection.mutable +import com.vladsch.flexmark.util.data.MutableDataHolder +import com.vladsch.flexmark.util.html.Attributes +import com.vladsch.flexmark.util.html.AttributeImpl +import com.vladsch.flexmark.util.data.DataHolder +import com.vladsch.flexmark.util.html.Attribute +import com.vladsch.flexmark.util.html.MutableAttributes object SectionRenderingExtension extends HtmlRenderer.HtmlRendererExtension: @@ -30,18 +36,18 @@ object SectionRenderingExtension extends HtmlRenderer.HtmlRendererExtension: repeatedIds.update((c, header.getText), repeatedIds((c, header.getText)) + 1) val id = idGenerator.getId(header.getText.append(ifSuffixStr)) val anchor = AnchorLink(s"#$id") - val attributes = Attributes() val headerClass: String = header.getLevel match case 1 => "h500" case 2 => "h500" case 3 => "h400" case 4 => "h300" case _ => "h50" - attributes.addValue(AttributeImpl.of("class", headerClass)) + val attributes = MutableAttributes() + attributes.addValue("class", headerClass) val embeddedAttributes = EmbeddedAttributeProvider.EmbeddedNodeAttributes(header, attributes) header.prependChild(embeddedAttributes) header.prependChild(anchor) - html.attr(AttributeImpl.of("id", id)).withAttr.tag("section", false, false, () => { + html.attr("id", id).withAttr.tag("section", false, false, () => { c.render(header) body.foreach(c.render) }) @@ -59,7 +65,8 @@ object SectionRenderingExtension extends HtmlRenderer.HtmlRendererExtension: ) object Factory extends NodeRendererFactory: - override def create(options: DataHolder): NodeRenderer = Render + override def apply(options: DataHolder): NodeRenderer = Render + def extend(htmlRendererBuilder: HtmlRenderer.Builder, tpe: String): Unit = 
htmlRendererBuilder.nodeRendererFactory(Factory) diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/SnippetRenderingExtension.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/SnippetRenderingExtension.scala index e70b0883a31e..e980c5fc44ef 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/SnippetRenderingExtension.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/SnippetRenderingExtension.scala @@ -13,6 +13,9 @@ import com.vladsch.flexmark.util.options._ import com.vladsch.flexmark.util.sequence.BasedSequence import com.vladsch.flexmark._ import com.vladsch.flexmark.ast.FencedCodeBlock +import com.vladsch.flexmark.util.data.MutableDataHolder +import com.vladsch.flexmark.html.renderer.NodeRenderingHandler.CustomNodeRenderer +import com.vladsch.flexmark.util.data.DataHolder /** * SnippetRenderingExtension is responsible for running an analysis for scala codeblocks in the static documentation/scaladoc comments. @@ -39,7 +42,7 @@ object SnippetRenderingExtension extends HtmlRenderer.HtmlRendererExtension: ) object Factory extends NodeRendererFactory: - override def create(options: DataHolder): NodeRenderer = Render + override def apply(options: DataHolder): NodeRenderer = Render def extend(htmlRendererBuilder: HtmlRenderer.Builder, tpe: String): Unit = htmlRendererBuilder.nodeRendererFactory(Factory) diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/reflect.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/reflect.scala index b48519e29d28..419beac50134 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/reflect.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/reflect.scala @@ -4,4 +4,4 @@ package tasty import scala.quoted._ /** Shorthand for `quotes.reflect` */ -transparent inline def reflect(using inline q: Quotes): q.reflect.type = q.reflect +transparent inline def reflect(using q: Quotes): q.reflect.type = q.reflect diff --git 
a/scaladoc/src/dotty/tools/scaladoc/transformers/ImplicitMembersExtensionTransformer.scala b/scaladoc/src/dotty/tools/scaladoc/transformers/ImplicitMembersExtensionTransformer.scala index 44eba3a39807..8ed7436bb11d 100644 --- a/scaladoc/src/dotty/tools/scaladoc/transformers/ImplicitMembersExtensionTransformer.scala +++ b/scaladoc/src/dotty/tools/scaladoc/transformers/ImplicitMembersExtensionTransformer.scala @@ -29,7 +29,7 @@ class ImplicitMembersExtensionTransformer(using DocContext) extends(Module => Mo case m @ Member(_, _, _, Kind.Extension(ExtensionTarget(_, _, _, _, MyDri, _), _), Origin.RegularlyDefined) => val kind = m.kind match case Kind.Extension(_, d) => d - case _ => Kind.Def(Nil, Nil) + case _ => Kind.Def(Nil) Seq(m.withOrigin(Origin.ExtensionFrom(source.name, source.dri)).withKind(kind)) case m @ Member(_, _, _, conversionProvider: ImplicitConversionProvider, Origin.RegularlyDefined) => diff --git a/scaladoc/src/dotty/tools/scaladoc/translators/ScalaSignatureProvider.scala b/scaladoc/src/dotty/tools/scaladoc/translators/ScalaSignatureProvider.scala index 88561282afb0..fd8dfc4f5b6c 100644 --- a/scaladoc/src/dotty/tools/scaladoc/translators/ScalaSignatureProvider.scala +++ b/scaladoc/src/dotty/tools/scaladoc/translators/ScalaSignatureProvider.scala @@ -58,8 +58,8 @@ class ScalaSignatureProvider: builder.kind(showKind), builder.name(member.name, member.dri), builder - .generics(kind.typeParams) - .functionParameters(kind.argsLists) + .typeParamList(kind.typeParams) + .functionTermParameters(kind.argsLists) .parentsSignature(member) ) @@ -106,8 +106,7 @@ class ScalaSignatureProvider: builder.kind(showKind), builder.name(method.name, method.dri), builder - .generics(kind.typeParams) - .functionParameters(kind.argsLists) + .functionParameters(kind.paramLists) .pipe { builder => instance.fold(builder)(i => builder.plain(": ").signature(i)) } @@ -151,7 +150,7 @@ class ScalaSignatureProvider: builder.modifiersAndVisibility(typeDef), builder.kind(tpe), 
builder.name(typeDef.name, typeDef.dri), - builder.generics(tpe.typeParams).pipe { bdr => + builder.typeParamList(tpe.typeParams).pipe { bdr => if (!tpe.opaque) { (if tpe.concreate then bdr.plain(" = ") else bdr) .signature(typeDef.signature) diff --git a/scaladoc/src/dotty/tools/scaladoc/translators/ScalaSignatureUtils.scala b/scaladoc/src/dotty/tools/scaladoc/translators/ScalaSignatureUtils.scala index acbfe87b5d25..d28dd6ca18fe 100644 --- a/scaladoc/src/dotty/tools/scaladoc/translators/ScalaSignatureUtils.scala +++ b/scaladoc/src/dotty/tools/scaladoc/translators/ScalaSignatureUtils.scala @@ -26,7 +26,7 @@ case class SignatureBuilder(content: Signature = Nil) extends ScalaSignatureUtil def annotationsBlock(d: Member): SignatureBuilder = d.annotations.foldLeft(this){ (bdr, annotation) => bdr.buildAnnotation(annotation)} - def annotationsInline(d: Parameter): SignatureBuilder = + def annotationsInline(d: TermParameter): SignatureBuilder = d.annotations.foldLeft(this){ (bdr, annotation) => bdr.buildAnnotation(annotation) } def annotationsInline(t: TypeParameter): SignatureBuilder = @@ -74,21 +74,27 @@ case class SignatureBuilder(content: Signature = Nil) extends ScalaSignatureUtil def kind(k: Kind) = keyword(k.name + " ") - def generics(on: Seq[TypeParameter]) = list(on.toList, List(Plain("[")), List(Plain("]"))){ (bdr, e) => + + def functionParameters(paramss: Seq[ Either[TermParameterList,TypeParameterList] ]) = + this.list(paramss, separator = List(Plain(""))) { + case (bld, Left(params: TermParameterList)) => bld.termParamList(params) + case (bld, Right(params: TypeParameterList)) => bld.typeParamList(params) + } + + def termParamList(params: TermParameterList) = + this.list(params.parameters, prefix = List(Plain("("), Keyword(params.modifiers)), suffix = List(Plain(")")), forcePrefixAndSuffix = true) { (bld, p) => + val annotationsAndModifiers = bld.annotationsInline(p) + .keyword(p.modifiers) + val name = 
p.name.fold(annotationsAndModifiers)(annotationsAndModifiers.name(_, p.dri).plain(": ")) + name.signature(p.signature) + } + + def typeParamList(on: TypeParameterList) = list(on.toList, List(Plain("[")), List(Plain("]"))){ (bdr, e) => bdr.annotationsInline(e).keyword(e.variance).tpe(e.name, Some(e.dri)).signature(e.signature) } - def functionParameters(params: Seq[ParametersList]) = - if params.isEmpty then this.plain("") - else if params.size == 1 && params(0).parameters == Nil then this.plain("()") - else this.list(params, separator = List(Plain(""))) { (bld, pList) => - bld.list(pList.parameters, prefix = List(Plain("("), Keyword(pList.modifiers)), suffix = List(Plain(")")), forcePrefixAndSuffix = true) { (bld, p) => - val annotationsAndModifiers = bld.annotationsInline(p) - .keyword(p.modifiers) - val name = p.name.fold(annotationsAndModifiers)(annotationsAndModifiers.name(_, p.dri).plain(": ")) - name.signature(p.signature) - } - } + def functionTermParameters(paramss: Seq[TermParameterList]) = + this.list(paramss, separator = List(Plain(""))) { (bld, pList) => bld.termParamList(pList) } trait ScalaSignatureUtils: extension (tokens: Seq[String]) def toSignatureString(): String = diff --git a/scaladoc/src/dotty/tools/scaladoc/util/html.scala b/scaladoc/src/dotty/tools/scaladoc/util/html.scala index e66ba3a4b706..72776a7413aa 100644 --- a/scaladoc/src/dotty/tools/scaladoc/util/html.scala +++ b/scaladoc/src/dotty/tools/scaladoc/util/html.scala @@ -108,6 +108,7 @@ object HTML: val onclick=Attr("onclick") val titleAttr =Attr("title") val onkeyup = Attr("onkeyup") + val target = Attr("target") def raw(content: String): AppliedTag = new AppliedTag(content) def raw(content: StringBuilder): AppliedTag = content diff --git a/scaladoc/test-documentations/emptyPage/_docs/hello.md b/scaladoc/test-documentations/emptyPage/_docs/hello.md new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/scaladoc/test-documentations/noConfigEnd/_docs/hello.md 
b/scaladoc/test-documentations/noConfigEnd/_docs/hello.md new file mode 100644 index 000000000000..3809c65bce02 --- /dev/null +++ b/scaladoc/test-documentations/noConfigEnd/_docs/hello.md @@ -0,0 +1,3 @@ +--- +title: My page +foo: bar diff --git a/scaladoc/test/dotty/tools/scaladoc/signatures/SignatureTest.scala b/scaladoc/test/dotty/tools/scaladoc/signatures/SignatureTest.scala index 4d8a9f46f21e..d5b7a0b9b6f8 100644 --- a/scaladoc/test/dotty/tools/scaladoc/signatures/SignatureTest.scala +++ b/scaladoc/test/dotty/tools/scaladoc/signatures/SignatureTest.scala @@ -43,7 +43,7 @@ abstract class SignatureTest( val unexpected = unexpectedFromSources.flatMap(actualSignatures.get).flatten val expectedButNotFound = expectedFromSources.flatMap { - case (k, v) => findMissingSingatures(v, actualSignatures.getOrElse(k, Nil)) + case (k, v) => findMissingSignatures(v, actualSignatures.getOrElse(k, Nil)) } val missingReport = Option.when(!ignoreMissingSignatures && !expectedButNotFound.isEmpty) @@ -75,7 +75,7 @@ abstract class SignatureTest( private val unexpectedRegex = raw"(.+)//unexpected".r private val identifierRegex = raw"^\s*(`.*`|(?:\w+)(?:_[^\[\(\s]+)|\w+|[^\[\(\s]+)".r - private def findMissingSingatures(expected: Seq[String], actual: Seq[String]): Set[String] = + private def findMissingSignatures(expected: Seq[String], actual: Seq[String]): Set[String] = expected.toSet &~ actual.toSet extension (s: String) @@ -84,10 +84,12 @@ abstract class SignatureTest( private def findName(signature: String, kinds: Seq[String]): Option[String] = for - kindMatch <- kinds.flatMap(k => s"\\b$k\\b".r.findFirstMatchIn(signature)).headOption + kindMatch <- kinds.flatMap(k =>s"\\b$k\\b".r.findFirstMatchIn(signature)).headOption + kind <- Option(kindMatch.group(0)) // to filter out nulls afterKind <- Option(kindMatch.after(0)) // to filter out nulls - nameMatch <- identifierRegex.findFirstMatchIn(afterKind) - yield nameMatch.group(1) + name <- if kind.contains("extension") then 
Some(signature) // The name of an extension will always be the signature itself + else identifierRegex.findFirstMatchIn(afterKind).map(_.group(1)) + yield name private def signaturesFromSources(source: Source, kinds: Seq[String]): Seq[SignatureRes] = source.getLines.map(_.trim) @@ -110,6 +112,9 @@ abstract class SignatureTest( def processFile(path: Path): Unit = if filterFunc(path) then val document = Jsoup.parse(IO.read(path)) + val documentable = document.select(".groupHeader").forEach { element => + signatures += element.text + } val content = document.select(".documentableElement").forEach { elem => val annotations = elem.select(".annotations").eachText.asScala.mkString("") val other = elem.select(".header .other-modifiers").eachText.asScala.mkString("") @@ -123,12 +128,11 @@ abstract class SignatureTest( signatures += all } - IO.foreachFileIn(output, processFile) signatures.result object SignatureTest { val classlikeKinds = Seq("class", "object", "trait", "enum") // TODO add docs for packages - val members = Seq("type", "def", "val", "var", "given") + val members = Seq("type", "def", "val", "var", "given", "extension") val all = classlikeKinds ++ members } diff --git a/scaladoc/test/dotty/tools/scaladoc/signatures/TranslatableSignaturesTestCases.scala b/scaladoc/test/dotty/tools/scaladoc/signatures/TranslatableSignaturesTestCases.scala index 7da1bb9b7e03..a09234be5512 100644 --- a/scaladoc/test/dotty/tools/scaladoc/signatures/TranslatableSignaturesTestCases.scala +++ b/scaladoc/test/dotty/tools/scaladoc/signatures/TranslatableSignaturesTestCases.scala @@ -39,7 +39,9 @@ class PackageObjectSymbolSignatures extends SignatureTest("packageObjectSymbolSi class MergedPackageSignatures extends SignatureTest("mergedPackage", SignatureTest.all.filterNot(_ == "object"), sourceFiles = List("mergedPackage1", "mergedPackage2", "mergedPackage3")) -class ExtensionMethodSignature extends SignatureTest("extensionMethodSignatures", SignatureTest.all) +class 
ExtensionMethodSignature extends SignatureTest("extensionMethodSignatures", SignatureTest.all.filterNot(_ == "extension")) + +class ExtensionMethodParamsSignature extends SignatureTest("extensionParams", SignatureTest.all) class ClassModifiers extends SignatureTest("classModifiers", SignatureTest.classlikeKinds) @@ -104,3 +106,7 @@ class ImplicitMembers extends SignatureTest( Seq("def"), filterFunc = _.toString.endsWith("OuterClass$ImplicitMemberTarget.html") ) + +class NonScala3Parent extends SignatureTest("nonScala3Parent", SignatureTest.all) + +class ExtendsCall extends SignatureTest("extendsCall", SignatureTest.all) diff --git a/scaladoc/test/dotty/tools/scaladoc/site/BlogParserTest.scala b/scaladoc/test/dotty/tools/scaladoc/site/BlogParserTest.scala new file mode 100644 index 000000000000..e27c257c8e4a --- /dev/null +++ b/scaladoc/test/dotty/tools/scaladoc/site/BlogParserTest.scala @@ -0,0 +1,19 @@ +package dotty.tools.scaladoc +package site + +import org.junit.Test +import org.junit.Assert._ + +class BlogParserTest: + + private val blogConfig = + """input: blog + |output: blog + |hidden: false + |""".stripMargin + + @Test + def loadBlog(): Unit = assertEquals( + BlogConfig("blog", "blog", false), + BlogParser.readYml(blogConfig) + ) \ No newline at end of file diff --git a/scaladoc/test/dotty/tools/scaladoc/site/SiteGeneratationTest.scala b/scaladoc/test/dotty/tools/scaladoc/site/SiteGeneratationTest.scala index 7ce16933997a..e012044156cc 100644 --- a/scaladoc/test/dotty/tools/scaladoc/site/SiteGeneratationTest.scala +++ b/scaladoc/test/dotty/tools/scaladoc/site/SiteGeneratationTest.scala @@ -95,6 +95,22 @@ class SiteGeneratationTest extends BaseHtmlTest: testApiPages(mainTitle = projectName, parents = Nil, hasToplevelIndexIndex = false) } + @Test + def emptyPage() = withGeneratedSite(testDocPath.resolve("emptyPage")){ + withHtmlFile("docs/hello.html") { content => + // There should be no content as the page body is empty. 
+ content.assertTextsIn("#content", Nil*) + } + } + + @Test + def noConfigEnd() = withGeneratedSite(testDocPath.resolve("noConfigEnd")){ + withHtmlFile("docs/hello.html") { content => + // There should be no content as the page body is empty. + content.assertTextsIn("#content", Nil*) + } + } + @Test def staticLinking() = withGeneratedSite(testDocPath.resolve("static-links")){ diff --git a/scaladoc/test/dotty/tools/scaladoc/site/TemplateFileTests.scala b/scaladoc/test/dotty/tools/scaladoc/site/TemplateFileTests.scala index 203ab9cf5ed1..f07868ad4f44 100644 --- a/scaladoc/test/dotty/tools/scaladoc/site/TemplateFileTests.scala +++ b/scaladoc/test/dotty/tools/scaladoc/site/TemplateFileTests.scala @@ -202,6 +202,7 @@ class TemplateFileTests: content -> "md" ) ) + @Test def markdown(): Unit = testTemplate( @@ -222,10 +223,10 @@ class TemplateFileTests: ext = "md" ) { t => assertEquals( - """
      - |

      Hello there!

      + """
      + |

      Hello there2!

      |
      """.stripMargin, - t.resolveInner(RenderingContext(Map("msg" -> "there"))).code.trim()) + t.resolveInner(RenderingContext(Map("msg" -> "there2"))).code.trim()) } @Test diff --git a/scaladoc/test/dotty/tools/scaladoc/snippets/SnippetsE2eTest.scala b/scaladoc/test/dotty/tools/scaladoc/snippets/SnippetsE2eTest.scala index 1f28c938033d..616f7ae7f35e 100644 --- a/scaladoc/test/dotty/tools/scaladoc/snippets/SnippetsE2eTest.scala +++ b/scaladoc/test/dotty/tools/scaladoc/snippets/SnippetsE2eTest.scala @@ -13,7 +13,6 @@ import dotty.tools.dotc.reporting.{ Diagnostic, StoreReporter } import com.vladsch.flexmark.util.{ast => mdu, sequence} import com.vladsch.flexmark.{ast => mda} import com.vladsch.flexmark.formatter.Formatter -import com.vladsch.flexmark.util.options.MutableDataSet import scala.jdk.CollectionConverters._ import dotty.tools.scaladoc.tasty.comments.markdown.ExtendedFencedCodeBlock diff --git a/scaladoc/test/dotty/tools/scaladoc/testUtils.scala b/scaladoc/test/dotty/tools/scaladoc/testUtils.scala index 21ed7398f74e..2ba78c321eab 100644 --- a/scaladoc/test/dotty/tools/scaladoc/testUtils.scala +++ b/scaladoc/test/dotty/tools/scaladoc/testUtils.scala @@ -11,9 +11,9 @@ import java.nio.file.Paths case class ReportedDiagnostics(errors: List[Diagnostic], warnings: List[Diagnostic], infos: List[Diagnostic]): - def errorMsgs = errors.map(_.msg.rawMessage) - def warningMsgs = warnings.map(_.msg.rawMessage) - def infoMsgs = infos.map(_.msg.rawMessage) + def errorMsgs = errors.map(_.msg.message) + def warningMsgs = warnings.map(_.msg.message) + def infoMsgs = infos.map(_.msg.message) extension (c: CompilerContext) def reportedDiagnostics: ReportedDiagnostics = diff --git a/semanticdb/project/build.properties b/semanticdb/project/build.properties index 22af2628c413..46e43a97ed86 100644 --- a/semanticdb/project/build.properties +++ b/semanticdb/project/build.properties @@ -1 +1 @@ -sbt.version=1.7.1 +sbt.version=1.8.2 diff --git 
a/sjs-compiler-tests/test/scala/dotty/tools/dotc/ScalaJSCompilationTests.scala b/sjs-compiler-tests/test/scala/dotty/tools/dotc/ScalaJSCompilationTests.scala index ca4f292568bb..0f4eb633b770 100644 --- a/sjs-compiler-tests/test/scala/dotty/tools/dotc/ScalaJSCompilationTests.scala +++ b/sjs-compiler-tests/test/scala/dotty/tools/dotc/ScalaJSCompilationTests.scala @@ -6,6 +6,7 @@ import org.junit.{ Test, BeforeClass, AfterClass } import org.junit.experimental.categories.Category import scala.concurrent.duration._ +import reporting.TestReporter import vulpix._ @Category(Array(classOf[ScalaJSCompilationTests])) @@ -23,6 +24,7 @@ class ScalaJSCompilationTests extends ParallelTesting { def isInteractive = SummaryReport.isInteractive def testFilter = Properties.testsFilter def updateCheckFiles: Boolean = Properties.testsUpdateCheckfile + def failedTests = TestReporter.lastRunFailedTests // Negative tests ------------------------------------------------------------ diff --git a/staging/src/scala/quoted/staging/Compiler.scala b/staging/src/scala/quoted/staging/Compiler.scala index c9abe3fa75c3..fbe6a3915a08 100644 --- a/staging/src/scala/quoted/staging/Compiler.scala +++ b/staging/src/scala/quoted/staging/Compiler.scala @@ -13,7 +13,7 @@ object Compiler: /** Create a new instance of the compiler using the the classloader of the application. 
* - * Usuage: + * Usage: * ``` * import scala.quoted.staging._ * given Compiler = Compiler.make(getClass.getClassLoader) diff --git a/staging/test-resources/repl-staging/i6007 b/staging/test-resources/repl-staging/i6007 index be9d5c0f92d6..0d6fbd0cffb1 100644 --- a/staging/test-resources/repl-staging/i6007 +++ b/staging/test-resources/repl-staging/i6007 @@ -3,7 +3,7 @@ scala> import quoted.staging.{Compiler => StagingCompiler, _} scala> implicit def compiler: StagingCompiler = StagingCompiler.make(getClass.getClassLoader) def compiler: quoted.staging.Compiler scala> def v(using Quotes) = '{ (if true then Some(1) else None).map(v => v+1) } -def v(using x$1: quoted.Quotes): quoted.Expr[Option[Int]] +def v(using x$1: quoted.Quotes): scala.quoted.Expr[Option[Int]] scala> scala.quoted.staging.withQuotes(v.show) val res0: String = (if (true) scala.Some.apply[scala.Int](1) else scala.None).map[scala.Int](((v: scala.Int) => v.+(1))) scala> scala.quoted.staging.run(v) diff --git a/tasty/src/dotty/tools/tasty/TastyBuffer.scala b/tasty/src/dotty/tools/tasty/TastyBuffer.scala index 1d48027087f5..f9266cf23617 100644 --- a/tasty/src/dotty/tools/tasty/TastyBuffer.scala +++ b/tasty/src/dotty/tools/tasty/TastyBuffer.scala @@ -193,4 +193,9 @@ class TastyBuffer(initialSize: Int) { * After `assemble` no more output actions to this buffer are permitted. 
*/ def assemble(): Unit = () + + def reset(): Unit = { + java.util.Arrays.fill(bytes, 0, length, 0.toByte) + length = 0 + } } diff --git a/tasty/src/dotty/tools/tasty/TastyFormat.scala b/tasty/src/dotty/tools/tasty/TastyFormat.scala index 98cba90bdccf..39d559234868 100644 --- a/tasty/src/dotty/tools/tasty/TastyFormat.scala +++ b/tasty/src/dotty/tools/tasty/TastyFormat.scala @@ -91,6 +91,7 @@ Standard-Section: "ASTs" TopLevelStat* THROW throwableExpr_Term -- throw throwableExpr NAMEDARG paramName_NameRef arg_Term -- paramName = arg APPLY Length fn_Term arg_Term* -- fn(args) + APPLYsigpoly Length fn_Term meth_Type arg_Term* -- The application of a signature-polymorphic method TYPEAPPLY Length fn_Term arg_Type* -- fn[args] SUPER Length this_Term mixinTypeIdent_Tree? -- super[mixin] TYPED Length expr_Term ascriptionType_Term -- expr: ascription @@ -121,7 +122,8 @@ Standard-Section: "ASTs" TopLevelStat* MATCHtpt Length bound_Term? sel_Term CaseDef* -- sel match { CaseDef } where `bound` is optional upper bound of all rhs BYNAMEtpt underlying_Term -- => underlying SHAREDterm term_ASTRef -- Link to previously serialized term - HOLE Length idx_Nat arg_Tree* -- Hole where a splice goes with sequence number idx, splice is applied to arguments `arg`s + HOLE Length idx_Nat tpe_Type arg_Tree* -- Splice hole with index `idx`, the type of the hole `tpe`, type and term arguments of the hole `arg`s + CaseDef = CASEDEF Length pat_Term rhs_Tree guard_Tree? -- case pat if guard => rhs ImplicitArg = IMPLICITARG arg_Term -- implicit unapply argument @@ -288,7 +290,7 @@ object TastyFormat { * compatibility, but remains backwards compatible, with all * preceeding `MinorVersion`. */ - final val MinorVersion: Int = 2 + final val MinorVersion: Int = 3 /** Natural Number. The `ExperimentalVersion` allows for * experimentation with changes to TASTy without committing @@ -578,6 +580,7 @@ object TastyFormat { // final val ??? = 178 // final val ??? 
= 179 final val METHODtype = 180 + final val APPLYsigpoly = 181 final val MATCHtype = 190 final val MATCHtpt = 191 @@ -744,6 +747,7 @@ object TastyFormat { case BOUNDED => "BOUNDED" case APPLY => "APPLY" case TYPEAPPLY => "TYPEAPPLY" + case APPLYsigpoly => "APPLYsigpoly" case NEW => "NEW" case THROW => "THROW" case TYPED => "TYPED" diff --git a/tasty/src/dotty/tools/tasty/TastyHash.scala b/tasty/src/dotty/tools/tasty/TastyHash.scala index aff663f42a8d..701328d578a3 100644 --- a/tasty/src/dotty/tools/tasty/TastyHash.scala +++ b/tasty/src/dotty/tools/tasty/TastyHash.scala @@ -6,10 +6,10 @@ object TastyHash { * * from https://en.wikipedia.org/wiki/PJW_hash_function#Algorithm */ - def pjwHash64(data: Array[Byte]): Long = { + def pjwHash64(data: Array[Byte], length: Int): Long = { var h = 0L var i = 0 - while (i < data.length) { + while (i < length) { val d = data(i) & 0xFFL // Interpret byte as unsigned byte h = (h << 8) + d val high = h & 0xFF00000000000000L @@ -19,4 +19,6 @@ object TastyHash { } h } + def pjwHash64(data: Array[Byte]): Long = + pjwHash64(data, data.length) } diff --git a/tasty/src/dotty/tools/tasty/util/Util.scala b/tasty/src/dotty/tools/tasty/util/Util.scala index 5726e65773b0..750f5956c5cc 100644 --- a/tasty/src/dotty/tools/tasty/util/Util.scala +++ b/tasty/src/dotty/tools/tasty/util/Util.scala @@ -11,4 +11,17 @@ object Util { arr1 } + /** Specialized version for bytes */ + def dble(arr: Array[Byte]): Array[Byte] = { + val arr1 = new Array[Byte](arr.length * 2) + System.arraycopy(arr, 0, arr1, 0, arr.length) + arr1 + } + + /** Specialized version for ints */ + def dble(arr: Array[Int]): Array[Int] = { + val arr1 = new Array[Int](arr.length * 2) + System.arraycopy(arr, 0, arr1, 0, arr.length) + arr1 + } } diff --git a/tests/cmdTest-sbt-tests/sourcepath-with-inline-api-hash/project/build.properties b/tests/cmdTest-sbt-tests/sourcepath-with-inline-api-hash/project/build.properties index 22af2628c413..46e43a97ed86 100644 --- 
a/tests/cmdTest-sbt-tests/sourcepath-with-inline-api-hash/project/build.properties +++ b/tests/cmdTest-sbt-tests/sourcepath-with-inline-api-hash/project/build.properties @@ -1 +1 @@ -sbt.version=1.7.1 +sbt.version=1.8.2 diff --git a/tests/cmdTest-sbt-tests/sourcepath-with-inline/project/build.properties b/tests/cmdTest-sbt-tests/sourcepath-with-inline/project/build.properties index 22af2628c413..46e43a97ed86 100644 --- a/tests/cmdTest-sbt-tests/sourcepath-with-inline/project/build.properties +++ b/tests/cmdTest-sbt-tests/sourcepath-with-inline/project/build.properties @@ -1 +1 @@ -sbt.version=1.7.1 +sbt.version=1.8.2 diff --git a/tests/coverage/pos/Constructor.scala b/tests/coverage/pos/Constructor.scala index 251370ec8e6e..536bfa26f386 100644 --- a/tests/coverage/pos/Constructor.scala +++ b/tests/coverage/pos/Constructor.scala @@ -1,10 +1,20 @@ package covtest class C: + def this(arg: String) = { + this() + g() + } + + def this(x: Int) = + this(x.toString() + "foo") + def f(x: Int) = x def x = 1 f(x) + def g(): Int = 2 + object O: def g(y: Int) = y def y = 1 diff --git a/tests/coverage/pos/Constructor.scoverage.check b/tests/coverage/pos/Constructor.scoverage.check index 678da472fd4c..6a6742c9118d 100644 --- a/tests/coverage/pos/Constructor.scoverage.check +++ b/tests/coverage/pos/Constructor.scoverage.check @@ -24,10 +24,78 @@ covtest C Class covtest.C -f + 28 -33 +36 3 + +DefDef +false +0 +false +def this + +1 +Constructor.scala +covtest +C +Class +covtest.C + +69 +72 +5 +g +Apply +false +0 +false +g() + +2 +Constructor.scala +covtest +C +Class +covtest.C + +80 +88 +8 + +DefDef +false +0 +false +def this + +3 +Constructor.scala +covtest +C +Class +covtest.C + +108 +128 +9 ++ +Apply +false +0 +false +x.toString() + "foo" + +4 +Constructor.scala +covtest +C +Class +covtest.C +f +133 +138 +11 f DefDef false @@ -35,16 +103,16 @@ false false def f -1 +5 Constructor.scala covtest C Class covtest.C x -48 -53 -4 +153 +158 +12 x DefDef false @@ -52,16 +120,16 @@ false 
false def x -2 +6 Constructor.scala covtest C Class covtest.C -60 -64 -5 +165 +169 +13 f Apply false @@ -69,16 +137,16 @@ false false f(x) -3 +7 Constructor.scala covtest C Class covtest.C -62 -63 -5 +167 +168 +13 x Select false @@ -86,16 +154,33 @@ false false x -4 +8 +Constructor.scala +covtest +C +Class +covtest.C +g +173 +178 +15 +g +DefDef +false +0 +false +def g + +9 Constructor.scala covtest O$ Object covtest.O$ g -78 -83 -8 +203 +208 +18 g DefDef false @@ -103,16 +188,16 @@ false false def g -5 +10 Constructor.scala covtest O$ Object covtest.O$ y -98 -103 -9 +223 +228 +19 y DefDef false @@ -120,16 +205,16 @@ false false def y -6 +11 Constructor.scala covtest O$ Object covtest.O$ -110 -114 -10 +235 +239 +20 g Apply false @@ -137,16 +222,16 @@ false false g(y) -7 +12 Constructor.scala covtest O$ Object covtest.O$ -112 -113 -10 +237 +238 +20 y Ident false diff --git a/tests/explicit-nulls/run/i11332.scala b/tests/explicit-nulls/run/i11332.scala new file mode 100644 index 000000000000..73fb48839c16 --- /dev/null +++ b/tests/explicit-nulls/run/i11332.scala @@ -0,0 +1,22 @@ +// scalajs: --skip +import scala.language.unsafeNulls + +import java.lang.invoke._, MethodType.methodType + +// A copy of tests/run/i11332.scala +// to test the bootstrap minimisation which failed +// (because bootstrap runs under explicit nulls) +class Foo: + def neg(x: Int): Int = -x + +object Test: + def main(args: Array[String]): Unit = + val l = MethodHandles.lookup() + val self = new Foo() + + val res4 = { + l // explicit chain method call - previously derivedSelect broke the type + .findVirtual(classOf[Foo], "neg", methodType(classOf[Int], classOf[Int])) + .invokeExact(self, 4): Int + } + assert(-4 == res4) diff --git a/tests/generic-java-signatures/i15385/Lib.scala b/tests/generic-java-signatures/i15385/Lib.scala new file mode 100644 index 000000000000..81b00d964b3f --- /dev/null +++ b/tests/generic-java-signatures/i15385/Lib.scala @@ -0,0 +1,24 @@ +class Loc(val idx: Int) extends 
AnyVal + +class Foo: + def testNoParam[A <: Int]: A = 1.asInstanceOf[A] + def testSingleParam[A <: Int](a: A): A = 2.asInstanceOf[A] // (I)I + def testSingleParam2[A <: Int](a: A): Box[A] = new Box[A](a) // (I)LBox; + def testSingleParam3[A <: Int](box: Box[A]): A = box.value // (LBox;)I + def testOtherReturn[A <: Int](a: A): String = "3" + def testNoErasure[A <: String](a: A): A = "4".asInstanceOf[A] + def testMultiParam[A <: Int, B <: String](a: A, b: B): A = 5.asInstanceOf[A] + + def testVCNoParam[A <: Loc]: A = Loc(1).asInstanceOf[A] + def testVCSingleParam[A <: Loc](a: A): A = Loc(2).asInstanceOf[A] + def testVCOtherReturn[A <: Loc](a: A): String = "3" + def testVCNoErasure[A <: String](a: A): A = "4".asInstanceOf[A] + def testVCMultiParam[A <: Loc, B <: String](a: A, b: B): A = Loc(5).asInstanceOf[A] + +class Box[T](val value: T) + +class BarParent[X, Y] +trait BarInterface[F, G] +abstract class Bar[A <: Int](a: A) extends BarParent[A, String] with BarInterface[Int, A]: + def getMap: Map[String, A] + def bar[B](a: A, b: B): (A, B, Int) diff --git a/tests/generic-java-signatures/i15385/Test.java b/tests/generic-java-signatures/i15385/Test.java new file mode 100644 index 000000000000..184f104d0fb0 --- /dev/null +++ b/tests/generic-java-signatures/i15385/Test.java @@ -0,0 +1,18 @@ +public class Test { + public static void main(String[] args) throws Exception { + Foo foo = new Foo(); + System.out.println(foo.testNoParam()); + System.out.println(foo.testSingleParam(2)); + System.out.println(foo.testSingleParam2(21).value()); + System.out.println(foo.testSingleParam3(new Box(22))); + System.out.println(foo.testOtherReturn(3)); + System.out.println(foo.testNoErasure("4")); + System.out.println(foo.testMultiParam(5, "5")); + + System.out.println(foo.testVCNoParam()); + System.out.println(foo.testVCSingleParam(2)); + System.out.println(foo.testVCOtherReturn(3)); + System.out.println(foo.testVCNoErasure("4")); + System.out.println(foo.testVCMultiParam(5, "5")); + } +} 
diff --git a/tests/init/neg/apply2.scala b/tests/init/neg/apply2.scala old mode 100644 new mode 100755 index 83f64a6dd3c7..c6c7fe5fedd2 --- a/tests/init/neg/apply2.scala +++ b/tests/init/neg/apply2.scala @@ -3,8 +3,8 @@ object O: println(n) class B: - val a = A(this) + val a = A(this) // error val b = new B - val n = 10 // error + val n = 10 end O diff --git a/tests/init/neg/closureLeak.check b/tests/init/neg/closureLeak.check index 7019f2274ab6..a90acaa8ed00 100644 --- a/tests/init/neg/closureLeak.check +++ b/tests/init/neg/closureLeak.check @@ -1,16 +1,14 @@ -- Error: tests/init/neg/closureLeak.scala:11:14 ----------------------------------------------------------------------- 11 | l.foreach(a => a.addX(this)) // error | ^^^^^^^^^^^^^^^^^ - | Cannot prove the method argument is hot. Only hot values are safe to leak. - | Found = Fun { this = ThisRef[class Outer], owner = class Outer }. Calling trace: - | -> class Outer { [ closureLeak.scala:1 ] - | ^ - | -> l.foreach(a => a.addX(this)) // error [ closureLeak.scala:11 ] - | ^^^^^^^^^^^^^^^^^ + |Could not verify that the method argument is transitively initialized (Hot). It was found to be a function where "this" is (the original object of type (class Outer) where initialization checking started). Only transitively initialized arguments may be passed to methods (except constructors). Calling trace: + |-> class Outer { [ closureLeak.scala:1 ] + | ^ + |-> l.foreach(a => a.addX(this)) // error [ closureLeak.scala:11 ] + | ^^^^^^^^^^^^^^^^^ | - | Promoting the value to hot (transitively initialized) failed due to the following problem: - | Cannot prove the method argument is hot. Only hot values are safe to leak. - | Found = ThisRef[class Outer]. - | Non initialized field(s): value p. 
Promotion trace: - | -> l.foreach(a => a.addX(this)) // error [ closureLeak.scala:11 ] - | ^^^^ + |Promoting the value to transitively initialized (Hot) failed due to the following problem: + |Could not verify that the method argument is transitively initialized (Hot). It was found to be the original object of type (class Outer) where initialization checking started. Only transitively initialized arguments may be passed to methods (except constructors). + |Non initialized field(s): value p. Promotion trace: + |-> l.foreach(a => a.addX(this)) // error [ closureLeak.scala:11 ] + | ^^^^ diff --git a/tests/init/neg/cycle-structure.check b/tests/init/neg/cycle-structure.check index fb7b54c7cac2..dfe7c9b85e2f 100644 --- a/tests/init/neg/cycle-structure.check +++ b/tests/init/neg/cycle-structure.check @@ -1,14 +1,14 @@ -- Error: tests/init/neg/cycle-structure.scala:3:13 -------------------------------------------------------------------- 3 | val x = B(this) // error | ^^^^^^^ - | Problematic object instantiation: arg 1 is not hot (transitively initialized). Calling trace: + | Problematic object instantiation: arg 1 is not transitively initialized (Hot). Calling trace: | -> case class A(b: B) { [ cycle-structure.scala:1 ] | ^ | -> val x = B(this) // error [ cycle-structure.scala:3 ] | ^^^^^^^ | | It leads to the following error during object initialization: - | Access field value x on a cold object. Calling trace: + | Access field value x on an uninitialized (Cold) object. Calling trace: | -> case class B(a: A) { [ cycle-structure.scala:7 ] | ^ | -> val x1 = a.x [ cycle-structure.scala:8 ] @@ -16,14 +16,14 @@ -- Error: tests/init/neg/cycle-structure.scala:9:13 -------------------------------------------------------------------- 9 | val x = A(this) // error | ^^^^^^^ - | Problematic object instantiation: arg 1 is not hot (transitively initialized). Calling trace: + | Problematic object instantiation: arg 1 is not transitively initialized (Hot). 
Calling trace: | -> case class B(a: A) { [ cycle-structure.scala:7 ] | ^ | -> val x = A(this) // error [ cycle-structure.scala:9 ] | ^^^^^^^ | | It leads to the following error during object initialization: - | Access field value x on a cold object. Calling trace: + | Access field value x on an uninitialized (Cold) object. Calling trace: | -> case class A(b: B) { [ cycle-structure.scala:1 ] | ^ | -> val x1 = b.x [ cycle-structure.scala:2 ] diff --git a/tests/init/neg/default-this.check b/tests/init/neg/default-this.check index 6d08a64450d4..f64f36304e9b 100644 --- a/tests/init/neg/default-this.check +++ b/tests/init/neg/default-this.check @@ -1,14 +1,13 @@ -- Error: tests/init/neg/default-this.scala:9:8 ------------------------------------------------------------------------ 9 | compare() // error | ^^^^^^^ - | Cannot prove the method argument is hot. Only hot values are safe to leak. - | Found = ThisRef[class B]. - | Non initialized field(s): value result. Calling trace: - | -> class B extends A { [ default-this.scala:6 ] - | ^ - | -> val result = updateThenCompare(5) [ default-this.scala:11 ] - | ^^^^^^^^^^^^^^^^^^^^ - | -> def updateThenCompare(c: Int): Boolean = { [ default-this.scala:7 ] - | ^ - | -> compare() // error [ default-this.scala:9 ] - | ^^^^^^^ + |Could not verify that the method argument is transitively initialized (Hot). It was found to be the original object of type (class B) where initialization checking started. Only transitively initialized arguments may be passed to methods (except constructors). + |Non initialized field(s): value result. 
Calling trace: + |-> class B extends A { [ default-this.scala:6 ] + | ^ + |-> val result = updateThenCompare(5) [ default-this.scala:11 ] + | ^^^^^^^^^^^^^^^^^^^^ + |-> def updateThenCompare(c: Int): Boolean = { [ default-this.scala:7 ] + | ^ + |-> compare() // error [ default-this.scala:9 ] + | ^^^^^^^ diff --git a/tests/init/neg/early-promote4.scala b/tests/init/neg/early-promote4.scala index 65f917553974..487a75c5516f 100644 --- a/tests/init/neg/early-promote4.scala +++ b/tests/init/neg/early-promote4.scala @@ -8,13 +8,13 @@ class Outer { trait B { def bar() = assert(a == 5) } -} -class M(val o: Outer) extends A with o.B { - val n: Int = 10 + class M extends A with B { + val n: Int = 10 + } } class Dummy { val m: Int = n + 4 val n: Int = 10 // error -} \ No newline at end of file +} diff --git a/tests/init/neg/early-promote5.scala b/tests/init/neg/early-promote5.scala index 404f6fdb8d70..3f850b623ea3 100644 --- a/tests/init/neg/early-promote5.scala +++ b/tests/init/neg/early-promote5.scala @@ -8,13 +8,13 @@ class Outer { trait B { def bar(x: A) = println(a) } -} -class M(val o: Outer, c: Container) extends A with o.B + class M(c: Container) extends A with B +} class Container { val o = new Outer - val m = new M(o, this) // error + val m = new o.M(this) // error val s = "hello" } diff --git a/tests/init/neg/i15363.check b/tests/init/neg/i15363.check index 84cf268ef8a1..9912aa186a5b 100644 --- a/tests/init/neg/i15363.check +++ b/tests/init/neg/i15363.check @@ -1,14 +1,14 @@ -- Error: tests/init/neg/i15363.scala:3:10 ----------------------------------------------------------------------------- 3 | val b = new B(this) // error | ^^^^^^^^^^^ - | Problematic object instantiation: arg 1 is not hot (transitively initialized). Calling trace: + | Problematic object instantiation: arg 1 is not transitively initialized (Hot). 
Calling trace: | -> class A: [ i15363.scala:1 ] | ^ | -> val b = new B(this) // error [ i15363.scala:3 ] | ^^^^^^^^^^^ | | It leads to the following error during object initialization: - | Access field value m on a cold object. Calling trace: + | Access field value m on an uninitialized (Cold) object. Calling trace: | -> class B(a: A): [ i15363.scala:7 ] | ^ | -> val x = a.m [ i15363.scala:8 ] diff --git a/tests/init/neg/i15459.check b/tests/init/neg/i15459.check index 93ba28554895..a8c9972276db 100644 --- a/tests/init/neg/i15459.check +++ b/tests/init/neg/i15459.check @@ -1,12 +1,11 @@ -- Error: tests/init/neg/i15459.scala:3:10 ----------------------------------------------------------------------------- 3 | println(this) // error | ^^^^ - | Cannot prove the method argument is hot. Only hot values are safe to leak. - | Found = ThisRef[class Sub]. - | Non initialized field(s): value b. Calling trace: - | -> class Sub extends Sup: [ i15459.scala:5 ] - | ^ - | -> class Sup: [ i15459.scala:1 ] - | ^ - | -> println(this) // error [ i15459.scala:3 ] - | ^^^^ + |Could not verify that the method argument is transitively initialized (Hot). It was found to be the original object of type (class Sub) where initialization checking started. Only transitively initialized arguments may be passed to methods (except constructors). + |Non initialized field(s): value b. 
Calling trace: + |-> class Sub extends Sup: [ i15459.scala:5 ] + | ^ + |-> class Sup: [ i15459.scala:1 ] + | ^ + |-> println(this) // error [ i15459.scala:3 ] + | ^^^^ diff --git a/tests/init/neg/inherit-non-hot.check b/tests/init/neg/inherit-non-hot.check index 408196333a27..068ba9662fd1 100644 --- a/tests/init/neg/inherit-non-hot.check +++ b/tests/init/neg/inherit-non-hot.check @@ -1,17 +1,17 @@ -- Error: tests/init/neg/inherit-non-hot.scala:6:32 -------------------------------------------------------------------- 6 | if b == null then b = new B(this) // error | ^^^^^^^^^^^^^^^ - | The RHS of reassignment must be hot. Found = Warm[class B] { outer = Hot, args = (Cold) }. Calling trace: - | -> class C extends A { [ inherit-non-hot.scala:15 ] - | ^ - | -> val bAgain = toB.getBAgain [ inherit-non-hot.scala:16 ] - | ^^^ - | -> def toB: B = [ inherit-non-hot.scala:5 ] - | ^ - | -> if b == null then b = new B(this) // error [ inherit-non-hot.scala:6 ] - | ^^^^^^^^^^^^^^^ + |The RHS of reassignment must be transitively initialized (Hot). It was found to be a non-transitively initialized (Warm) object of type (class B) { outer = a transitively initialized (Hot) object, args = (an uninitialized (Cold) object) }. Calling trace: + |-> class C extends A { [ inherit-non-hot.scala:15 ] + | ^ + |-> val bAgain = toB.getBAgain [ inherit-non-hot.scala:16 ] + | ^^^ + |-> def toB: B = [ inherit-non-hot.scala:5 ] + | ^ + |-> if b == null then b = new B(this) // error [ inherit-non-hot.scala:6 ] + | ^^^^^^^^^^^^^^^ | - | Promoting the value to hot (transitively initialized) failed due to the following problem: - | Cannot prove that the field value a is hot. Found = Cold. Promotion trace: - | -> class B(a: A) { [ inherit-non-hot.scala:10 ] - | ^^^^ + |Promoting the value to transitively initialized (Hot) failed due to the following problem: + |Could not verify that the field value a is transitively initialized (Hot). It was found to be an uninitialized (Cold) object. 
Promotion trace: + |-> class B(a: A) { [ inherit-non-hot.scala:10 ] + | ^^^^ diff --git a/tests/init/neg/inlined-method.check b/tests/init/neg/inlined-method.check index 62bec184b825..f3061bcb63ed 100644 --- a/tests/init/neg/inlined-method.check +++ b/tests/init/neg/inlined-method.check @@ -1,12 +1,11 @@ -- Error: tests/init/neg/inlined-method.scala:8:45 --------------------------------------------------------------------- 8 | scala.runtime.Scala3RunTime.assertFailed(message) // error | ^^^^^^^ - | Cannot prove the method argument is hot. Only hot values are safe to leak. - | Found = ThisRef[class InlineError]. - | Non initialized field(s): value v. Calling trace: - | -> class InlineError { [ inlined-method.scala:1 ] - | ^ - | -> Assertion.failAssert(this) [ inlined-method.scala:2 ] - | ^^^^^^^^^^^^^^^^^^^^^^^^^^ - | -> scala.runtime.Scala3RunTime.assertFailed(message) // error [ inlined-method.scala:8 ] - | ^^^^^^^ + |Could not verify that the method argument is transitively initialized (Hot). It was found to be the original object of type (class InlineError) where initialization checking started. Only transitively initialized arguments may be passed to methods (except constructors). + |Non initialized field(s): value v. Calling trace: + |-> class InlineError { [ inlined-method.scala:1 ] + | ^ + |-> Assertion.failAssert(this) [ inlined-method.scala:2 ] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ + |-> scala.runtime.Scala3RunTime.assertFailed(message) // error [ inlined-method.scala:8 ] + | ^^^^^^^ diff --git a/tests/init/neg/inner-first.check b/tests/init/neg/inner-first.check index e1df69fbd4a2..fe90423c828f 100644 --- a/tests/init/neg/inner-first.check +++ b/tests/init/neg/inner-first.check @@ -1,10 +1,9 @@ -- Error: tests/init/neg/inner-first.scala:3:12 ------------------------------------------------------------------------ 3 | println(this) // error | ^^^^ - | Cannot prove the method argument is hot. Only hot values are safe to leak. - | Found = ThisRef[class B]. 
- | Non initialized field(s): value n. Calling trace: - | -> class B: [ inner-first.scala:2 ] - | ^ - | -> println(this) // error [ inner-first.scala:3 ] - | ^^^^ + |Could not verify that the method argument is transitively initialized (Hot). It was found to be the original object of type (class B) where initialization checking started. Only transitively initialized arguments may be passed to methods (except constructors). + |Non initialized field(s): value n. Calling trace: + |-> class B: [ inner-first.scala:2 ] + | ^ + |-> println(this) // error [ inner-first.scala:3 ] + | ^^^^ diff --git a/tests/init/neg/interleaving-params.scala b/tests/init/neg/interleaving-params.scala new file mode 100755 index 000000000000..f0f9cbaf3f53 --- /dev/null +++ b/tests/init/neg/interleaving-params.scala @@ -0,0 +1,9 @@ +import scala.language.experimental.clauseInterleaving + +class Params{ + def bar[T](x: T)[T]: String = ??? // error + def zoo(x: Int)[T, U](x: U): T = ??? // error + def bbb[T <: U](x: U)[U]: U = ??? // error // error + def f0[T](implicit x: T)[U](y: U) = (x,y) // error + def f1[T](implicit x: T)[U] = (x,y) // error +} \ No newline at end of file diff --git a/tests/init/neg/leak-warm.check b/tests/init/neg/leak-warm.check index d4d563fc456e..c2fc561a3668 100644 --- a/tests/init/neg/leak-warm.check +++ b/tests/init/neg/leak-warm.check @@ -1,7 +1,7 @@ -- Error: tests/init/neg/leak-warm.scala:19:18 ------------------------------------------------------------------------- 19 | val l2 = l.map(_.m()) // error | ^^^^^^^^^^^^ - | Call method method map on a cold object. Calling trace: + | Call method method map on an uninitialized (Cold) object. 
Calling trace: | -> object leakWarm { [ leak-warm.scala:1 ] | ^ | -> val l2 = l.map(_.m()) // error [ leak-warm.scala:19 ] diff --git a/tests/init/neg/promotion-loop.check b/tests/init/neg/promotion-loop.check index 3d1eb7e74aec..bc05640d10d2 100644 --- a/tests/init/neg/promotion-loop.check +++ b/tests/init/neg/promotion-loop.check @@ -1,15 +1,14 @@ -- Error: tests/init/neg/promotion-loop.scala:16:10 -------------------------------------------------------------------- 16 | println(b) // error | ^ - | Cannot prove the method argument is hot. Only hot values are safe to leak. - | Found = Warm[class B] { outer = ThisRef[class Test] }. Calling trace: - | -> class Test { test => [ promotion-loop.scala:1 ] - | ^ - | -> println(b) // error [ promotion-loop.scala:16 ] - | ^ + |Could not verify that the method argument is transitively initialized (Hot). It was found to be a non-transitively initialized (Warm) object of type (class B) { outer = the original object of type (class Test) where initialization checking started }. Only transitively initialized arguments may be passed to methods (except constructors). Calling trace: + |-> class Test { test => [ promotion-loop.scala:1 ] + | ^ + |-> println(b) // error [ promotion-loop.scala:16 ] + | ^ | - | Promoting the value to hot (transitively initialized) failed due to the following problem: - | Cannot prove that the field value outer is hot. Found = ThisRef[class Test]. - | Non initialized field(s): value n. Promotion trace: - | -> val outer = test [ promotion-loop.scala:12 ] - | ^^^^^^^^^^^^^^^^ + |Promoting the value to transitively initialized (Hot) failed due to the following problem: + |Could not verify that the field value outer is transitively initialized (Hot). It was found to be the original object of type (class Test) where initialization checking started. + |Non initialized field(s): value n. 
Promotion trace: + |-> val outer = test [ promotion-loop.scala:12 ] + | ^^^^^^^^^^^^^^^^ diff --git a/tests/init/neg/promotion-segment3.check b/tests/init/neg/promotion-segment3.check index 220af18bd29a..a7320b5c3ed3 100644 --- a/tests/init/neg/promotion-segment3.check +++ b/tests/init/neg/promotion-segment3.check @@ -1,12 +1,11 @@ -- Error: tests/init/neg/promotion-segment3.scala:9:6 ------------------------------------------------------------------ 9 | bar(new B) // error | ^^^^^ - | Cannot prove the method argument is hot. Only hot values are safe to leak. - | Found = Warm[class B] { outer = ThisRef[class A] }. Calling trace: - | -> class A: [ promotion-segment3.scala:2 ] - | ^ - | -> bar(new B) // error [ promotion-segment3.scala:9 ] - | ^^^^^ + |Could not verify that the method argument is transitively initialized (Hot). It was found to be a non-transitively initialized (Warm) object of type (class B) { outer = the original object of type (class A) where initialization checking started }. Only transitively initialized arguments may be passed to methods (except constructors). Calling trace: + |-> class A: [ promotion-segment3.scala:2 ] + | ^ + |-> bar(new B) // error [ promotion-segment3.scala:9 ] + | ^^^^^ | - | Promoting the value to hot (transitively initialized) failed due to the following problem: - | Promotion cancelled as the value contains inner class C. + |Promoting the value to transitively initialized (Hot) failed due to the following problem: + |Promotion cancelled as the value contains inner class C. 
diff --git a/tests/init/neg/secondary-ctor4.check b/tests/init/neg/secondary-ctor4.check index 1bf1a7286357..e867ba65ded5 100644 --- a/tests/init/neg/secondary-ctor4.check +++ b/tests/init/neg/secondary-ctor4.check @@ -1,14 +1,14 @@ -- Error: tests/init/neg/secondary-ctor4.scala:54:14 ------------------------------------------------------------------- 54 | val c = new C(b, 5) // error | ^^^^^^^^^^^ - | Problematic object instantiation: arg 1 is not hot (transitively initialized). Calling trace: + | Problematic object instantiation: arg 1 is not transitively initialized (Hot). Calling trace: | -> class D { [ secondary-ctor4.scala:52 ] | ^ | -> val c = new C(b, 5) // error [ secondary-ctor4.scala:54 ] | ^^^^^^^^^^^ | | It leads to the following error during object initialization: - | Access field value n on a cold object. Calling trace: + | Access field value n on an uninitialized (Cold) object. Calling trace: | -> def this(b: B, x: Int) = this(b) [ secondary-ctor4.scala:49 ] | ^^^^^^^ | -> class C(b: B) extends A(b) with T { [ secondary-ctor4.scala:48 ] @@ -24,7 +24,7 @@ -- Error: tests/init/neg/secondary-ctor4.scala:42:4 -------------------------------------------------------------------- 42 | new A(new B(new D)) // error | ^^^^^^^^^^^^^^^^^^^ - |Problematic object instantiation: the outer M.this and arg 1 are not hot (transitively initialized). Calling trace: + |Problematic object instantiation: the outer M.this and arg 1 are not transitively initialized (Hot). Calling trace: |-> class N(d: D) extends M(d) { [ secondary-ctor4.scala:59 ] | ^ |-> def this(d: D) = { [ secondary-ctor4.scala:7 ] @@ -33,7 +33,7 @@ | ^^^^^^^^^^^^^^^^^^^ | |It leads to the following error during object initialization: - |Access field value n on a cold object. Calling trace: + |Access field value n on an uninitialized (Cold) object. 
Calling trace: |-> def this(b: B) = { [ secondary-ctor4.scala:17 ] | ^ |-> Inner().foo() [ secondary-ctor4.scala:26 ] diff --git a/tests/init/neg/t3273.check b/tests/init/neg/t3273.check index e548a5964cac..0fe7ea78871c 100644 --- a/tests/init/neg/t3273.check +++ b/tests/init/neg/t3273.check @@ -1,28 +1,26 @@ -- Error: tests/init/neg/t3273.scala:4:42 ------------------------------------------------------------------------------ 4 | val num1: LazyList[Int] = 1 #:: num1.map(_ + 1) // error | ^^^^^^^^^^^^^^^ - | Cannot prove the method argument is hot. Only hot values are safe to leak. - | Found = Fun { this = ThisRef[object Test], owner = object Test }. Calling trace: - | -> object Test { [ t3273.scala:3 ] - | ^ - | -> val num1: LazyList[Int] = 1 #:: num1.map(_ + 1) // error [ t3273.scala:4 ] - | ^^^^^^^^^^^^^^^ + |Could not verify that the method argument is transitively initialized (Hot). It was found to be a function where "this" is (the original object of type (object Test) where initialization checking started). Only transitively initialized arguments may be passed to methods (except constructors). Calling trace: + |-> object Test { [ t3273.scala:3 ] + | ^ + |-> val num1: LazyList[Int] = 1 #:: num1.map(_ + 1) // error [ t3273.scala:4 ] + | ^^^^^^^^^^^^^^^ | - | Promoting the value to hot (transitively initialized) failed due to the following problem: - | Access non-initialized value num1. Promotion trace: - | -> val num1: LazyList[Int] = 1 #:: num1.map(_ + 1) // error [ t3273.scala:4 ] - | ^^^^ + |Promoting the value to transitively initialized (Hot) failed due to the following problem: + |Access non-initialized value num1. 
Promotion trace: + |-> val num1: LazyList[Int] = 1 #:: num1.map(_ + 1) // error [ t3273.scala:4 ] + | ^^^^ -- Error: tests/init/neg/t3273.scala:5:61 ------------------------------------------------------------------------------ 5 | val num2: LazyList[Int] = 1 #:: num2.iterator.map(_ + 1).to(LazyList) // error | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | Cannot prove the method argument is hot. Only hot values are safe to leak. - | Found = Fun { this = ThisRef[object Test], owner = object Test }. Calling trace: - | -> object Test { [ t3273.scala:3 ] - | ^ - | -> val num2: LazyList[Int] = 1 #:: num2.iterator.map(_ + 1).to(LazyList) // error [ t3273.scala:5 ] - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + |Could not verify that the method argument is transitively initialized (Hot). It was found to be a function where "this" is (the original object of type (object Test) where initialization checking started). Only transitively initialized arguments may be passed to methods (except constructors). Calling trace: + |-> object Test { [ t3273.scala:3 ] + | ^ + |-> val num2: LazyList[Int] = 1 #:: num2.iterator.map(_ + 1).to(LazyList) // error [ t3273.scala:5 ] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | - | Promoting the value to hot (transitively initialized) failed due to the following problem: - | Access non-initialized value num2. Promotion trace: - | -> val num2: LazyList[Int] = 1 #:: num2.iterator.map(_ + 1).to(LazyList) // error [ t3273.scala:5 ] - | ^^^^ + |Promoting the value to transitively initialized (Hot) failed due to the following problem: + |Access non-initialized value num2. 
Promotion trace: + |-> val num2: LazyList[Int] = 1 #:: num2.iterator.map(_ + 1).to(LazyList) // error [ t3273.scala:5 ] + | ^^^^ diff --git a/tests/init/neg/unsound1.check b/tests/init/neg/unsound1.check index c3057a6a6067..d114ba072db6 100644 --- a/tests/init/neg/unsound1.check +++ b/tests/init/neg/unsound1.check @@ -1,7 +1,7 @@ -- Error: tests/init/neg/unsound1.scala:2:35 --------------------------------------------------------------------------- 2 | if (m > 0) println(foo(m - 1).a2.n) // error | ^^^^^^^^^^^^^^^ - | Access field variable n on a cold object. Calling trace: + | Access field variable n on an uninitialized (Cold) object. Calling trace: | -> class A(m: Int) { [ unsound1.scala:1 ] | ^ | -> if (m > 0) println(foo(m - 1).a2.n) // error [ unsound1.scala:2 ] diff --git a/tests/init/neg/unsound2.check b/tests/init/neg/unsound2.check index a90b16c8bf71..69d1278e94df 100644 --- a/tests/init/neg/unsound2.check +++ b/tests/init/neg/unsound2.check @@ -1,7 +1,7 @@ -- Error: tests/init/neg/unsound2.scala:5:26 --------------------------------------------------------------------------- 5 | def getN: Int = a.n // error | ^^^ - | Access field value n on a cold object. Calling trace: + | Access field value n on an uninitialized (Cold) object. Calling trace: | -> case class A(x: Int) { [ unsound2.scala:1 ] | ^ | -> println(foo(x).getB) [ unsound2.scala:8 ] diff --git a/tests/init/neg/unsound3.check b/tests/init/neg/unsound3.check index d62b97e1abaf..c32e66272d1a 100644 --- a/tests/init/neg/unsound3.check +++ b/tests/init/neg/unsound3.check @@ -1,7 +1,7 @@ -- Error: tests/init/neg/unsound3.scala:10:38 -------------------------------------------------------------------------- 10 | if (x < 12) then foo().getC().b else newB // error | ^^^^^^^^^^^^^^ - | Access field value b on a cold object. Calling trace: + | Access field value b on an uninitialized (Cold) object. 
Calling trace: | -> class C { [ unsound3.scala:5 ] | ^ | -> val b = foo() [ unsound3.scala:12 ] diff --git a/tests/init/pos/interleaving-overload.scala b/tests/init/pos/interleaving-overload.scala new file mode 100755 index 000000000000..260b3538214a --- /dev/null +++ b/tests/init/pos/interleaving-overload.scala @@ -0,0 +1,24 @@ +import scala.language.experimental.clauseInterleaving + +class A{ + + def f1[T](x: Any)[U] = ??? + def f1[T](x: Int)[U] = ??? + + f1(1) + f1("hello") + f1[Boolean]("a")[Int] + f1[Boolean](1)[Int] + + case class B[U](x: Int) + def b[U](x: Int) = B[U](x) + + def f2[T]: [U] => Int => B[U] = [U] => (x: Int) => b[U](x) + + f2(1) + f2[Any](1) + f2[Any][Any](1) + + b[Int](5) + +} \ No newline at end of file diff --git a/tests/init/pos/interleaving-params.scala b/tests/init/pos/interleaving-params.scala new file mode 100755 index 000000000000..9f98b5f35d5b --- /dev/null +++ b/tests/init/pos/interleaving-params.scala @@ -0,0 +1,19 @@ +import scala.collection.mutable.AbstractSet +import scala.collection.mutable.BitSet +import scala.language.experimental.clauseInterleaving + +class Params{ + type U + def foo[T](x: T)[U >: x.type <: T](using U)[L <: List[U]](l: L): L = ??? + def aaa(x: U): U = ??? + def bbb[T <: U](x: U)[U]: U = ??? 
+ + foo[AbstractSet[Int]](BitSet())[AbstractSet[Int]](using BitSet())[List[AbstractSet[Int]]](List[AbstractSet[Int]]()) +} + +class Param2 extends Params { + type U = AbstractSet[Int] + + aaa(BitSet()) + bbb[BitSet](BitSet())[AbstractSet[Int]] +} \ No newline at end of file diff --git a/tests/init/pos/recursive.scala b/tests/init/pos/recursive.scala new file mode 100644 index 000000000000..74b658330e03 --- /dev/null +++ b/tests/init/pos/recursive.scala @@ -0,0 +1,9 @@ +class A { + def p(cb: Int => Int): Int = cb(0) + + val q: List[Int] = { + def f(x: Int): Int => Int = y => p(f(y)) + List(1, 2).map(f(3)) + } + val n: Int = 4 +} \ No newline at end of file diff --git a/tests/init/pos/self-ref.scala b/tests/init/pos/self-ref.scala new file mode 100644 index 000000000000..1a9f199b9f7a --- /dev/null +++ b/tests/init/pos/self-ref.scala @@ -0,0 +1,9 @@ +class A { + def foo(a: Int) = { + lazy val x: Int = if (a == 0) x else 0 + println(x) + } + foo(0) + + val y = 5 +} diff --git a/tests/pos-custom-args/captures/boxmap.scala b/tests/neg-custom-args/boxmap.scala similarity index 63% rename from tests/pos-custom-args/captures/boxmap.scala rename to tests/neg-custom-args/boxmap.scala index 18baabd4e584..1696ac3505e4 100644 --- a/tests/pos-custom-args/captures/boxmap.scala +++ b/tests/neg-custom-args/boxmap.scala @@ -1,5 +1,5 @@ import annotation.retains -type Top = Any @retains(caps.*) +type Top = Any @retains(caps.cap) type Box[+T <: Top] = ([K <: Top] -> (T => K) -> K) @@ -15,5 +15,7 @@ def lazymap[A <: Top, B <: Top](b: Box[A])(f: A => B): {f} (() -> Box[B]) = def test[A <: Top, B <: Top] = def lazymap[A <: Top, B <: Top](b: Box[A])(f: A => B) = () => b[Box[B]]((x: A) => box(f(x))) - val x: (b: Box[A]) -> (f: A => B) -> (() -> Box[B]) = lazymap[A, B] + val x0: (b: Box[A]) -> (f: A => B) -> (() -> Box[B]) = lazymap[A, B] // error + val x: (b: Box[A]) -> (f: A => B) -> (() ->{b, f} Box[B]) = lazymap[A, B] // works + val y: (b: Box[A]) -> (f: A => B) -> (() ->{cap} Box[B]) = 
lazymap[A, B] // works () diff --git a/tests/neg-custom-args/capt-wf.scala b/tests/neg-custom-args/capt-wf.scala index 3bd80e0d0f68..67e1bc9906fe 100644 --- a/tests/neg-custom-args/capt-wf.scala +++ b/tests/neg-custom-args/capt-wf.scala @@ -1,35 +1,35 @@ class C -type Cap = {*} C +type Cap = C^ object foo def test(c: Cap, other: String): Unit = - val x1: {*} C = ??? // OK - val x2: {other} C = ??? // error: cs is empty + val x1: C^ = ??? // OK + val x2: C^{other} = ??? // error: cs is empty val s1 = () => "abc" - val x3: {s1} C = ??? // error: cs is empty + val x3: C^{s1} = ??? // error: cs is empty val x3a: () -> String = s1 val s2 = () => if x1 == null then "" else "abc" - val x4: {s2} C = ??? // OK - val x5: {c, c} C = ??? // error: redundant - val x6: {c} {c} C = ??? // error: redundant - val x7: {c} Cap = ??? // error: redundant - val x8: {*} {c} C = ??? // OK - val x9: {c, *} C = ??? // error: redundant - val x10: {*, c} C = ??? // error: redundant + val x4: C^{s2} = ??? // OK + val x5: C^{c, c} = ??? // error: redundant + // val x6: C^{c}^{c} = ??? // would be syntax error + val x7: Cap^{c} = ??? // error: redundant + // val x8: C^{c}^{cap} = ??? // would be syntax error + val x9: C^{c, cap} = ??? // error: redundant + val x10: C^{cap, c} = ??? // error: redundant def even(n: Int): Boolean = if n == 0 then true else odd(n - 1) def odd(n: Int): Boolean = if n == 1 then true else even(n - 1) val e1 = even val o1 = odd - val y1: {e1} String = ??? // error cs is empty - val y2: {o1} String = ??? // error cs is empty + val y1: String^{e1} = ??? // error cs is empty + val y2: String^{o1} = ??? // error cs is empty lazy val ev: (Int -> Boolean) = (n: Int) => lazy val od: (Int -> Boolean) = (n: Int) => if n == 1 then true else ev(n - 1) if n == 0 then true else od(n - 1) - val y3: {ev} String = ??? // error cs is empty + val y3: String^{ev} = ??? 
// error cs is empty () \ No newline at end of file diff --git a/tests/neg-custom-args/captures/boundschecks.scala b/tests/neg-custom-args/captures/boundschecks.scala new file mode 100644 index 000000000000..766d89d2f37b --- /dev/null +++ b/tests/neg-custom-args/captures/boundschecks.scala @@ -0,0 +1,18 @@ +object test { + + class Tree + + def f[X <: Tree](x: X): Unit = () + + class C[X <: Tree](x: X) + + def foo(t: Tree^) = + f(t) // error + f[Tree^](t) // error + f[Tree](t) // error + val c1 = C(t) // error + val c2 = C[Tree^](t) // error + val c3 = C[Tree](t) // error + + val foo: C[Tree^] = ??? +} diff --git a/tests/neg-custom-args/captures/boundschecks2.scala b/tests/neg-custom-args/captures/boundschecks2.scala new file mode 100644 index 000000000000..923758d722f9 --- /dev/null +++ b/tests/neg-custom-args/captures/boundschecks2.scala @@ -0,0 +1,13 @@ +object test { + + class Tree + + def f[X <: Tree](x: X): Unit = () + + class C[X <: Tree](x: X) + + val foo: C[Tree^] = ??? // error + type T = C[Tree^] // error + val bar: T -> T = ??? + val baz: C[Tree^] -> Unit = ??? // error +} diff --git a/tests/neg-custom-args/captures/box-adapt-boxing.scala b/tests/neg-custom-args/captures/box-adapt-boxing.scala index 7a624d4225fc..ea133051a21a 100644 --- a/tests/neg-custom-args/captures/box-adapt-boxing.scala +++ b/tests/neg-custom-args/captures/box-adapt-boxing.scala @@ -1,23 +1,23 @@ trait Cap -def main(io: {*} Cap, fs: {*} Cap): Unit = { - val test1: {} Unit -> Unit = _ => { // error - type Op = [T] -> ({io} T -> Unit) -> Unit - val f: ({io} Cap) -> Unit = ??? +def main(io: Cap^, fs: Cap^): Unit = { + val test1: Unit -> Unit = _ => { // error + type Op = [T] -> (T ->{io} Unit) -> Unit + val f: (Cap^{io}) -> Unit = ??? val op: Op = ??? 
- op[{io} Cap](f) + op[Cap^{io}](f) // expected type of f: {io} (box {io} Cap) -> Unit // actual type: ({io} Cap) -> Unit // adapting f to the expected type will also // charge the environment with {io} } - val test2: {} Unit -> Unit = _ => { + val test2: Unit -> Unit = _ => { type Box[X] = X type Op0[X] = Box[X] -> Unit type Op1[X] = Unit -> Box[X] - val f: Unit -> ({io} Cap) -> Unit = ??? - val test: {} Op1[{io} Op0[{io} Cap]] = f + val f: Unit -> (Cap^{io}) -> Unit = ??? + val test: Op1[Op0[Cap^{io}]^{io}]^{} = f // expected: {} Unit -> box {io} (box {io} Cap) -> Unit // actual: Unit -> ({io} Cap) -> Unit // @@ -31,8 +31,8 @@ def main(io: {*} Cap, fs: {*} Cap): Unit = { type Box[X] = X type Id[X] = Box[X] -> Unit type Op[X] = Unit -> Box[X] - val f: Unit -> ({io} Cap) -> Unit = ??? - val g: Op[{fs} Id[{io} Cap]] = f // error - val h: {} Op[{io} Id[{io} Cap]] = f + val f: Unit -> (Cap^{io}) -> Unit = ??? + val g: Op[Id[Cap^{io}]^{fs}] = f // error + val h: Op[Id[Cap^{io}]^{io}] = f } } diff --git a/tests/neg-custom-args/captures/box-adapt-cases.scala b/tests/neg-custom-args/captures/box-adapt-cases.scala index 049ff385d73c..7010444eecb5 100644 --- a/tests/neg-custom-args/captures/box-adapt-cases.scala +++ b/tests/neg-custom-args/captures/box-adapt-cases.scala @@ -3,27 +3,27 @@ trait Cap { def use(): Int } def test1(): Unit = { type Id[X] = [T] -> (op: X => T) -> T - val x: Id[{*} Cap] = ??? - x(cap => cap.use()) // error + val x: Id[Cap^] = ??? + x(cap => cap.use()) // was error, now OK } -def test2(io: {*} Cap): Unit = { +def test2(io: Cap^{cap}): Unit = { type Id[X] = [T] -> (op: X -> T) -> T - val x: Id[{io} Cap] = ??? + val x: Id[Cap^{io}] = ??? x(cap => cap.use()) // error } -def test3(io: {*} Cap): Unit = { - type Id[X] = [T] -> (op: {io} X -> T) -> T +def test3(io: Cap^{cap}): Unit = { + type Id[X] = [T] -> (op: X ->{io} T) -> T - val x: Id[{io} Cap] = ??? + val x: Id[Cap^{io}] = ??? 
x(cap => cap.use()) // ok } -def test4(io: {*} Cap, fs: {*} Cap): Unit = { - type Id[X] = [T] -> (op: {io} X -> T) -> T +def test4(io: Cap^{cap}, fs: Cap^{cap}): Unit = { + type Id[X] = [T] -> (op: X ->{io} T) -> T - val x: Id[{io, fs} Cap] = ??? + val x: Id[Cap^{io, fs}] = ??? x(cap => cap.use()) // error } diff --git a/tests/neg-custom-args/captures/box-adapt-cov.scala b/tests/neg-custom-args/captures/box-adapt-cov.scala index 2040a1c4654d..96901e81458d 100644 --- a/tests/neg-custom-args/captures/box-adapt-cov.scala +++ b/tests/neg-custom-args/captures/box-adapt-cov.scala @@ -1,14 +1,14 @@ trait Cap -def test1(io: {*} Cap) = { +def test1(io: Cap^{cap}) = { type Op[X] = [T] -> Unit -> X - val f: Op[{io} Cap] = ??? - val x: [T] -> Unit -> ({io} Cap) = f // error + val f: Op[Cap^{io}] = ??? + val x: [T] -> Unit -> Cap^{io} = f // error } -def test2(io: {*} Cap) = { - type Op[X] = [T] -> Unit -> {io} X - val f: Op[{io} Cap] = ??? - val x: Unit -> ({io} Cap) = f[Unit] // error - val x1: {io} Unit -> ({io} Cap) = f[Unit] // ok +def test2(io: Cap^{cap}) = { + type Op[X] = [T] -> Unit -> X^{io} + val f: Op[Cap^{io}] = ??? + val x: Unit -> Cap^{io} = f[Unit] // error + val x1: Unit ->{io} Cap^{io} = f[Unit] // ok } diff --git a/tests/neg-custom-args/captures/box-adapt-cs.scala b/tests/neg-custom-args/captures/box-adapt-cs.scala index e35388efd203..a39ed0200151 100644 --- a/tests/neg-custom-args/captures/box-adapt-cs.scala +++ b/tests/neg-custom-args/captures/box-adapt-cs.scala @@ -1,19 +1,17 @@ trait Cap { def use(): Int } -def test1(io: {*} Cap): Unit = { - type Id[X] = [T] -> (op: {io} X -> T) -> T +def test1(io: Cap^{cap}): Unit = { + type Id[X] = [T] -> (op: X ->{io} T) -> T - val x: Id[{io} Cap] = ??? - val f: ({*} Cap) -> Unit = ??? + val x: Id[Cap^{io}] = ??? + val f: (Cap^{cap}) -> Unit = ??? 
x(f) // ok - // actual: {*} Cap -> Unit - // expected: {io} box {io} Cap -> Unit } -def test2(io: {*} Cap): Unit = { - type Id[X] = [T] -> (op: {*} X -> T) -> T +def test2(io: Cap^{cap}): Unit = { + type Id[X] = [T] -> (op: X => T) -> T - val x: Id[{*} Cap] = ??? - val f: ({io} Cap) -> Unit = ??? + val x: Id[Cap^] = ??? + val f: Cap^{io} -> Unit = ??? x(f) // error } diff --git a/tests/neg-custom-args/captures/box-adapt-depfun.scala b/tests/neg-custom-args/captures/box-adapt-depfun.scala index 294e2c33f7fa..9416ffa040ab 100644 --- a/tests/neg-custom-args/captures/box-adapt-depfun.scala +++ b/tests/neg-custom-args/captures/box-adapt-depfun.scala @@ -1,23 +1,23 @@ trait Cap { def use(): Int } -def test1(io: {*} Cap): Unit = { - type Id[X] = [T] -> (op: {io} X -> T) -> T +def test1(io: Cap^): Unit = { + type Id[X] = [T] -> (op: X ->{io} T) -> T - val x: Id[{io} Cap] = ??? + val x: Id[Cap]^{io} = ??? x(cap => cap.use()) // ok } -def test2(io: {*} Cap): Unit = { - type Id[X] = [T] -> (op: {io} (x: X) -> T) -> T +def test2(io: Cap^): Unit = { + type Id[X] = [T] -> (op: (x: X) ->{io} T) -> T - val x: Id[{io} Cap] = ??? + val x: Id[Cap^{io}] = ??? x(cap => cap.use()) // should work when the expected type is a dependent function } -def test3(io: {*} Cap): Unit = { - type Id[X] = [T] -> (op: {} (x: X) -> T) -> T +def test3(io: Cap^{cap}): Unit = { + type Id[X] = [T] -> (op: (x: X) ->{} T) -> T - val x: Id[{io} Cap] = ??? + val x: Id[Cap^{io}] = ??? x(cap => cap.use()) // error } diff --git a/tests/neg-custom-args/captures/box-adapt-typefun.scala b/tests/neg-custom-args/captures/box-adapt-typefun.scala index b14b07e72e9b..65a06cd68ed9 100644 --- a/tests/neg-custom-args/captures/box-adapt-typefun.scala +++ b/tests/neg-custom-args/captures/box-adapt-typefun.scala @@ -1,13 +1,13 @@ trait Cap { def use(): Int } -def test1(io: {*} Cap): Unit = { +def test1(io: Cap^{cap}): Unit = { type Op[X] = [T] -> X -> Unit - val f: [T] -> ({io} Cap) -> Unit = ??? 
- val op: Op[{io} Cap] = f // error + val f: [T] -> (Cap^{io}) -> Unit = ??? + val op: Op[Cap^{io}] = f // error } -def test2(io: {*} Cap): Unit = { +def test2(io: Cap^{cap}): Unit = { type Lazy[X] = [T] -> Unit -> X - val f: Lazy[{io} Cap] = ??? - val test: [T] -> Unit -> ({io} Cap) = f // error + val f: Lazy[Cap^{io}] = ??? + val test: [T] -> Unit -> (Cap^{io}) = f // error } diff --git a/tests/neg-custom-args/captures/byname.check b/tests/neg-custom-args/captures/byname.check index 486f94d599ac..b1d8fb3b5404 100644 --- a/tests/neg-custom-args/captures/byname.check +++ b/tests/neg-custom-args/captures/byname.check @@ -1,20 +1,14 @@ --- Warning: tests/neg-custom-args/captures/byname.scala:17:18 ---------------------------------------------------------- -17 | def h(x: {cap1} -> I) = x // warning - | ^ - | Style: by-name `->` should immediately follow closing `}` of capture set - | to avoid confusion with function type. - | That is, `{c}-> T` instead of `{c} -> T`. -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/byname.scala:10:6 ---------------------------------------- 10 | h(f2()) // error | ^^^^ - | Found: {cap1} (x$0: Int) -> Int - | Required: {cap2} Int -> Int + | Found: (x$0: Int) ->{cap1} Int + | Required: (x$0: Int) ->{cap2} Int | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/byname.scala:19:5 ---------------------------------------- 19 | h(g()) // error | ^^^ - | Found: {cap2} () ?-> I - | Required: {cap1} () ?-> I + | Found: () ?->{cap2} I + | Required: () ?->{cap1} I | | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/byname.scala b/tests/neg-custom-args/captures/byname.scala index 1838647f2899..ac13174eb4f4 100644 --- a/tests/neg-custom-args/captures/byname.scala +++ b/tests/neg-custom-args/captures/byname.scala @@ -5,16 +5,16 @@ def test(cap1: Cap, cap2: Cap) = def g(x: Int) = if cap2 == cap2 then 1 else x 
def g2(x: Int) = if cap1 == cap1 then 1 else x def f2() = if cap1 == cap1 then g2 else g2 - def h(ff: => {cap2} Int -> Int) = ff + def h(ff: => Int ->{cap2} Int) = ff h(f()) // ok h(f2()) // error class I -def test2(cap1: Cap, cap2: Cap): {cap1} I = +def test2(cap1: Cap, cap2: Cap): I^{cap1} = def f() = if cap1 == cap1 then I() else I() def g() = if cap2 == cap2 then I() else I() - def h(x: {cap1} -> I) = x // warning + def h(x: ->{cap1} I) = x // ok h(f()) // OK h(g()) // error diff --git a/tests/neg-custom-args/captures/capt-depfun.scala b/tests/neg-custom-args/captures/capt-depfun.scala index a74764f432c7..20226b239198 100644 --- a/tests/neg-custom-args/captures/capt-depfun.scala +++ b/tests/neg-custom-args/captures/capt-depfun.scala @@ -1,8 +1,9 @@ import annotation.retains class C -type Cap = C @retains(caps.*) +type Cap = C @retains(caps.cap) +class Str def f(y: Cap, z: Cap) = def g(): C @retains(y, z) = ??? - val ac: ((x: Cap) => String @retains(x) => String @retains(x)) = ??? - val dc: (({y, z} String) => {y, z} String) = ac(g()) // error + val ac: ((x: Cap) => Str @retains(x) => Str @retains(x)) = ??? + val dc: ((Str^{y, z}) => Str^{y, z}) = ac(g()) // error diff --git a/tests/neg-custom-args/captures/capt-depfun2.scala b/tests/neg-custom-args/captures/capt-depfun2.scala index 74b9441593c1..cb4bc5f9634d 100644 --- a/tests/neg-custom-args/captures/capt-depfun2.scala +++ b/tests/neg-custom-args/captures/capt-depfun2.scala @@ -1,11 +1,12 @@ import annotation.retains class C -type Cap = C @retains(caps.*) +type Cap = C @retains(caps.cap) +class Str def f(y: Cap, z: Cap) = def g(): C @retains(y, z) = ??? - val ac: ((x: Cap) => Array[String @retains(x)]) = ??? - val dc = ac(g()) // error: Needs explicit type Array[? >: String <: {y, z} String] + val ac: ((x: Cap) => Array[Str @retains(x)]) = ??? + val dc = ac(g()) // error: Needs explicit type Array[? 
>: Str <: {y, z} Str] // This is a shortcoming of rechecking since the originally inferred - // type is `Array[String]` and the actual type after rechecking - // cannot be expressed as `Array[C String]` for any capture set C \ No newline at end of file + // type is `Array[Str]` and the actual type after rechecking + // cannot be expressed as `Array[C Str]` for any capture set C \ No newline at end of file diff --git a/tests/neg-custom-args/captures/capt-env.scala b/tests/neg-custom-args/captures/capt-env.scala index 52fa4abfdaa8..6602678af167 100644 --- a/tests/neg-custom-args/captures/capt-env.scala +++ b/tests/neg-custom-args/captures/capt-env.scala @@ -1,5 +1,5 @@ class C -type Cap = {*} C +type Cap = C^ class Pair[+A, +B](x: A, y: B): def fst: A = x diff --git a/tests/neg-custom-args/captures/capt-test.scala b/tests/neg-custom-args/captures/capt-test.scala index 1799fc5073ca..f14951f410c4 100644 --- a/tests/neg-custom-args/captures/capt-test.scala +++ b/tests/neg-custom-args/captures/capt-test.scala @@ -2,8 +2,8 @@ import annotation.retains import language.experimental.erasedDefinitions class CT[E <: Exception] -type CanThrow[E <: Exception] = CT[E] @retains(caps.*) -type Top = Any @retains(caps.*) +type CanThrow[E <: Exception] = CT[E] @retains(caps.cap) +type Top = Any @retains(caps.cap) infix type throws[R, E <: Exception] = (erased CanThrow[E]) ?=> R @@ -14,14 +14,14 @@ def raise[E <: Exception](e: E): Nothing throws E = throw e def foo(x: Boolean): Int throws Fail = if x then 1 else raise(Fail()) -def handle[E <: Exception, R <: Top](op: (CanThrow[E]) => R)(handler: E => R): R = +def handle[E <: Exception, sealed R <: Top](op: (CanThrow[E]) => R)(handler: E => R): R = val x: CanThrow[E] = ??? try op(x) catch case ex: E => handler(ex) def test: Unit = - val b = handle[Exception, () => Nothing] { + val b = handle[Exception, () => Nothing] { // error (x: CanThrow[Exception]) => () => raise(new Exception)(using x) - } { // error + } { (ex: Exception) => ??? 
} diff --git a/tests/neg-custom-args/captures/capt-wf-typer.scala b/tests/neg-custom-args/captures/capt-wf-typer.scala index 4fc50caed1f7..09b2841d3c77 100644 --- a/tests/neg-custom-args/captures/capt-wf-typer.scala +++ b/tests/neg-custom-args/captures/capt-wf-typer.scala @@ -1,11 +1,11 @@ import annotation.retains class C -type Cap = {*} C +type Cap = C^ object foo def test(c: Cap, other: String): Unit = - val x7: {c} String = ??? // OK + val x7: String^{c} = ??? // OK val x8: String @retains(x7 + x7) = ??? // error val x9: String @retains(foo) = ??? // error () \ No newline at end of file diff --git a/tests/neg-custom-args/captures/capt-wf2.scala b/tests/neg-custom-args/captures/capt-wf2.scala index ddde535fcab0..6c65e0dc77f7 100644 --- a/tests/neg-custom-args/captures/capt-wf2.scala +++ b/tests/neg-custom-args/captures/capt-wf2.scala @@ -1,5 +1,5 @@ @annotation.capability class C def test(c: C) = - var x: {c} Any = ??? - val y: {x} Any = x // error + var x: Any^{c} = ??? + val y: Any^{x} = x // error diff --git a/tests/neg-custom-args/captures/capt1.check b/tests/neg-custom-args/captures/capt1.check index 51ed3e6736cf..85d3b2a7ddcb 100644 --- a/tests/neg-custom-args/captures/capt1.check +++ b/tests/neg-custom-args/captures/capt1.check @@ -1,21 +1,21 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/capt1.scala:4:2 ------------------------------------------ 4 | () => if x == null then y else y // error | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | Found: {x} () -> ? C + | Found: () ->{x} C^? | Required: () -> C | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/capt1.scala:7:2 ------------------------------------------ 7 | () => if x == null then y else y // error | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | Found: {x} () -> ? C + | Found: () ->{x} C^? 
| Required: Matchable | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/capt1.scala:14:2 ----------------------------------------- 14 | def f(y: Int) = if x == null then y else y // error | ^ - | Found: {x} Int -> Int + | Found: Int ->{x} Int | Required: Matchable 15 | f | @@ -23,7 +23,7 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/capt1.scala:21:2 ----------------------------------------- 21 | class F(y: Int) extends A: // error | ^ - | Found: {x} A + | Found: A^{x} | Required: A 22 | def m() = if x == null then y else y 23 | F(22) @@ -32,7 +32,7 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/capt1.scala:26:2 ----------------------------------------- 26 | new A: // error | ^ - | Found: {x} A + | Found: A^{x} | Required: A 27 | def m() = if x == null then y else y | @@ -40,14 +40,7 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/capt1.scala:32:24 ---------------------------------------- 32 | val z2 = h[() -> Cap](() => x) // error | ^^^^^^^ - | Found: {x} () -> Cap - | Required: () -> box {*} C - | - | longer explanation available when compiling with `-explain` --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/capt1.scala:33:5 ----------------------------------------- -33 | (() => C()) // error - | ^^^^^^^^^ - | Found: ? 
() -> Cap - | Required: () -> box {*} C + | Found: () ->{x} box C^ + | Required: () -> box C^ | | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/capt1.scala b/tests/neg-custom-args/captures/capt1.scala index 59ba874b02f5..651184e8d2c9 100644 --- a/tests/neg-custom-args/captures/capt1.scala +++ b/tests/neg-custom-args/captures/capt1.scala @@ -1,21 +1,21 @@ import annotation.retains class C -def f(x: C @retains(caps.*), y: C): () -> C = +def f(x: C @retains(caps.cap), y: C): () -> C = () => if x == null then y else y // error -def g(x: C @retains(caps.*), y: C): Matchable = +def g(x: C @retains(caps.cap), y: C): Matchable = () => if x == null then y else y // error -def h1(x: C @retains(caps.*), y: C): Any = +def h1(x: C @retains(caps.cap), y: C): Any = def f() = if x == null then y else y () => f() // ok -def h2(x: C @retains(caps.*)): Matchable = +def h2(x: C @retains(caps.cap)): Matchable = def f(y: Int) = if x == null then y else y // error f class A -type Cap = C @retains(caps.*) +type Cap = C @retains(caps.cap) def h3(x: Cap): A = class F(y: Int) extends A: // error @@ -27,10 +27,10 @@ def h4(x: Cap, y: Int): A = def m() = if x == null then y else y def foo() = - val x: C @retains(caps.*) = ??? + val x: C @retains(caps.cap) = ??? 
def h[X](a: X)(b: X) = a val z2 = h[() -> Cap](() => x) // error - (() => C()) // error + (() => C()) val z3 = h[(() -> Cap) @retains(x)](() => x)(() => C()) // ok val z4 = h[(() -> Cap) @retains(x)](() => x)(() => C()) // what was inferred for z3 diff --git a/tests/neg-custom-args/captures/capt2.scala b/tests/neg-custom-args/captures/capt2.scala index 8b08832dfdb9..cd6f41424a22 100644 --- a/tests/neg-custom-args/captures/capt2.scala +++ b/tests/neg-custom-args/captures/capt2.scala @@ -1,9 +1,9 @@ //import scala.retains class C -type Cap = {*} C +type Cap = C^ -def f1(c: Cap): (() -> {c} C) = () => c // error, but would be OK under capture abbreciations for funciton types -def f2(c: Cap): ({c} () -> C) = () => c // error +def f1(c: Cap): (() -> C^{c}) = () => c // error, but would be OK under capture abbreciations for funciton types +def f2(c: Cap): (() ->{c} C) = () => c // error def h5(x: Cap): () -> C = f1(x) // error diff --git a/tests/neg-custom-args/captures/capt3.scala b/tests/neg-custom-args/captures/capt3.scala index 84164d433029..44a7ffdc6c4a 100644 --- a/tests/neg-custom-args/captures/capt3.scala +++ b/tests/neg-custom-args/captures/capt3.scala @@ -1,6 +1,6 @@ import annotation.retains class C -type Cap = C @retains(caps.*) +type Cap = C @retains(caps.cap) def test1() = val x: Cap = C() diff --git a/tests/neg-custom-args/captures/caseclass/Test_2.scala b/tests/neg-custom-args/captures/caseclass/Test_2.scala index 4eac6a260292..bffc0a295bdc 100644 --- a/tests/neg-custom-args/captures/caseclass/Test_2.scala +++ b/tests/neg-custom-args/captures/caseclass/Test_2.scala @@ -2,7 +2,7 @@ def test(c: C) = val pure: () -> Unit = () => () val impure: () => Unit = pure - val mixed: {c} () -> Unit = pure + val mixed: () ->{c} Unit = pure val x = Ref(impure) val _: Ref = x // error val y = x.copy() @@ -16,10 +16,10 @@ def test(c: C) = val yc2: Ref = y2 val x3 = Ref(mixed) - val _: {c} Ref = x3 + val _: Ref^{c} = x3 val y3 = x3.copy() - val yc3: {c} Ref = y3 + val yc3: 
Ref^{c} = y3 val y4 = y3 match case Ref(xx) => xx - val y4c: {x3} () -> Unit = y4 + val y4c: () ->{x3} Unit = y4 diff --git a/tests/neg-custom-args/captures/cc-depfun.scala b/tests/neg-custom-args/captures/cc-depfun.scala new file mode 100644 index 000000000000..106a73dd7ce1 --- /dev/null +++ b/tests/neg-custom-args/captures/cc-depfun.scala @@ -0,0 +1,9 @@ +trait Cap { def use(): Unit } + +def main() = { + val f: (io: Cap^) -> () ->{} Unit = + io => () => io.use() // error + + val g: (Cap^) -> () ->{} Unit = + io => () => io.use() // error +} diff --git a/tests/neg-custom-args/captures/cc-subst-param-exact.scala b/tests/neg-custom-args/captures/cc-subst-param-exact.scala new file mode 100644 index 000000000000..35e4acb95fdc --- /dev/null +++ b/tests/neg-custom-args/captures/cc-subst-param-exact.scala @@ -0,0 +1,33 @@ +import language.experimental.captureChecking +import caps.* + +trait Ref[T] { def set(x: T): T } +def test() = { + + def swap[T](x: Ref[T]^)(y: Ref[T]^{x}): Unit = ??? + def foo[T](x: Ref[T]^): Unit = + swap(x)(x) + + def bar[T](x: () => Ref[T]^)(y: Ref[T]^{x}): Unit = + swap(x())(y) // error + + def baz[T](x: Ref[T]^)(y: Ref[T]^{x}): Unit = + swap(x)(y) +} + +trait IO +type Op = () -> Unit +def test2(c: IO^, f: Op^{c}) = { + def run(io: IO^)(op: Op^{io}): Unit = op() + run(c)(f) + + def bad(getIO: () => IO^, g: Op^{getIO}): Unit = + run(getIO())(g) // error +} + +def test3() = { + def run(io: IO^)(op: Op^{io}): Unit = ??? + val myIO: IO^ = ??? + val myOp: Op^{myIO} = ??? 
+ run(myIO)(myOp) +} diff --git a/tests/neg-custom-args/captures/cc-this.check b/tests/neg-custom-args/captures/cc-this.check index c492df15078f..47207f913f1d 100644 --- a/tests/neg-custom-args/captures/cc-this.check +++ b/tests/neg-custom-args/captures/cc-this.check @@ -1,7 +1,7 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/cc-this.scala:8:15 --------------------------------------- 8 | val y: C = this // error | ^^^^ - | Found: (C.this : {C.this.x} C) + | Found: (C.this : C^{C.this.x}) | Required: C | | longer explanation available when compiling with `-explain` @@ -9,10 +9,7 @@ 10 | class C2(val x: () => Int): // error | ^ | reference (C2.this.x : () => Int) is not included in allowed capture set {} of the self type of class C2 --- [E058] Type Mismatch Error: tests/neg-custom-args/captures/cc-this.scala:17:8 --------------------------------------- +-- Error: tests/neg-custom-args/captures/cc-this.scala:17:8 ------------------------------------------------------------ 17 | class C4(val f: () => Int) extends C3 // error - | ^ - | illegal inheritance: self type {C4.this.f} C4 of class C4 does not conform to self type C3 - | of parent class C3 - | - | longer explanation available when compiling with `-explain` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | reference (C4.this.f : () => Int) is not included in allowed capture set {} of pure base class class C3 diff --git a/tests/neg-custom-args/captures/cc-this2.check b/tests/neg-custom-args/captures/cc-this2.check index d10519636ca8..e0df7c857c85 100644 --- a/tests/neg-custom-args/captures/cc-this2.check +++ b/tests/neg-custom-args/captures/cc-this2.check @@ -1,8 +1,6 @@ --- [E058] Type Mismatch Error: tests/neg-custom-args/captures/cc-this2/D_2.scala:2:6 ----------------------------------- +-- Error: tests/neg-custom-args/captures/cc-this2/D_2.scala:2:6 -------------------------------------------------------- 2 |class D extends C: // error - | ^ - | illegal inheritance: self type {*} D of class D 
does not conform to self type C - | of parent class C - | - | longer explanation available when compiling with `-explain` + |^ + |reference (scala.caps.cap : Any) is not included in allowed capture set {} of pure base class class C +3 | this: D^ => diff --git a/tests/neg-custom-args/captures/cc-this2/D_2.scala b/tests/neg-custom-args/captures/cc-this2/D_2.scala index 793f3f6353a9..b22e5e456092 100644 --- a/tests/neg-custom-args/captures/cc-this2/D_2.scala +++ b/tests/neg-custom-args/captures/cc-this2/D_2.scala @@ -1,3 +1,3 @@ class D extends C: // error - this: {*} D => + this: D^ => diff --git a/tests/neg-custom-args/captures/cc-this3.check b/tests/neg-custom-args/captures/cc-this3.check index 705cdfbc00d7..d57471c6872e 100644 --- a/tests/neg-custom-args/captures/cc-this3.check +++ b/tests/neg-custom-args/captures/cc-this3.check @@ -1,14 +1,14 @@ -- [E058] Type Mismatch Error: tests/neg-custom-args/captures/cc-this3.scala:8:6 --------------------------------------- 8 |class B extends A: // error | ^ - | illegal inheritance: self type {*} B of class B does not conform to self type {} A + | illegal inheritance: self type B^ of class B does not conform to self type A^{} | of parent class A | | longer explanation available when compiling with `-explain` -- [E058] Type Mismatch Error: tests/neg-custom-args/captures/cc-this3.scala:11:6 -------------------------------------- 11 |class C(val f: () => Int) extends A // error | ^ - | illegal inheritance: self type {C.this.f} C of class C does not conform to self type {} A + | illegal inheritance: self type C^{C.this.f} of class C does not conform to self type A^{} | of parent class A | | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/cc-this3.scala b/tests/neg-custom-args/captures/cc-this3.scala index eeb9606f0c81..25af19dd6c4a 100644 --- a/tests/neg-custom-args/captures/cc-this3.scala +++ b/tests/neg-custom-args/captures/cc-this3.scala @@ -6,13 +6,13 @@ class A: val 
x: A = this class B extends A: // error - this: {*} B => + this: B^ => class C(val f: () => Int) extends A // error class A2 class B2 extends A2: // ok - this: {*} B2 => + this: B2^ => class C2(val f: () => Int) extends A2 // ok diff --git a/tests/neg-custom-args/captures/cc-this4.check b/tests/neg-custom-args/captures/cc-this4.check index a54ca8d57f4e..52c06f5bbc30 100644 --- a/tests/neg-custom-args/captures/cc-this4.check +++ b/tests/neg-custom-args/captures/cc-this4.check @@ -2,5 +2,5 @@ 1 |open class C: // error | ^ | class C needs an explicitly declared self type since its - | inferred self type {} C + | inferred self type C^{} | is not visible in other compilation units that define subclasses. diff --git a/tests/neg-custom-args/captures/cc-this5.check b/tests/neg-custom-args/captures/cc-this5.check index 8cc1ac9ccc5d..84ac97474b80 100644 --- a/tests/neg-custom-args/captures/cc-this5.check +++ b/tests/neg-custom-args/captures/cc-this5.check @@ -5,14 +5,14 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/cc-this5.scala:21:15 ------------------------------------- 21 | val x: A = this // error | ^^^^ - | Found: (A.this : {c} A) + | Found: (A.this : A^{c}) | Required: A | | longer explanation available when compiling with `-explain` -- [E058] Type Mismatch Error: tests/neg-custom-args/captures/cc-this5.scala:7:9 --------------------------------------- 7 | object D extends C: // error | ^ - | illegal inheritance: self type {c} D.type of object D does not conform to self type {} C + | illegal inheritance: self type D.type^{c} of object D does not conform to self type C^{} | of parent class C | | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/cc1.scala b/tests/neg-custom-args/captures/cc1.scala index 10a9793eabe8..6787b417a3b2 100644 --- a/tests/neg-custom-args/captures/cc1.scala +++ b/tests/neg-custom-args/captures/cc1.scala @@ -1,5 +1,5 @@ import annotation.retains object Test: - def f[A <: 
Matchable @retains(caps.*)](x: A): Matchable = x // error + def f[A <: Matchable @retains(caps.cap)](x: A): Matchable = x // error diff --git a/tests/neg-custom-args/captures/class-constr.scala b/tests/neg-custom-args/captures/class-constr.scala index eeedf1043f37..9afb6972ccfa 100644 --- a/tests/neg-custom-args/captures/class-constr.scala +++ b/tests/neg-custom-args/captures/class-constr.scala @@ -6,10 +6,10 @@ class C(x: Cap, @constructorOnly y: Cap) def test(a: Cap, b: Cap) = val f = () => C(a, b) - val f_ok: {a, b} () -> {a} C = f - val f_no1: {a, b} () -> C = f // error - val f_no2: {a} () -> {a} C = f // error - val f_no3: {b} () -> {a} C = f // error + val f_ok: () ->{a, b} C^{a} = f + val f_no1: () ->{a, b} C = f // error + val f_no2: () ->{a} C^{a} = f // error + val f_no3: () ->{a} C^{a} = f // error class D: val xz = @@ -19,6 +19,6 @@ def test(a: Cap, b: Cap) = println(b) 2 val d = () => new D() - val d_ok1: {a, b} () -> {a, b} D = d - val d_ok2: () -> {a, b} D = d // because of function shorthand - val d_ok3: {a, b} () -> {b} D = d // error, but should work + val d_ok1: () ->{a, b} D^{a, b} = d + val d_ok2: () -> D^{a, b} = d // because of function shorthand + val d_ok3: () ->{a, b} D^{b} = d // error, but should work diff --git a/tests/neg-custom-args/captures/class-contra.check b/tests/neg-custom-args/captures/class-contra.check index 69a5f0097de8..6d4c89f872ad 100644 --- a/tests/neg-custom-args/captures/class-contra.check +++ b/tests/neg-custom-args/captures/class-contra.check @@ -1,7 +1,7 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/class-contra.scala:12:39 --------------------------------- -12 | def fun(x: K{val f: {a} T}) = x.setf(a) // error +12 | def fun(x: K{val f: T^{a}}) = x.setf(a) // error | ^ - | Found: (a : {x, y} T) + | Found: (a : T^{x, y}) | Required: T | | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/class-contra.scala 
b/tests/neg-custom-args/captures/class-contra.scala index 270aaf9309a9..210fd4e331f1 100644 --- a/tests/neg-custom-args/captures/class-contra.scala +++ b/tests/neg-custom-args/captures/class-contra.scala @@ -1,13 +1,13 @@ class C -type Cap = {*} C +type Cap = C^ -class K(val f: {*} T): - def setf(x: {f} T) = ??? +class K(val f: T^): + def setf(x: T^{f}) = ??? class T def test(x: Cap, y: Cap) = - val a: {x, y} T = ??? - def fun(x: K{val f: {a} T}) = x.setf(a) // error + val a: T^{x, y} = ??? + def fun(x: K{val f: T^{a}}) = x.setf(a) // error () \ No newline at end of file diff --git a/tests/neg-custom-args/captures/classes.scala b/tests/neg-custom-args/captures/classes.scala index e4c141ea981b..3572e31a6f50 100644 --- a/tests/neg-custom-args/captures/classes.scala +++ b/tests/neg-custom-args/captures/classes.scala @@ -1,12 +1,12 @@ class B -type Cap = {*} B +type Cap = B^ class C0(n: Cap) // was error: class parameter must be a `val`, now OK class C(val n: Cap): - def foo(): {n} B = n + def foo(): B^{n} = n def test(x: Cap, y: Cap) = val c0 = C(x) val c1: C = c0 // error val c2 = if ??? 
then C(x) else identity(C(y)) - val c3: {x} C { val n: {x, y} B } = c2 // error + val c3: C { val n: B^{x, y} }^{x} = c2 // error diff --git a/tests/neg-custom-args/captures/ctest.scala b/tests/neg-custom-args/captures/ctest.scala index 08bec16d8177..ad10b43a7773 100644 --- a/tests/neg-custom-args/captures/ctest.scala +++ b/tests/neg-custom-args/captures/ctest.scala @@ -1,6 +1,6 @@ class CC -type Cap = {*} CC +type Cap = CC^ def test(cap1: Cap, cap2: Cap) = - var b: List[String => String] = Nil // was error, now OK - val bc = b.head // error + var b: List[String => String] = Nil // error + val bc = b.head // was error, now OK diff --git a/tests/neg-custom-args/captures/curried-simplified.check b/tests/neg-custom-args/captures/curried-simplified.check index 5d23a7a4955e..6a792314e4e3 100644 --- a/tests/neg-custom-args/captures/curried-simplified.check +++ b/tests/neg-custom-args/captures/curried-simplified.check @@ -1,42 +1,42 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/curried-simplified.scala:7:28 ---------------------------- 7 | def y1: () -> () -> Int = x1 // error | ^^ - | Found: {x} () -> {x} () -> Int + | Found: () ->? () ->{x} Int | Required: () -> () -> Int | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/curried-simplified.scala:9:28 ---------------------------- 9 | def y2: () -> () => Int = x2 // error | ^^ - | Found: {x} () -> {*} () -> Int + | Found: () ->{x} () => Int | Required: () -> () => Int | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/curried-simplified.scala:11:39 --------------------------- 11 | def y3: Cap -> Protect[Int -> Int] = x3 // error | ^^ - | Found: ? (x$0: Cap) -> {x$0} Int -> Int + | Found: (x$0: Cap) ->? 
Int ->{x$0} Int | Required: Cap -> Protect[Int -> Int] | | longer explanation available when compiling with `-explain` --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/curried-simplified.scala:15:33 --------------------------- -15 | def y5: Cap -> {} Int -> Int = x5 // error - | ^^ - | Found: ? Cap -> {x} Int -> Int - | Required: Cap -> {} Int -> Int +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/curried-simplified.scala:15:32 --------------------------- +15 | def y5: Cap -> Int ->{} Int = x5 // error + | ^^ + | Found: Cap ->? Int ->{x} Int + | Required: Cap -> Int ->{} Int | | longer explanation available when compiling with `-explain` --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/curried-simplified.scala:17:49 --------------------------- -17 | def y6: Cap -> {} Cap -> Protect[Int -> Int] = x6 // error - | ^^ - | Found: ? (x$0: Cap) -> {x$0} (x$0: Cap) -> {x$0, x$0} Int -> Int - | Required: Cap -> {} Cap -> Protect[Int -> Int] +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/curried-simplified.scala:17:48 --------------------------- +17 | def y6: Cap -> Cap ->{} Protect[Int -> Int] = x6 // error + | ^^ + | Found: (x$0: Cap) ->? (x$0: Cap) ->{x$0} Int ->{x$0, x$0} Int + | Required: Cap -> Cap ->{} Protect[Int -> Int] | | longer explanation available when compiling with `-explain` --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/curried-simplified.scala:19:49 --------------------------- -19 | def y7: Cap -> Protect[Cap -> {} Int -> Int] = x7 // error - | ^^ - | Found: ? (x$0: Cap) -> {x$0} (x: Cap) -> {x$0, x} Int -> Int - | Required: Cap -> Protect[Cap -> {} Int -> Int] +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/curried-simplified.scala:19:48 --------------------------- +19 | def y7: Cap -> Protect[Cap -> Int ->{} Int] = x7 // error + | ^^ + | Found: (x$0: Cap) ->? 
(x: Cap) ->{x$0} Int ->{x$0, x} Int + | Required: Cap -> Protect[Cap -> Int ->{} Int] | | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/curried-simplified.scala b/tests/neg-custom-args/captures/curried-simplified.scala index 25b23370d154..988cf7c11c45 100644 --- a/tests/neg-custom-args/captures/curried-simplified.scala +++ b/tests/neg-custom-args/captures/curried-simplified.scala @@ -3,19 +3,19 @@ type Protect[T] = T def test(x: Cap, y: Cap) = - def x1: {x} () -> () -> Int = ??? + def x1: () -> () ->{x} Int = ??? def y1: () -> () -> Int = x1 // error - def x2: {x} () -> () => Int = ??? + def x2: () ->{x} () => Int = ??? def y2: () -> () => Int = x2 // error def x3: Cap -> Int -> Int = ??? def y3: Cap -> Protect[Int -> Int] = x3 // error def x4: Cap -> Protect[Int -> Int] = ??? - def y4: Cap -> {} Int -> Int = x4 // ok - def x5: Cap -> {x} Int -> Int = ??? - def y5: Cap -> {} Int -> Int = x5 // error + def y4: Cap -> Int ->{} Int = x4 // ok + def x5: Cap -> Int ->{x} Int = ??? + def y5: Cap -> Int ->{} Int = x5 // error def x6: Cap -> Cap -> Int -> Int = ??? - def y6: Cap -> {} Cap -> Protect[Int -> Int] = x6 // error + def y6: Cap -> Cap ->{} Protect[Int -> Int] = x6 // error def x7: Cap -> (x: Cap) -> Int -> Int = ??? 
- def y7: Cap -> Protect[Cap -> {} Int -> Int] = x7 // error + def y7: Cap -> Protect[Cap -> Int ->{} Int] = x7 // error diff --git a/tests/neg-custom-args/captures/emptyref-in-self.scala b/tests/neg-custom-args/captures/emptyref-in-self.scala index 60f782deca6b..8bac47212f5b 100644 --- a/tests/neg-custom-args/captures/emptyref-in-self.scala +++ b/tests/neg-custom-args/captures/emptyref-in-self.scala @@ -1,3 +1,3 @@ -class Zip[A, B](underlying: String, other: {*} String) { - this: {underlying, other} Zip[A, B] => // error +class Zip[A, B](underlying: String, other: String^) { + this: Zip[A, B]^{underlying, other} => // error } diff --git a/tests/neg-custom-args/captures/eta.check b/tests/neg-custom-args/captures/eta.check index ebd63855181b..a77d66382095 100644 --- a/tests/neg-custom-args/captures/eta.check +++ b/tests/neg-custom-args/captures/eta.check @@ -1,14 +1,14 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/eta.scala:4:9 -------------------------------------------- 4 | g // error | ^ - | Found: ? () -> A - | Required: () -> {f} Proc + | Found: () ->? A + | Required: () -> Proc^{f} | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/eta.scala:6:14 ------------------------------------------- 6 | bar( () => f ) // error | ^^^^^^^ - | Found: {f} () -> box {f} () -> Unit - | Required: () -> box ? () -> Unit + | Found: () ->{f} box () ->{f} Unit + | Required: () -> box () ->? 
Unit | | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/eta.scala b/tests/neg-custom-args/captures/eta.scala index 3d9d759d2203..5cc0196a04c6 100644 --- a/tests/neg-custom-args/captures/eta.scala +++ b/tests/neg-custom-args/captures/eta.scala @@ -1,7 +1,7 @@ - type Proc = (() -> Unit) - def foo(f: {*} Proc): {} Proc = - def bar[A <: {f} Proc](g: () -> A): () -> {f} Proc = + type Proc = () -> Unit + def foo(f: Proc^): Proc^{} = + def bar[A <: Proc^{f}](g: () -> A): () -> Proc^{f} = g // error - val stowaway: () -> {f} Proc = + val stowaway: () -> Proc^{f} = bar( () => f ) // error () => { stowaway.apply().apply() } \ No newline at end of file diff --git a/tests/neg-custom-args/captures/exception-definitions.check b/tests/neg-custom-args/captures/exception-definitions.check new file mode 100644 index 000000000000..8dca91bc8e43 --- /dev/null +++ b/tests/neg-custom-args/captures/exception-definitions.check @@ -0,0 +1,17 @@ +-- Error: tests/neg-custom-args/captures/exception-definitions.scala:2:6 ----------------------------------------------- +2 |class Err extends Exception: // error + |^ + |reference (scala.caps.cap : Any) is not included in allowed capture set {} of pure base class class Throwable +3 | self: Err^ => +-- Error: tests/neg-custom-args/captures/exception-definitions.scala:10:6 ---------------------------------------------- +10 |class Err4(c: Any^) extends AnyVal // error + |^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + |reference (Err4.this.c : Any^) is not included in allowed capture set {} of pure base class class AnyVal +-- Error: tests/neg-custom-args/captures/exception-definitions.scala:7:12 ---------------------------------------------- +7 | val x = c // error + | ^ + |(c : Any^) cannot be referenced here; it is not included in the allowed capture set {} of pure base class class Throwable +-- Error: tests/neg-custom-args/captures/exception-definitions.scala:8:8 
----------------------------------------------- +8 | class Err3(c: Any^) extends Exception // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | reference (Err3.this.c : Any^) is not included in allowed capture set {} of pure base class class Throwable diff --git a/tests/neg-custom-args/captures/exception-definitions.scala b/tests/neg-custom-args/captures/exception-definitions.scala new file mode 100644 index 000000000000..996f64ae4bd1 --- /dev/null +++ b/tests/neg-custom-args/captures/exception-definitions.scala @@ -0,0 +1,12 @@ + +class Err extends Exception: // error + self: Err^ => + +def test(c: Any^) = + class Err2 extends Exception: + val x = c // error + class Err3(c: Any^) extends Exception // error + +class Err4(c: Any^) extends AnyVal // error + + diff --git a/tests/neg-custom-args/captures/filevar.scala b/tests/neg-custom-args/captures/filevar.scala new file mode 100644 index 000000000000..830563f51de3 --- /dev/null +++ b/tests/neg-custom-args/captures/filevar.scala @@ -0,0 +1,18 @@ +import language.experimental.captureChecking +import compiletime.uninitialized + +class File: + def write(x: String): Unit = ??? + +class Service: + var file: File^ = uninitialized // error + def log = file.write("log") + +def withFile[T](op: (f: File^) => T): T = + op(new File) + +def test = + withFile: f => + val o = Service() + o.file = f + o.log diff --git a/tests/neg-custom-args/captures/heal-tparam-cs.scala b/tests/neg-custom-args/captures/heal-tparam-cs.scala new file mode 100644 index 000000000000..58d12f8b6ce5 --- /dev/null +++ b/tests/neg-custom-args/captures/heal-tparam-cs.scala @@ -0,0 +1,33 @@ +import language.experimental.captureChecking + +trait Cap { def use(): Unit } + +def localCap[sealed T](op: (cap: Cap^{cap}) => T): T = ??? 
+ +def main(io: Cap^{cap}, net: Cap^{cap}): Unit = { + val test1 = localCap { cap => // error + () => { cap.use() } + } + + val test2: (cap: Cap^{cap}) -> () ->{cap} Unit = + localCap { cap => // should work + (cap1: Cap^{cap}) => () => { cap1.use() } + } + + val test3: (cap: Cap^{io}) -> () ->{io} Unit = + localCap { cap => // should work + (cap1: Cap^{io}) => () => { cap1.use() } + } + + val test4: (cap: Cap^{io}) -> () ->{net} Unit = + localCap { cap => // error + (cap1: Cap^{io}) => () => { cap1.use() } + } + + def localCap2[sealed T](op: (cap: Cap^{io}) => T): T = ??? + + val test5: () ->{io} Unit = + localCap2 { cap => // ok + () => { cap.use() } + } +} diff --git a/tests/neg-custom-args/captures/i15049.scala b/tests/neg-custom-args/captures/i15049.scala index 4e32172c025d..d978e0e1ad0f 100644 --- a/tests/neg-custom-args/captures/i15049.scala +++ b/tests/neg-custom-args/captures/i15049.scala @@ -1,10 +1,10 @@ class Session: def request = "Response" class Foo: - private val session: {*} Session = new Session - def withSession[T](f: ({*} Session) => T): T = f(session) + private val session: Session^{cap} = new Session + def withSession[sealed T](f: (Session^{cap}) => T): T = f(session) -def Test = +def Test: Unit = val f = new Foo f.withSession(s => s).request // error - f.withSession[{*} Session](t => t) // error + f.withSession[Session^](t => t) // error diff --git a/tests/neg-custom-args/captures/i15116.check b/tests/neg-custom-args/captures/i15116.check index 83c552087646..4b637a7c2e40 100644 --- a/tests/neg-custom-args/captures/i15116.check +++ b/tests/neg-custom-args/captures/i15116.check @@ -2,27 +2,27 @@ 3 | val x = Foo(m) // error | ^^^^^^^^^^^^^^ | Non-local value x cannot have an inferred type - | {Bar.this.m} Foo{m: {Bar.this.m} String} + | Foo{val m: String^{Bar.this.m}}^{Bar.this.m} | with non-empty capture set {Bar.this.m}. | The type needs to be declared explicitly. 
-- Error: tests/neg-custom-args/captures/i15116.scala:5:6 -------------------------------------------------------------- 5 | val x = Foo(m) // error | ^^^^^^^^^^^^^^ | Non-local value x cannot have an inferred type - | {Baz.this} Foo{m: {Baz.this} String} + | Foo{val m: String^}^{Baz.this} | with non-empty capture set {Baz.this}. | The type needs to be declared explicitly. -- Error: tests/neg-custom-args/captures/i15116.scala:7:6 -------------------------------------------------------------- 7 | val x = Foo(m) // error | ^^^^^^^^^^^^^^ | Non-local value x cannot have an inferred type - | {Bar1.this.m} Foo{m: {Bar1.this.m} String} + | Foo{val m: String^{Bar1.this.m}}^{Bar1.this.m} | with non-empty capture set {Bar1.this.m}. | The type needs to be declared explicitly. -- Error: tests/neg-custom-args/captures/i15116.scala:9:6 -------------------------------------------------------------- 9 | val x = Foo(m) // error | ^^^^^^^^^^^^^^ | Non-local value x cannot have an inferred type - | {Baz2.this} Foo{m: {Baz2.this} String} + | Foo{val m: String^}^{Baz2.this} | with non-empty capture set {Baz2.this}. | The type needs to be declared explicitly. 
diff --git a/tests/neg-custom-args/captures/i15116.scala b/tests/neg-custom-args/captures/i15116.scala index 1659f251df3e..c4dc6c88d56c 100644 --- a/tests/neg-custom-args/captures/i15116.scala +++ b/tests/neg-custom-args/captures/i15116.scala @@ -1,9 +1,9 @@ -class Foo(m: {*} String) -class Bar(val m: {*} String): +class Foo(m: String^) +class Bar(val m: String^): val x = Foo(m) // error -trait Baz(val m: {*} String): +trait Baz(val m: String^): val x = Foo(m) // error -class Bar1(m: {*} String): +class Bar1(m: String^): val x = Foo(m) // error -trait Baz2(m: {*} String): +trait Baz2(m: String^): val x = Foo(m) // error diff --git a/tests/neg-custom-args/captures/i15749.scala b/tests/neg-custom-args/captures/i15749.scala deleted file mode 100644 index 00d1811498f7..000000000000 --- a/tests/neg-custom-args/captures/i15749.scala +++ /dev/null @@ -1,15 +0,0 @@ -class Unit -object unit extends Unit - -type Top = {*} Any - -type LazyVal[T] = {*} Unit -> T - -class Foo[T](val x: T) - -// Foo[□ {*} Unit -> T] -type BoxedLazyVal[T] = Foo[LazyVal[T]] - -def force[A](v: BoxedLazyVal[A]): A = - // Γ ⊢ v.x : □ {*} Unit -> A - v.x(unit) // error: (unbox v.x)(unit), where (unbox v.x) should be untypable \ No newline at end of file diff --git a/tests/neg-custom-args/captures/i15749a.scala b/tests/neg-custom-args/captures/i15749a.scala deleted file mode 100644 index 9e439e28e98c..000000000000 --- a/tests/neg-custom-args/captures/i15749a.scala +++ /dev/null @@ -1,21 +0,0 @@ -class Unit -object unit extends Unit - -type Top = {*} Any - -type Wrapper[T] = [X] -> (op: {*} T -> X) -> X - -def test = - - def wrapper[T](x: T): Wrapper[T] = - [X] => (op: {*} T -> X) => op(x) - - def strictMap[A <: Top, B <: Top](mx: Wrapper[A])(f: {*} A -> B): Wrapper[B] = - mx((x: A) => wrapper(f(x))) - - def force[A](thunk: {*} Unit -> A): A = thunk(unit) - - def forceWrapper[A](mx: Wrapper[{*} Unit -> A]): Wrapper[A] = - // Γ ⊢ mx: Wrapper[□ {*} Unit => A] - // `force` should be typed as ∀(□ {*} Unit 
-> A) A, but it can not - strictMap[{*} Unit -> A, A](mx)(t => force[A](t)) // error diff --git a/tests/neg-custom-args/captures/i15772.check b/tests/neg-custom-args/captures/i15772.check index 765586ac5e27..949f7ca48588 100644 --- a/tests/neg-custom-args/captures/i15772.check +++ b/tests/neg-custom-args/captures/i15772.check @@ -1,28 +1,28 @@ --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/i15772.scala:20:49 --------------------------------------- -20 | val boxed1 : (({*} C) => Unit) -> Unit = box1(c) // error - | ^^^^^^^ - | Found: {c} ({*} ({c} C{arg: {*} C}) -> Unit) -> Unit - | Required: (({*} C) => Unit) -> Unit +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/i15772.scala:20:46 --------------------------------------- +20 | val boxed1 : ((C^) => Unit) -> Unit = box1(c) // error + | ^^^^^^^ + | Found: (C{val arg: C^}^{c} => Unit) ->{c} Unit + | Required: (C^ => Unit) -> Unit | | longer explanation available when compiling with `-explain` --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/i15772.scala:27:38 --------------------------------------- -27 | val boxed2 : Observe[{*} C] = box2(c) // error - | ^^^^^^^ - | Found: {c} ({*} ({c} C{arg: {*} C}) -> Unit) -> Unit - | Required: Observe[{*} C] +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/i15772.scala:27:35 --------------------------------------- +27 | val boxed2 : Observe[C^] = box2(c) // error + | ^^^^^^^ + | Found: (C{val arg: C^}^{c} => Unit) ->{c} Unit + | Required: Observe[C^] | | longer explanation available when compiling with `-explain` --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/i15772.scala:33:37 --------------------------------------- -33 | val boxed2 : Observe[{*} C] = box2(c) // error - | ^ - | Found: {*} C - | Required: box {*} C{arg: ? 
C} +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/i15772.scala:33:33 --------------------------------------- +33 | val boxed2 : Observe[C]^ = box2(c) // error + | ^^^^^^^ + | Found: (C{val arg: C^}^ => Unit) ->? Unit + | Required: (C => Unit) => Unit | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/i15772.scala:44:2 ---------------------------------------- 44 | x: (() -> Unit) // error | ^ - | Found: {x} () -> Unit + | Found: () ->{x} Unit | Required: () -> Unit | | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/i15772.scala b/tests/neg-custom-args/captures/i15772.scala index d3afdb6c63f1..e4efb6b9ccab 100644 --- a/tests/neg-custom-args/captures/i15772.scala +++ b/tests/neg-custom-args/captures/i15772.scala @@ -1,6 +1,6 @@ type Observe[T] = (T => Unit) -> Unit -def unsafe(cap: {*} C) = cap.bad() +def unsafe(cap: C^) = cap.bad() def box1[T](v: T) : (T => Unit) -> Unit = { (fn: T => Unit) => fn(v) @@ -10,35 +10,35 @@ def box2[T](v: T) : Observe[T] = { (fn: T => Unit) => fn(v) } -class C(val arg: {*} C) { +class C(val arg: C^) { def bad() = println("I've gone bad!") } -def main1(x: {*} C) : () -> Int = +def main1(x: C^) : () -> Int = () => - val c : {x} C = new C(x) - val boxed1 : (({*} C) => Unit) -> Unit = box1(c) // error - boxed1((cap: {*} C) => unsafe(c)) + val c : C^{x} = new C(x) + val boxed1 : ((C^) => Unit) -> Unit = box1(c) // error + boxed1((cap: C^) => unsafe(c)) 0 -def main2(x: {*} C) : () -> Int = +def main2(x: C^) : () -> Int = () => - val c : {x} C = new C(x) - val boxed2 : Observe[{*} C] = box2(c) // error - boxed2((cap: {*} C) => unsafe(c)) + val c : C^{x} = new C(x) + val boxed2 : Observe[C^] = box2(c) // error + boxed2((cap: C^) => unsafe(c)) 0 -def main3(x: {*} C) = - def c : {*} C = new C(x) - val boxed2 : Observe[{*} C] = box2(c) // error - boxed2((cap: {*} C) => unsafe(c)) +def main3(x: C^) = + 
def c : C^ = new C(x) + val boxed2 : Observe[C]^ = box2(c) // error + boxed2((cap: C^) => unsafe(c)) 0 trait File: def write(s: String): Unit -def main(io: {*} Any) = - val sayHello: (({io} File) => Unit) = (file: {io} File) => file.write("Hello World!\r\n") - val filesList : List[{io} File] = ??? +def main(io: Any^) = + val sayHello: ((File^{io}) => Unit) = (file: File^{io}) => file.write("Hello World!\r\n") + val filesList : List[File]^{io} = ??? val x = () => filesList.foreach(sayHello) x: (() -> Unit) // error diff --git a/tests/neg-custom-args/captures/i15921.scala b/tests/neg-custom-args/captures/i15921.scala new file mode 100644 index 000000000000..233ef23991fc --- /dev/null +++ b/tests/neg-custom-args/captures/i15921.scala @@ -0,0 +1,12 @@ +trait Stream { def close(): Unit = (); def write(x: Any): Unit = () } + +object Test { + def usingLogFile[T](op: (c: Stream^) => T): T = + val logFile = new Stream { } + val result = op(logFile) + logFile.close() + result + + val later = usingLogFile { f => () => f.write(0) } // error + later() // writing to closed file! 
+} diff --git a/tests/neg-custom-args/captures/i15923-cases.scala b/tests/neg-custom-args/captures/i15923-cases.scala deleted file mode 100644 index 5fbb95355a60..000000000000 --- a/tests/neg-custom-args/captures/i15923-cases.scala +++ /dev/null @@ -1,15 +0,0 @@ -trait Cap { def use(): Int } -type Id[X] = [T] -> (op: X => T) -> T -def mkId[X](x: X): Id[X] = [T] => (op: X => T) => op(x) - -def foo(x: Id[{*} Cap]) = { - x(_.use()) // error -} - -def bar(io: {*} Cap, x: Id[{io} Cap]) = { - x(_.use()) -} - -def barAlt(a: {*} Cap, b: {*} Cap, x: Id[{a, b} Cap]) = { - x(_.use()) -} diff --git a/tests/neg-custom-args/captures/i15923.scala b/tests/neg-custom-args/captures/i15923.scala index ac7ee995150e..3994b34f5928 100644 --- a/tests/neg-custom-args/captures/i15923.scala +++ b/tests/neg-custom-args/captures/i15923.scala @@ -3,12 +3,12 @@ type Id[X] = [T] -> (op: X => T) -> T def mkId[X](x: X): Id[X] = [T] => (op: X => T) => op(x) def bar() = { - def withCap[X](op: ({*} Cap) => X): X = { - val cap: {*} Cap = new Cap { def use() = { println("cap is used"); 0 } } + def withCap[sealed X](op: (Cap^) => X): X = { + val cap: Cap^ = new Cap { def use() = { println("cap is used"); 0 } } val result = op(cap) result } - val leak = withCap(cap => mkId(cap)) - leak { cap => cap.use() } // error -} + val leak = withCap(cap => mkId(cap)) // error + leak { cap => cap.use() } +} \ No newline at end of file diff --git a/tests/neg-custom-args/captures/i16114.scala b/tests/neg-custom-args/captures/i16114.scala index cc491226f9df..d22c7f02d5fb 100644 --- a/tests/neg-custom-args/captures/i16114.scala +++ b/tests/neg-custom-args/captures/i16114.scala @@ -1,46 +1,46 @@ trait Cap { def use(): Int; def close(): Unit } -def mkCap(): {*} Cap = ??? +def mkCap(): Cap^ = ??? 
def expect[T](x: T): x.type = x -def withCap[T](op: ({*} Cap) => T): T = { - val cap: {*} Cap = mkCap() +def withCap[T](op: Cap^ => T): T = { + val cap: Cap^ = mkCap() val result = op(cap) cap.close() result } -def main(fs: {*} Cap): Unit = { - def badOp(io: {*} Cap): {} Unit -> Unit = { - val op1: {io} Unit -> Unit = (x: Unit) => // error // limitation - expect[{*} Cap] { +def main(fs: Cap^): Unit = { + def badOp(io: Cap^{cap}): Unit ->{} Unit = { + val op1: Unit ->{io} Unit = (x: Unit) => // error // limitation + expect[Cap^] { io.use() fs } - val op2: {fs} Unit -> Unit = (x: Unit) => // error // limitation - expect[{*} Cap] { + val op2: Unit ->{fs} Unit = (x: Unit) => // error // limitation + expect[Cap^] { fs.use() io } - val op3: {io} Unit -> Unit = (x: Unit) => // ok - expect[{*} Cap] { + val op3: Unit ->{io} Unit = (x: Unit) => // ok + expect[Cap^] { io.use() io } - val op4: {} Unit -> Unit = (x: Unit) => // ok - expect[{*} Cap](io) + val op4: Unit ->{} Unit = (x: Unit) => // ok + expect[Cap^](io) - val op: {} Unit -> Unit = (x: Unit) => // error - expect[{*} Cap] { + val op: Unit -> Unit = (x: Unit) => // error + expect[Cap^] { io.use() io } op } - val leaked: {} Unit -> Unit = withCap(badOp) + val leaked: Unit -> Unit = withCap(badOp) leaked(()) } diff --git a/tests/neg-custom-args/captures/impurefuns.scala b/tests/neg-custom-args/captures/impurefuns.scala new file mode 100644 index 000000000000..d15d9a466307 --- /dev/null +++ b/tests/neg-custom-args/captures/impurefuns.scala @@ -0,0 +1,3 @@ +def f(x: Object^): Any = + val f: Int =>{x} Int = ??? 
// error // error // error + f diff --git a/tests/neg-custom-args/captures/inner-classes.scala b/tests/neg-custom-args/captures/inner-classes.scala index cf4073b36f81..181b830e4996 100644 --- a/tests/neg-custom-args/captures/inner-classes.scala +++ b/tests/neg-custom-args/captures/inner-classes.scala @@ -5,21 +5,21 @@ object test: def foo(fs: FileSystem) = trait LazyList[+A]: - this: {fs} LazyList[A] => + this: LazyList[A]^{fs} => def isEmpty: Boolean def head: A - def tail: {this} LazyList[A] + def tail: LazyList[A]^{this} object LazyNil extends LazyList[Nothing]: def isEmpty: Boolean = true def head = ??? def tail = ??? - final class LazyCons[+T](val x: T, val xs: () => {*} LazyList[T]) extends LazyList[T]: // error + final class LazyCons[+T](val x: T, val xs: () => LazyList[T]^) extends LazyList[T]: // error def isEmpty = false def head = x - def tail: {this} LazyList[T] = xs() + def tail: LazyList[T]^{this} = xs() end LazyCons new LazyCons(1, () => LazyNil) diff --git a/tests/neg-custom-args/captures/io.scala b/tests/neg-custom-args/captures/io.scala index ae686d6b154e..f481bf357fc8 100644 --- a/tests/neg-custom-args/captures/io.scala +++ b/tests/neg-custom-args/captures/io.scala @@ -3,17 +3,17 @@ sealed trait IO: def puts(msg: Any): Unit = println(msg) def test1 = - val IO : IO @retains(caps.*) = new IO {} + val IO : IO @retains(caps.cap) = new IO {} def foo = {IO; IO.puts("hello") } val x : () -> Unit = () => foo // error: Found: (() -> Unit) retains IO; Required: () -> Unit def test2 = - val IO : IO @retains(caps.*) = new IO {} - def puts(msg: Any, io: IO @retains(caps.*)) = println(msg) + val IO : IO @retains(caps.cap) = new IO {} + def puts(msg: Any, io: IO @retains(caps.cap)) = println(msg) def foo() = puts("hello", IO) val x : () -> Unit = () => foo() // error: Found: (() -> Unit) retains IO; Required: () -> Unit -type Capability[T] = T @retains(caps.*) +type Capability[T] = T @retains(caps.cap) def test3 = val IO : Capability[IO] = new IO {} diff --git 
a/tests/neg-custom-args/captures/lazylist.check b/tests/neg-custom-args/captures/lazylist.check index e43538ad97f7..4b7611fc3fb7 100644 --- a/tests/neg-custom-args/captures/lazylist.check +++ b/tests/neg-custom-args/captures/lazylist.check @@ -1,42 +1,42 @@ --- [E163] Declaration Error: tests/neg-custom-args/captures/lazylist.scala:22:6 ---------------------------------------- -22 | def tail: {*} LazyList[Nothing] = ??? // error overriding - | ^ - | error overriding method tail in class LazyList of type -> lazylists.LazyList[Nothing]; - | method tail of type -> {*} lazylists.LazyList[Nothing] has incompatible type +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazylist.scala:17:15 ------------------------------------- +17 | def tail = xs() // error + | ^^^^ + | Found: lazylists.LazyList[T]^{LazyCons.this.xs} + | Required: lazylists.LazyList[T] | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazylist.scala:35:29 ------------------------------------- 35 | val ref1c: LazyList[Int] = ref1 // error | ^^^^ - | Found: (ref1 : {cap1} lazylists.LazyCons[Int]{xs: {cap1} () -> {*} lazylists.LazyList[Int]}) + | Found: (ref1 : lazylists.LazyCons[Int]{val xs: () ->{cap1} lazylists.LazyList[Int]^}^{cap1}) | Required: lazylists.LazyList[Int] | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazylist.scala:37:36 ------------------------------------- -37 | val ref2c: {ref1} LazyList[Int] = ref2 // error +37 | val ref2c: LazyList[Int]^{ref1} = ref2 // error | ^^^^ - | Found: (ref2 : {cap2, ref1} lazylists.LazyList[Int]) - | Required: {ref1} lazylists.LazyList[Int] + | Found: (ref2 : lazylists.LazyList[Int]^{cap2, ref1}) + | Required: lazylists.LazyList[Int]^{ref1} | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazylist.scala:39:36 
------------------------------------- -39 | val ref3c: {cap2} LazyList[Int] = ref3 // error +39 | val ref3c: LazyList[Int]^{cap2} = ref3 // error | ^^^^ - | Found: (ref3 : {cap2, ref1} lazylists.LazyList[Int]) - | Required: {cap2} lazylists.LazyList[Int] + | Found: (ref3 : lazylists.LazyList[Int]^{cap2, ref1}) + | Required: lazylists.LazyList[Int]^{cap2} | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazylist.scala:41:48 ------------------------------------- -41 | val ref4c: {cap1, ref3, cap3} LazyList[Int] = ref4 // error +41 | val ref4c: LazyList[Int]^{cap1, ref3, cap3} = ref4 // error | ^^^^ - | Found: (ref4 : {cap3, cap2, ref1, cap1} lazylists.LazyList[Int]) - | Required: {cap1, ref3, cap3} lazylists.LazyList[Int] + | Found: (ref4 : lazylists.LazyList[Int]^{cap3, cap2, ref1, cap1}) + | Required: lazylists.LazyList[Int]^{cap1, ref3, cap3} + | + | longer explanation available when compiling with `-explain` +-- [E164] Declaration Error: tests/neg-custom-args/captures/lazylist.scala:22:6 ---------------------------------------- +22 | def tail: LazyList[Nothing]^ = ??? // error overriding + | ^ + | error overriding method tail in class LazyList of type -> lazylists.LazyList[Nothing]; + | method tail of type -> lazylists.LazyList[Nothing]^ has incompatible type | | longer explanation available when compiling with `-explain` --- Error: tests/neg-custom-args/captures/lazylist.scala:17:6 ----------------------------------------------------------- -17 | def tail = xs() // error: cannot have an inferred type - | ^^^^^^^^^^^^^^^ - | Non-local method tail cannot have an inferred result type - | {LazyCons.this.xs} lazylists.LazyList[? T] - | with non-empty capture set {LazyCons.this.xs}. - | The type needs to be declared explicitly. 
diff --git a/tests/neg-custom-args/captures/lazylist.scala b/tests/neg-custom-args/captures/lazylist.scala index 56bfc3ea6da2..e6e4d003f7ae 100644 --- a/tests/neg-custom-args/captures/lazylist.scala +++ b/tests/neg-custom-args/captures/lazylist.scala @@ -1,31 +1,31 @@ package lazylists abstract class LazyList[+T]: - this: ({*} LazyList[T]) => + this: LazyList[T]^ => def isEmpty: Boolean def head: T def tail: LazyList[T] - def map[U](f: T => U): {f, this} LazyList[U] = + def map[U](f: T => U): LazyList[U]^{f, this} = if isEmpty then LazyNil else LazyCons(f(head), () => tail.map(f)) -class LazyCons[+T](val x: T, val xs: () => {*} LazyList[T]) extends LazyList[T]: +class LazyCons[+T](val x: T, val xs: () => LazyList[T]^) extends LazyList[T]: def isEmpty = false def head = x - def tail = xs() // error: cannot have an inferred type + def tail = xs() // error object LazyNil extends LazyList[Nothing]: def isEmpty = true def head = ??? - def tail: {*} LazyList[Nothing] = ??? // error overriding + def tail: LazyList[Nothing]^ = ??? 
// error overriding -def map[A, B](xs: {*} LazyList[A], f: A => B): {f, xs} LazyList[B] = +def map[A, B](xs: LazyList[A]^, f: A => B): LazyList[B]^{f, xs} = xs.map(f) class CC -type Cap = {*} CC +type Cap = CC^ def test(cap1: Cap, cap2: Cap, cap3: Cap) = def f[T](x: LazyList[T]): LazyList[T] = if cap1 == cap1 then x else LazyNil @@ -34,8 +34,8 @@ def test(cap1: Cap, cap2: Cap, cap3: Cap) = val ref1 = LazyCons(1, () => f(LazyNil)) val ref1c: LazyList[Int] = ref1 // error val ref2 = map(ref1, g) - val ref2c: {ref1} LazyList[Int] = ref2 // error + val ref2c: LazyList[Int]^{ref1} = ref2 // error val ref3 = ref1.map(g) - val ref3c: {cap2} LazyList[Int] = ref3 // error + val ref3c: LazyList[Int]^{cap2} = ref3 // error val ref4 = (if cap1 == cap2 then ref1 else ref2).map(h) - val ref4c: {cap1, ref3, cap3} LazyList[Int] = ref4 // error + val ref4c: LazyList[Int]^{cap1, ref3, cap3} = ref4 // error diff --git a/tests/neg-custom-args/captures/lazylists-exceptions.check b/tests/neg-custom-args/captures/lazylists-exceptions.check index bd6fad047fe9..f58ed265d3be 100644 --- a/tests/neg-custom-args/captures/lazylists-exceptions.check +++ b/tests/neg-custom-args/captures/lazylists-exceptions.check @@ -1,8 +1,9 @@ -- Error: tests/neg-custom-args/captures/lazylists-exceptions.scala:36:2 ----------------------------------------------- 36 | try // error | ^ - | The expression's type {*} LazyList[Int] is not allowed to capture the root capability `*`. - | This usually means that a capability persists longer than its allowed lifetime. + | Result of `try` cannot have type LazyList[Int]^ since + | that type captures the root capability `cap`. + | This is often caused by a locally generated exception capability leaking as part of its result. 
37 | tabulate(10) { i => 38 | if i > 9 then throw Ex1() 39 | i * i diff --git a/tests/neg-custom-args/captures/lazylists-exceptions.scala b/tests/neg-custom-args/captures/lazylists-exceptions.scala index 6cba934d61e8..6a72facf7285 100644 --- a/tests/neg-custom-args/captures/lazylists-exceptions.scala +++ b/tests/neg-custom-args/captures/lazylists-exceptions.scala @@ -1,31 +1,31 @@ import language.experimental.saferExceptions trait LazyList[+A]: - this: {*} LazyList[A] => + this: LazyList[A]^ => def isEmpty: Boolean def head: A - def tail: {this} LazyList[A] + def tail: LazyList[A]^{this} object LazyNil extends LazyList[Nothing]: def isEmpty: Boolean = true def head = ??? def tail = ??? -final class LazyCons[+T](val x: T, val xs: () => {*} LazyList[T]) extends LazyList[T]: - this: {*} LazyList[T] => +final class LazyCons[+T](val x: T, val xs: () => LazyList[T]^) extends LazyList[T]: + this: LazyList[T]^ => def isEmpty = false def head = x - def tail: {this} LazyList[T] = xs() + def tail: LazyList[T]^{this} = xs() end LazyCons extension [A](x: A) - def #:(xs1: => {*} LazyList[A]): {xs1} LazyList[A] = + def #:(xs1: => LazyList[A]^): LazyList[A]^{xs1} = LazyCons(x, () => xs1) -def tabulate[A](n: Int)(gen: Int => A): {gen} LazyList[A] = - def recur(i: Int): {gen} LazyList[A] = +def tabulate[A](n: Int)(gen: Int => A): LazyList[A]^{gen} = + def recur(i: Int): LazyList[A]^{gen} = if i == n then LazyNil else gen(i) #: recur(i + 1) recur(0) diff --git a/tests/neg-custom-args/captures/lazylists1.check b/tests/neg-custom-args/captures/lazylists1.check index f91e2500dc15..127a0563c3c9 100644 --- a/tests/neg-custom-args/captures/lazylists1.check +++ b/tests/neg-custom-args/captures/lazylists1.check @@ -1,7 +1,7 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazylists1.scala:25:66 ----------------------------------- -25 | def concat(other: {f} LazyList[A]): {this, f} LazyList[A] = ??? 
: ({xs, f} LazyList[A]) // error +25 | def concat(other: LazyList[A]^{f}): LazyList[A]^{this, f} = ??? : (LazyList[A]^{xs, f}) // error | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | Found: {xs, f} LazyList[A] - | Required: {Mapped.this, f} LazyList[A] + | Found: LazyList[A]^{xs, f} + | Required: LazyList[A]^{Mapped.this, f} | | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/lazylists1.scala b/tests/neg-custom-args/captures/lazylists1.scala index c6475223b783..99472c13ebec 100644 --- a/tests/neg-custom-args/captures/lazylists1.scala +++ b/tests/neg-custom-args/captures/lazylists1.scala @@ -1,27 +1,27 @@ class CC -type Cap = {*} CC +type Cap = CC^{cap} trait LazyList[+A]: - this: ({*} LazyList[A]) => + this: LazyList[A]^{cap} => def isEmpty: Boolean def head: A - def tail: {this} LazyList[A] + def tail: LazyList[A]^{this} object LazyNil extends LazyList[Nothing]: def isEmpty: Boolean = true def head = ??? def tail = ??? -extension [A](xs: {*} LazyList[A]) - def map[B](f: A => B): {xs, f} LazyList[B] = +extension [A](xs: LazyList[A]^{cap}) + def map[B](f: A => B): LazyList[B]^{xs, f} = final class Mapped extends LazyList[B]: - this: ({xs, f} Mapped) => + this: (Mapped^{xs, f}) => def isEmpty = false def head: B = f(xs.head) - def tail: {this} LazyList[B] = xs.tail.map(f) // OK - def drop(n: Int): {this} LazyList[B] = ??? : ({xs, f} LazyList[B]) // OK - def concat(other: {f} LazyList[A]): {this, f} LazyList[A] = ??? : ({xs, f} LazyList[A]) // error + def tail: LazyList[B]^{this} = xs.tail.map(f) // OK + def drop(n: Int): LazyList[B]^{this} = ??? : (LazyList[B]^{xs, f}) // OK + def concat(other: LazyList[A]^{f}): LazyList[A]^{this, f} = ??? 
: (LazyList[A]^{xs, f}) // error new Mapped diff --git a/tests/neg-custom-args/captures/lazylists2.check b/tests/neg-custom-args/captures/lazylists2.check index 41881b57da24..72efbc08f8e2 100644 --- a/tests/neg-custom-args/captures/lazylists2.check +++ b/tests/neg-custom-args/captures/lazylists2.check @@ -1,31 +1,24 @@ --- [E163] Declaration Error: tests/neg-custom-args/captures/lazylists2.scala:50:10 ------------------------------------- -50 | def tail: {xs, f} LazyList[B] = xs.tail.map(f) // error - | ^ - | error overriding method tail in trait LazyList of type -> {Mapped.this} LazyList[B]; - | method tail of type -> {xs, f} LazyList[B] has incompatible type - | - | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazylists2.scala:18:4 ------------------------------------ 18 | final class Mapped extends LazyList[B]: // error | ^ - | Found: {f, xs} LazyList[? B] - | Required: {f} LazyList[B] -19 | this: ({xs, f} Mapped) => + | Found: LazyList[B^?]^{f, xs} + | Required: LazyList[B]^{f} +19 | this: (Mapped^{xs, f}) => 20 | def isEmpty = false 21 | def head: B = f(xs.head) -22 | def tail: {this} LazyList[B] = xs.tail.map(f) +22 | def tail: LazyList[B]^{this} = xs.tail.map(f) 23 | new Mapped | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazylists2.scala:27:4 ------------------------------------ 27 | final class Mapped extends LazyList[B]: // error | ^ - | Found: {f, xs} LazyList[? 
B] - | Required: {xs} LazyList[B] -28 | this: ({xs, f} Mapped) => + | Found: LazyList[B^?]^{f, xs} + | Required: LazyList[B]^{xs} +28 | this: Mapped^{xs, f} => 29 | def isEmpty = false 30 | def head: B = f(xs.head) -31 | def tail: {this} LazyList[B] = xs.tail.map(f) +31 | def tail: LazyList[B]^{this} = xs.tail.map(f) 32 | new Mapped | | longer explanation available when compiling with `-explain` @@ -33,10 +26,22 @@ 40 | def head: B = f(xs.head) // error | ^ |(f : A => B) cannot be referenced here; it is not included in the allowed capture set {xs} of the self type of class Mapped --- Error: tests/neg-custom-args/captures/lazylists2.scala:41:49 -------------------------------------------------------- -41 | def tail: {this} LazyList[B] = xs.tail.map(f) // error - | ^ +-- Error: tests/neg-custom-args/captures/lazylists2.scala:41:48 -------------------------------------------------------- +41 | def tail: LazyList[B]^{this}= xs.tail.map(f) // error + | ^ |(f : A => B) cannot be referenced here; it is not included in the allowed capture set {xs} of the self type of class Mapped +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazylists2.scala:45:4 ------------------------------------ +45 | final class Mapped extends LazyList[B]: // error + | ^ + | Found: LazyList[B^?]^{f, xs} + | Required: LazyList[B]^{xs} +46 | this: (Mapped^{xs, f}) => +47 | def isEmpty = false +48 | def head: B = f(xs.head) +49 | def tail: LazyList[B]^{xs, f} = xs.tail.map(f) +50 | new Mapped + | + | longer explanation available when compiling with `-explain` -- Error: tests/neg-custom-args/captures/lazylists2.scala:60:10 -------------------------------------------------------- 60 | class Mapped2 extends Mapped: // error | ^ diff --git a/tests/neg-custom-args/captures/lazylists2.scala b/tests/neg-custom-args/captures/lazylists2.scala index 7b661e931441..f6c1cf95a8ed 100644 --- a/tests/neg-custom-args/captures/lazylists2.scala +++ b/tests/neg-custom-args/captures/lazylists2.scala @@ -1,62 
+1,62 @@ class CC -type Cap = {*} CC +type Cap = CC^ trait LazyList[+A]: - this: ({*} LazyList[A]) => + this: LazyList[A]^ => def isEmpty: Boolean def head: A - def tail: {this} LazyList[A] + def tail: LazyList[A]^{this} object LazyNil extends LazyList[Nothing]: def isEmpty: Boolean = true def head = ??? def tail = ??? -extension [A](xs: {*} LazyList[A]) - def map[B](f: A => B): {f} LazyList[B] = +extension [A](xs: LazyList[A]^) + def map[B](f: A => B): LazyList[B]^{f} = final class Mapped extends LazyList[B]: // error - this: ({xs, f} Mapped) => + this: (Mapped^{xs, f}) => def isEmpty = false def head: B = f(xs.head) - def tail: {this} LazyList[B] = xs.tail.map(f) + def tail: LazyList[B]^{this} = xs.tail.map(f) new Mapped - def map2[B](f: A => B): {xs} LazyList[B] = + def map2[B](f: A => B): LazyList[B]^{xs} = final class Mapped extends LazyList[B]: // error - this: ({xs, f} Mapped) => + this: Mapped^{xs, f} => def isEmpty = false def head: B = f(xs.head) - def tail: {this} LazyList[B] = xs.tail.map(f) + def tail: LazyList[B]^{this} = xs.tail.map(f) new Mapped - def map3[B](f: A => B): {xs} LazyList[B] = + def map3[B](f: A => B): LazyList[B]^{xs} = final class Mapped extends LazyList[B]: - this: ({xs} Mapped) => + this: Mapped^{xs} => def isEmpty = false def head: B = f(xs.head) // error - def tail: {this} LazyList[B] = xs.tail.map(f) // error + def tail: LazyList[B]^{this}= xs.tail.map(f) // error new Mapped - def map4[B](f: A => B): {xs} LazyList[B] = - final class Mapped extends LazyList[B]: - this: ({xs, f} Mapped) => + def map4[B](f: A => B): LazyList[B]^{xs} = + final class Mapped extends LazyList[B]: // error + this: (Mapped^{xs, f}) => def isEmpty = false def head: B = f(xs.head) - def tail: {xs, f} LazyList[B] = xs.tail.map(f) // error + def tail: LazyList[B]^{xs, f} = xs.tail.map(f) new Mapped def map5[B](f: A => B): LazyList[B] = class Mapped extends LazyList[B]: - this: ({xs, f} Mapped) => + this: (Mapped^{xs, f}) => def isEmpty = false def head: B = 
f(xs.head) - def tail: {this} LazyList[B] = xs.tail.map(f) + def tail: LazyList[B]^{this} = xs.tail.map(f) class Mapped2 extends Mapped: // error this: Mapped => new Mapped2 diff --git a/tests/neg-custom-args/captures/lazyref.check b/tests/neg-custom-args/captures/lazyref.check index fcd98d0d67bd..8c91ec13b5d8 100644 --- a/tests/neg-custom-args/captures/lazyref.check +++ b/tests/neg-custom-args/captures/lazyref.check @@ -1,28 +1,28 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazyref.scala:19:28 -------------------------------------- 19 | val ref1c: LazyRef[Int] = ref1 // error | ^^^^ - | Found: (ref1 : {cap1} LazyRef[Int]{elem: {cap1} () -> Int}) + | Found: (ref1 : LazyRef[Int]{val elem: () ->{cap1} Int}^{cap1}) | Required: LazyRef[Int] | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazyref.scala:21:35 -------------------------------------- -21 | val ref2c: {cap2} LazyRef[Int] = ref2 // error +21 | val ref2c: LazyRef[Int]^{cap2} = ref2 // error | ^^^^ - | Found: (ref2 : {cap2, ref1} LazyRef[Int]{elem: {*} () -> Int}) - | Required: {cap2} LazyRef[Int] + | Found: (ref2 : LazyRef[Int]{val elem: () => Int}^{cap2, ref1}) + | Required: LazyRef[Int]^{cap2} | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazyref.scala:23:35 -------------------------------------- -23 | val ref3c: {ref1} LazyRef[Int] = ref3 // error +23 | val ref3c: LazyRef[Int]^{ref1} = ref3 // error | ^^^^ - | Found: (ref3 : {cap2, ref1} LazyRef[Int]{elem: {*} () -> Int}) - | Required: {ref1} LazyRef[Int] + | Found: (ref3 : LazyRef[Int]{val elem: () => Int}^{cap2, ref1}) + | Required: LazyRef[Int]^{ref1} | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazyref.scala:25:35 -------------------------------------- -25 | val ref4c: {cap1} LazyRef[Int] = ref4 // 
error +25 | val ref4c: LazyRef[Int]^{cap1} = ref4 // error | ^^^^ - | Found: (ref4 : {cap2, cap1} LazyRef[Int]{elem: {*} () -> Int}) - | Required: {cap1} LazyRef[Int] + | Found: (ref4 : LazyRef[Int]{val elem: () => Int}^{cap2, cap1}) + | Required: LazyRef[Int]^{cap1} | | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/lazyref.scala b/tests/neg-custom-args/captures/lazyref.scala index 8395e5cb42cd..99aa10d5d2b2 100644 --- a/tests/neg-custom-args/captures/lazyref.scala +++ b/tests/neg-custom-args/captures/lazyref.scala @@ -1,15 +1,15 @@ class CC -type Cap = {*} CC +type Cap = CC^ class LazyRef[T](val elem: () => T): val get: () => T = elem - def map[U](f: T => U): {f, this} LazyRef[U] = + def map[U](f: T => U): LazyRef[U]^{f, this} = new LazyRef(() => f(elem())) -def map[A, B](ref: {*} LazyRef[A], f: A => B): {f, ref} LazyRef[B] = +def map[A, B](ref: LazyRef[A]^, f: A => B): LazyRef[B]^{f, ref} = new LazyRef(() => f(ref.elem())) -def mapc[A, B]: (ref: {*} LazyRef[A], f: A => B) -> {f, ref} LazyRef[B] = +def mapc[A, B]: (ref: LazyRef[A]^, f: A => B) -> LazyRef[B]^{f, ref} = (ref1, f1) => map[A, B](ref1, f1) def test(cap1: Cap, cap2: Cap) = @@ -18,8 +18,8 @@ def test(cap1: Cap, cap2: Cap) = val ref1 = LazyRef(() => f(0)) val ref1c: LazyRef[Int] = ref1 // error val ref2 = map(ref1, g) - val ref2c: {cap2} LazyRef[Int] = ref2 // error + val ref2c: LazyRef[Int]^{cap2} = ref2 // error val ref3 = ref1.map(g) - val ref3c: {ref1} LazyRef[Int] = ref3 // error + val ref3c: LazyRef[Int]^{ref1} = ref3 // error val ref4 = (if cap1 == cap2 then ref1 else ref2).map(g) - val ref4c: {cap1} LazyRef[Int] = ref4 // error + val ref4c: LazyRef[Int]^{cap1} = ref4 // error diff --git a/tests/neg-custom-args/captures/nestedclass.check b/tests/neg-custom-args/captures/nestedclass.check index cb4421ece0ec..2987318caf4f 100644 --- a/tests/neg-custom-args/captures/nestedclass.check +++ b/tests/neg-custom-args/captures/nestedclass.check @@ 
-1,7 +1,7 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/nestedclass.scala:15:15 ---------------------------------- 15 | val xsc: C = xs // error | ^^ - | Found: (xs : {cap1} C) + | Found: (xs : C^{cap1}) | Required: C | | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/nestedclass.scala b/tests/neg-custom-args/captures/nestedclass.scala index 38adf7998868..0581f9ce9b2d 100644 --- a/tests/neg-custom-args/captures/nestedclass.scala +++ b/tests/neg-custom-args/captures/nestedclass.scala @@ -1,5 +1,5 @@ class CC -type Cap = {*} CC +type Cap = CC^ abstract class C: def head: String diff --git a/tests/neg-custom-args/captures/override-adapt-box-selftype.scala b/tests/neg-custom-args/captures/override-adapt-box-selftype.scala new file mode 100644 index 000000000000..f44add78e246 --- /dev/null +++ b/tests/neg-custom-args/captures/override-adapt-box-selftype.scala @@ -0,0 +1,48 @@ +import language.experimental.captureChecking + +class IO +class C + +object Test1 { + abstract class A[X] { this: A[X] => + def foo(x: X): X + } + + def test(io: IO^) = { + class B extends A[C^{io}] { // X =:= {io} C // error + override def foo(x: C^{io}): C^{io} = ??? + } + } +} + +def Test2(io: IO^{cap}, fs: IO^{io}, ct: IO^) = { + abstract class A[X] { this: A[X]^{io} => + def foo(x: X): X + } + + class B1 extends A[C^{io}] { + override def foo(x: C^{io}): C^{io} = ??? + } + + class B2 extends A[C^{ct}] { // error + override def foo(x: C^{ct}): C^{ct} = ??? + } + + class B3 extends A[C^{fs}] { + override def foo(x: C^{fs}): C^{fs} = ??? + } +} + +def Test3(io: IO^, ct: IO^) = { + abstract class A[X] { this: A[X]^ => + def foo(x: X): X + } + + class B1 extends A[C^{io}] { + override def foo(x: C^{io}): C^{io} = ??? + } + + class B2 extends A[C^{io, ct}] { + override def foo(x: C^{io, ct}): C^{io, ct} = ??? 
+ } +} diff --git a/tests/neg-custom-args/captures/override-adapt-box.scala b/tests/neg-custom-args/captures/override-adapt-box.scala new file mode 100644 index 000000000000..70023dfbc941 --- /dev/null +++ b/tests/neg-custom-args/captures/override-adapt-box.scala @@ -0,0 +1,14 @@ +import language.experimental.captureChecking + +abstract class A[X] { this: A[X]^{} => + def foo(x: X): X +} + +class IO +class C + +def test(io: IO^{cap}) = { + class B extends A[C^{io}] { // X =:= {io} C // error + override def foo(x: C^{io}): C^{io} = ??? + } +} diff --git a/tests/neg-custom-args/captures/override-boxed.scala b/tests/neg-custom-args/captures/override-boxed.scala index 720b50732f61..d66d28d15aaa 100644 --- a/tests/neg-custom-args/captures/override-boxed.scala +++ b/tests/neg-custom-args/captures/override-boxed.scala @@ -1,7 +1,8 @@ + class A -def test(x: {*} Any) = +def test(x: Any^{cap}) = abstract class Getter: - def get(): {x} A - class PolyGetter[T <: {x} A] extends Getter: + def get(): A^{x} + class PolyGetter[T <: A^{x}] extends Getter: override def get(): T = ??? // error diff --git a/tests/neg-custom-args/captures/real-try.check b/tests/neg-custom-args/captures/real-try.check index 9745470f219c..c8df3777bcfa 100644 --- a/tests/neg-custom-args/captures/real-try.check +++ b/tests/neg-custom-args/captures/real-try.check @@ -1,8 +1,15 @@ +-- [E129] Potential Issue Warning: tests/neg-custom-args/captures/real-try.scala:30:4 ---------------------------------- +30 | b.x + | ^^^ + | A pure expression does nothing in statement position; you may be omitting necessary parentheses + | + | longer explanation available when compiling with `-explain` -- Error: tests/neg-custom-args/captures/real-try.scala:12:2 ----------------------------------------------------------- 12 | try // error | ^ - | The expression's type {*} () -> Unit is not allowed to capture the root capability `*`. - | This usually means that a capability persists longer than its allowed lifetime. 
+ | Result of `try` cannot have type () => Unit since + | that type captures the root capability `cap`. + | This is often caused by a locally generated exception capability leaking as part of its result. 13 | () => foo(1) 14 | catch 15 | case _: Ex1 => ??? @@ -10,14 +17,20 @@ -- Error: tests/neg-custom-args/captures/real-try.scala:18:2 ----------------------------------------------------------- 18 | try // error | ^ - | The expression's type {*} () -> ? Cell[Unit] is not allowed to capture the root capability `*`. - | This usually means that a capability persists longer than its allowed lifetime. + | Result of `try` cannot have type () => Cell[Unit]^? since + | that type captures the root capability `cap`. + | This is often caused by a locally generated exception capability leaking as part of its result. 19 | () => Cell(foo(1)) 20 | catch 21 | case _: Ex1 => ??? 22 | case _: Ex2 => ??? --- Error: tests/neg-custom-args/captures/real-try.scala:30:4 ----------------------------------------------------------- -30 | b.x // error - | ^^^ - | The expression's type box {*} () -> Unit is not allowed to capture the root capability `*`. - | This usually means that a capability persists longer than its allowed lifetime. +-- Error: tests/neg-custom-args/captures/real-try.scala:24:10 ---------------------------------------------------------- +24 | val b = try // error + | ^ + | Result of `try` cannot have type Cell[box () => Unit]^? since + | the part box () => Unit of that type captures the root capability `cap`. + | This is often caused by a locally generated exception capability leaking as part of its result. +25 | Cell(() => foo(1))//: Cell[box {ev} () => Unit] <: Cell[box {cap} () => Unit] +26 | catch +27 | case _: Ex1 => ??? +28 | case _: Ex2 => ??? 
diff --git a/tests/neg-custom-args/captures/real-try.scala b/tests/neg-custom-args/captures/real-try.scala index 94e1eafd9af2..a826fdaa4af7 100644 --- a/tests/neg-custom-args/captures/real-try.scala +++ b/tests/neg-custom-args/captures/real-try.scala @@ -8,7 +8,7 @@ def foo(i: Int): (CanThrow[Ex1], CanThrow[Ex2]) ?-> Unit = class Cell[+T](val x: T) -def test() = +def test(): Unit = try // error () => foo(1) catch @@ -21,10 +21,10 @@ def test() = case _: Ex1 => ??? case _: Ex2 => ??? - val b = try // ok here, but error on use - Cell(() => foo(1))//: Cell[box {ev} () => Unit] <: Cell[box {*} () => Unit] + val b = try // error + Cell(() => foo(1))//: Cell[box {ev} () => Unit] <: Cell[box {cap} () => Unit] catch case _: Ex1 => ??? case _: Ex2 => ??? - b.x // error + b.x diff --git a/tests/neg-custom-args/captures/sealed-leaks.scala b/tests/neg-custom-args/captures/sealed-leaks.scala new file mode 100644 index 000000000000..bf46b52194c1 --- /dev/null +++ b/tests/neg-custom-args/captures/sealed-leaks.scala @@ -0,0 +1,20 @@ + +import java.io.* +def Test2 = + + def usingLogFile[sealed T](op: FileOutputStream^ => T): T = + val logFile = FileOutputStream("log") + val result = op(logFile) + logFile.close() + result + + val later = usingLogFile { f => () => f.write(0) } // error + val later2 = usingLogFile[(() => Unit) | Null] { f => () => f.write(0) } // error + + var x: (FileOutputStream^) | Null = null // error + def foo(f: FileOutputStream^, g: FileOutputStream^) = + var y = if ??? 
then f else g // error + + usingLogFile { f => x = f } + + later() \ No newline at end of file diff --git a/tests/neg-custom-args/captures/selftype.scala b/tests/neg-custom-args/captures/selftype.scala new file mode 100644 index 000000000000..21148f625a7a --- /dev/null +++ b/tests/neg-custom-args/captures/selftype.scala @@ -0,0 +1,4 @@ +@annotation.experimental class C(x: () => Unit) extends caps.Pure // error + +@annotation.experimental class D(@annotation.constructorOnly x: () => Unit) extends caps.Pure // ok + diff --git a/tests/neg-custom-args/captures/stack-alloc.scala b/tests/neg-custom-args/captures/stack-alloc.scala index b646c0736f2c..71b544dbe88d 100644 --- a/tests/neg-custom-args/captures/stack-alloc.scala +++ b/tests/neg-custom-args/captures/stack-alloc.scala @@ -5,7 +5,7 @@ class Pooled val stack = mutable.ArrayBuffer[Pooled]() var nextFree = 0 -def withFreshPooled[T](op: ({*} Pooled) => T): T = +def withFreshPooled[sealed T](op: Pooled^ => T): T = if nextFree >= stack.size then stack.append(new Pooled) val pooled = stack(nextFree) nextFree = nextFree + 1 diff --git a/tests/neg-custom-args/captures/try.check b/tests/neg-custom-args/captures/try.check index 30ebb910d34d..4af370bfba1a 100644 --- a/tests/neg-custom-args/captures/try.check +++ b/tests/neg-custom-args/captures/try.check @@ -1,42 +1,37 @@ --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/try.scala:23:49 ------------------------------------------ +-- Error: tests/neg-custom-args/captures/try.scala:23:16 --------------------------------------------------------------- 23 | val a = handle[Exception, CanThrow[Exception]] { // error - | ^ - | Found: ? ({*} CT[Exception]) -> {*} CT[? >: ? Exception <: ? 
Exception] - | Required: CanThrow[Exception] => box {*} CT[Exception] -24 | (x: CanThrow[Exception]) => x -25 | }{ - | - | longer explanation available when compiling with `-explain` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | Sealed type variable R cannot be instantiated to box CT[Exception]^ since + | that type captures the root capability `cap`. + | This is often caused by a local capability in the body of method handle + | leaking as part of its result. -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/try.scala:29:43 ------------------------------------------ 29 | val b = handle[Exception, () -> Nothing] { // error | ^ - | Found: ? (x: {*} CT[Exception]) -> {x} () -> ? Nothing - | Required: CanThrow[Exception] => () -> Nothing + | Found: (x: CT[Exception]^) ->? () ->{x} Nothing + | Required: (x$0: CanThrow[Exception]) => () -> Nothing 30 | (x: CanThrow[Exception]) => () => raise(new Exception)(using x) 31 | } { | | longer explanation available when compiling with `-explain` --- Error: tests/neg-custom-args/captures/try.scala:40:4 ---------------------------------------------------------------- -35 | val xx = handle { -36 | (x: CanThrow[Exception]) => -37 | () => -38 | raise(new Exception)(using x) -39 | 22 -40 | } { // error - | ^ - | The expression's type box {*} () -> Int is not allowed to capture the root capability `*`. - | This usually means that a capability persists longer than its allowed lifetime. 
-41 | (ex: Exception) => () => 22 -42 | } --- Error: tests/neg-custom-args/captures/try.scala:52:2 ---------------------------------------------------------------- -47 |val global = handle { +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/try.scala:52:2 ------------------------------------------- +47 |val global: () -> Int = handle { 48 | (x: CanThrow[Exception]) => 49 | () => 50 | raise(new Exception)(using x) 51 | 22 -52 |} { // error - | ^ - | The expression's type box {*} () -> Int is not allowed to capture the root capability `*`. - | This usually means that a capability persists longer than its allowed lifetime. +52 |} { // error + | ^ + | Found: () ->{x$0} Int + | Required: () -> Int 53 | (ex: Exception) => () => 22 54 |} + | + | longer explanation available when compiling with `-explain` +-- Error: tests/neg-custom-args/captures/try.scala:35:11 --------------------------------------------------------------- +35 | val xx = handle { // error + | ^^^^^^ + | Sealed type variable R cannot be instantiated to box () => Int since + | that type captures the root capability `cap`. + | This is often caused by a local capability in the body of method handle + | leaking as part of its result. 
diff --git a/tests/neg-custom-args/captures/try.scala b/tests/neg-custom-args/captures/try.scala index df7930f76af8..3c6f0605d8b9 100644 --- a/tests/neg-custom-args/captures/try.scala +++ b/tests/neg-custom-args/captures/try.scala @@ -2,8 +2,8 @@ import annotation.retains import language.experimental.erasedDefinitions class CT[E <: Exception] -type CanThrow[E <: Exception] = CT[E] @retains(caps.*) -type Top = Any @retains(caps.*) +type CanThrow[E <: Exception] = CT[E] @retains(caps.cap) +type Top = Any @retains(caps.cap) infix type throws[R, E <: Exception] = (erased CanThrow[E]) ?=> R @@ -14,7 +14,7 @@ def raise[E <: Exception](e: E): Nothing throws E = throw e def foo(x: Boolean): Int throws Fail = if x then 1 else raise(Fail()) -def handle[E <: Exception, R <: Top](op: CanThrow[E] => R)(handler: E => R): R = +def handle[E <: Exception, sealed R <: Top](op: CanThrow[E] => R)(handler: E => R): R = val x: CanThrow[E] = ??? try op(x) catch case ex: E => handler(ex) @@ -32,23 +32,23 @@ def test = (ex: Exception) => ??? 
} - val xx = handle { + val xx = handle { // error (x: CanThrow[Exception]) => () => raise(new Exception)(using x) 22 - } { // error + } { (ex: Exception) => () => 22 } val yy = xx :: Nil yy // OK -val global = handle { +val global: () -> Int = handle { (x: CanThrow[Exception]) => () => raise(new Exception)(using x) 22 -} { // error +} { // error (ex: Exception) => () => 22 -} \ No newline at end of file +} diff --git a/tests/neg-custom-args/captures/try3.scala b/tests/neg-custom-args/captures/try3.scala index 8c5bc18bf3be..4c6835353c3f 100644 --- a/tests/neg-custom-args/captures/try3.scala +++ b/tests/neg-custom-args/captures/try3.scala @@ -1,10 +1,10 @@ import java.io.IOException class CT[E] -type CanThrow[E] = {*} CT[E] -type Top = {*} Any +type CanThrow[E] = CT[E]^ +type Top = Any^ -def handle[E <: Exception, T <: Top](op: CanThrow[E] ?=> T)(handler: E => T): T = +def handle[E <: Exception, sealed T <: Top](op: CanThrow[E] ?=> T)(handler: E => T): T = val x: CanThrow[E] = ??? try op(using x) catch case ex: E => handler(ex) @@ -14,12 +14,12 @@ def raise[E <: Exception](ex: E)(using CanThrow[E]): Nothing = @main def Test: Int = def f(a: Boolean) = - handle { + handle { // error if !a then raise(IOException()) (b: Boolean) => if !b then raise(IOException()) 0 - } { // error + } { ex => (b: Boolean) => -1 } val g = f(true) diff --git a/tests/neg-custom-args/captures/unbox.scala b/tests/neg-custom-args/captures/unbox.scala index c615cf1d9176..33702a954068 100644 --- a/tests/neg-custom-args/captures/unbox.scala +++ b/tests/neg-custom-args/captures/unbox.scala @@ -1,4 +1,5 @@ -type Proc = {*} () => Unit +import language.`3.2` +type Proc = () => Unit val xs: List[Proc] = ??? 
diff --git a/tests/neg-custom-args/captures/usingLogFile-alt.check b/tests/neg-custom-args/captures/usingLogFile-alt.check new file mode 100644 index 000000000000..31e97b7dfda1 --- /dev/null +++ b/tests/neg-custom-args/captures/usingLogFile-alt.check @@ -0,0 +1,7 @@ +-- Error: tests/neg-custom-args/captures/usingLogFile-alt.scala:18:2 --------------------------------------------------- +18 | usingFile( // error + | ^^^^^^^^^ + | Sealed type variable T cannot be instantiated to box () => Unit since + | that type captures the root capability `cap`. + | This is often caused by a local capability in the body of method usingFile + | leaking as part of its result. diff --git a/tests/neg-custom-args/captures/usingLogFile-alt.scala b/tests/neg-custom-args/captures/usingLogFile-alt.scala new file mode 100644 index 000000000000..6b529ee6f892 --- /dev/null +++ b/tests/neg-custom-args/captures/usingLogFile-alt.scala @@ -0,0 +1,23 @@ +// Reported in issue #17517 + +import language.experimental.captureChecking +import java.io.* + +object Test: + class Logger(f: OutputStream^): + def log(msg: String): Unit = ??? + + def usingFile[sealed T](name: String, op: OutputStream^ => T): T = + val f = new FileOutputStream(name) + val result = op(f) + f.close() + result + + def usingLogger[sealed T](f: OutputStream^)(op: Logger^{f} => T): T = ??? 
+ + usingFile( // error + "foo", + file => { + usingLogger(file)(l => () => l.log("test")) + } + ) diff --git a/tests/neg-custom-args/captures/usingLogFile.check b/tests/neg-custom-args/captures/usingLogFile.check index beb7ac23ed44..d3bc9082202c 100644 --- a/tests/neg-custom-args/captures/usingLogFile.check +++ b/tests/neg-custom-args/captures/usingLogFile.check @@ -1,35 +1,47 @@ --- Error: tests/neg-custom-args/captures/usingLogFile.scala:23:27 ------------------------------------------------------ +-- Error: tests/neg-custom-args/captures/usingLogFile.scala:31:6 ------------------------------------------------------- +31 | var later3: () => Unit = () => () // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | Mutable variable later3 cannot have type box () => Unit since + | that type captures the root capability `cap`. + | This restriction serves to prevent local capabilities from escaping the scope where they are defined. +-- Error: tests/neg-custom-args/captures/usingLogFile.scala:35:6 ------------------------------------------------------- +35 | var later4: Cell[() => Unit] = Cell(() => ()) // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | Mutable variable later4 cannot have type Test2.Cell[() => Unit] since + | the part () => Unit of that type captures the root capability `cap`. + | This restriction serves to prevent local capabilities from escaping the scope where they are defined. +-- Error: tests/neg-custom-args/captures/usingLogFile.scala:23:14 ------------------------------------------------------ 23 | val later = usingLogFile { f => () => f.write(0) } // error - | ^^^^^^^^^^^^^^^^^^^^^^^^^ - | {f} () -> Unit cannot be box-converted to box ? 
() -> Unit - | since one of their capture sets contains the root capability `*` --- Error: tests/neg-custom-args/captures/usingLogFile.scala:29:9 ------------------------------------------------------- -29 | later2.x() // error - | ^^^^^^^^ - | The expression's type box {*} () -> Unit is not allowed to capture the root capability `*`. - | This usually means that a capability persists longer than its allowed lifetime. --- Error: tests/neg-custom-args/captures/usingLogFile.scala:33:2 ------------------------------------------------------- -33 | later3() // error - | ^^^^^^ - | box {*} () -> Unit cannot be box-converted to a type that can be selected or applied - | since one of their capture sets contains the root capability `*` --- Error: tests/neg-custom-args/captures/usingLogFile.scala:37:9 ------------------------------------------------------- -37 | later4.x() // error - | ^^^^^^^^ - | The expression's type box {*} () -> Unit is not allowed to capture the root capability `*`. - | This usually means that a capability persists longer than its allowed lifetime. --- Error: tests/neg-custom-args/captures/usingLogFile.scala:47:27 ------------------------------------------------------ + | ^^^^^^^^^^^^ + | Sealed type variable T cannot be instantiated to box () => Unit since + | that type captures the root capability `cap`. + | This is often caused by a local capability in the body of method usingLogFile + | leaking as part of its result. +-- Error: tests/neg-custom-args/captures/usingLogFile.scala:28:23 ------------------------------------------------------ +28 | private val later2 = usingLogFile { f => Cell(() => f.write(0)) } // error + | ^^^^^^^^^^^^ + | Sealed type variable T cannot be instantiated to box Test2.Cell[() => Unit]^? since + | the part () => Unit of that type captures the root capability `cap`. + | This is often caused by a local capability in the body of method usingLogFile + | leaking as part of its result. 
+-- Error: tests/neg-custom-args/captures/usingLogFile.scala:47:6 ------------------------------------------------------- 47 | val later = usingLogFile { f => () => f.write(0) } // error - | ^^^^^^^^^^^^^^^^^^^^^^^^^ - | {f} () -> Unit cannot be box-converted to box ? () -> Unit - | since one of their capture sets contains the root capability `*` --- Error: tests/neg-custom-args/captures/usingLogFile.scala:62:33 ------------------------------------------------------ + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | Non-local value later cannot have an inferred type + | () => Unit + | with non-empty capture set {x$0, cap}. + | The type needs to be declared explicitly. +-- Error: tests/neg-custom-args/captures/usingLogFile.scala:62:16 ------------------------------------------------------ 62 | val later = usingFile("out", f => (y: Int) => xs.foreach(x => f.write(x + y))) // error - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | {f} (x$0: Int) -> Unit cannot be box-converted to box ? (x$0: Int) -> Unit - | since one of their capture sets contains the root capability `*` --- Error: tests/neg-custom-args/captures/usingLogFile.scala:71:37 ------------------------------------------------------ -71 | val later = usingFile("logfile", usingLogger(_, l => () => l.log("test"))) // error - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | {_$1} () -> Unit cannot be box-converted to box ? () -> Unit - | since one of their capture sets contains the root capability `*` + | ^^^^^^^^^ + | Sealed type variable T cannot be instantiated to box (x$0: Int) => Unit since + | that type captures the root capability `cap`. + | This is often caused by a local capability in the body of method usingFile + | leaking as part of its result. 
+-- Error: tests/neg-custom-args/captures/usingLogFile.scala:71:16 ------------------------------------------------------ +71 | val later = usingFile("logfile", // error + | ^^^^^^^^^ + | Sealed type variable T cannot be instantiated to box () => Unit since + | that type captures the root capability `cap`. + | This is often caused by a local capability in the body of method usingFile + | leaking as part of its result. diff --git a/tests/neg-custom-args/captures/usingLogFile.scala b/tests/neg-custom-args/captures/usingLogFile.scala index 8b367239050d..e7c23573ca6e 100644 --- a/tests/neg-custom-args/captures/usingLogFile.scala +++ b/tests/neg-custom-args/captures/usingLogFile.scala @@ -3,7 +3,7 @@ import annotation.capability object Test1: - def usingLogFile[T](op: FileOutputStream => T): T = + def usingLogFile[sealed T](op: FileOutputStream => T): T = val logFile = FileOutputStream("log") val result = op(logFile) logFile.close() @@ -14,7 +14,7 @@ object Test1: object Test2: - def usingLogFile[T](op: ({*} FileOutputStream) => T): T = + def usingLogFile[sealed T](op: FileOutputStream^ => T): T = val logFile = FileOutputStream("log") val result = op(logFile) logFile.close() @@ -25,20 +25,20 @@ object Test2: class Cell[+T](val x: T) - private val later2 = usingLogFile { f => Cell(() => f.write(0)) } - later2.x() // error + private val later2 = usingLogFile { f => Cell(() => f.write(0)) } // error + later2.x() - var later3: () => Unit = () => () + var later3: () => Unit = () => () // error usingLogFile { f => later3 = () => f.write(0) } - later3() // error + later3() - var later4: Cell[() => Unit] = Cell(() => ()) + var later4: Cell[() => Unit] = Cell(() => ()) // error usingLogFile { f => later4 = Cell(() => f.write(0)) } - later4.x() // error + later4.x() object Test3: - def usingLogFile[T](op: ({*} FileOutputStream) => T) = + def usingLogFile[sealed T](op: FileOutputStream^ => T) = val logFile = FileOutputStream("log") val result = op(logFile) logFile.close() @@ 
-47,10 +47,10 @@ object Test3: val later = usingLogFile { f => () => f.write(0) } // error object Test4: - class Logger(f: {*} OutputStream): + class Logger(f: OutputStream^): def log(msg: String): Unit = ??? - def usingFile[T](name: String, op: ({*} OutputStream) => T): T = + def usingFile[sealed T](name: String, op: OutputStream^ => T): T = val f = new FileOutputStream(name) val result = op(f) f.close() @@ -63,10 +63,11 @@ object Test4: later(1) - def usingLogger[T](f: {*} OutputStream, op: ({f} Logger) => T): T = + def usingLogger[sealed T](f: OutputStream^, op: Logger^{f} => T): T = val logger = Logger(f) op(logger) def test = - val later = usingFile("logfile", usingLogger(_, l => () => l.log("test"))) // error + val later = usingFile("logfile", // error + usingLogger(_, l => () => l.log("test"))) // ok, since we can widen `l` to `file` instead of to `cap` later() diff --git a/tests/neg-custom-args/captures/vars.check b/tests/neg-custom-args/captures/vars.check index e4f28fd45e93..e7055c810bb0 100644 --- a/tests/neg-custom-args/captures/vars.check +++ b/tests/neg-custom-args/captures/vars.check @@ -1,32 +1,26 @@ +-- Error: tests/neg-custom-args/captures/vars.scala:13:6 --------------------------------------------------------------- +13 | var a: String => String = f // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | Mutable variable a cannot have type box String => String since + | that type captures the root capability `cap`. + | This restriction serves to prevent local capabilities from escaping the scope where they are defined. +-- Error: tests/neg-custom-args/captures/vars.scala:14:6 --------------------------------------------------------------- +14 | var b: List[String => String] = Nil // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | Mutable variable b cannot have type List[String => String] since + | the part String => String of that type captures the root capability `cap`. 
+ | This restriction serves to prevent local capabilities from escaping the scope where they are defined. -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/vars.scala:11:24 ----------------------------------------- 11 | val z2c: () -> Unit = z2 // error | ^^ - | Found: {z2} () -> Unit + | Found: () ->{z2} Unit | Required: () -> Unit | | longer explanation available when compiling with `-explain` --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/vars.scala:15:10 ----------------------------------------- -15 | val u = a // error - | ^ - | Found: (a : box {*} String -> String) - | Required: {*} (x$0: ? String) -> ? String - | - | longer explanation available when compiling with `-explain` --- Error: tests/neg-custom-args/captures/vars.scala:16:2 --------------------------------------------------------------- -16 | a("") // error - | ^ - | box {*} String -> String cannot be box-converted to a type that can be selected or applied - | since one of their capture sets contains the root capability `*` --- Error: tests/neg-custom-args/captures/vars.scala:17:4 --------------------------------------------------------------- -17 | b.head // error - | ^^^^^^ - | The expression's type box {*} String -> String is not allowed to capture the root capability `*`. - | This usually means that a capability persists longer than its allowed lifetime. --- Error: tests/neg-custom-args/captures/vars.scala:32:8 --------------------------------------------------------------- +-- Error: tests/neg-custom-args/captures/vars.scala:32:2 --------------------------------------------------------------- 32 | local { cap3 => // error - | ^ - | The expression's type box {*} (x$0: ? String) -> ? String is not allowed to capture the root capability `*`. - | This usually means that a capability persists longer than its allowed lifetime. 
-33 | def g(x: String): String = if cap3 == cap3 then "" else "a" -34 | g -35 | } + | ^^^^^ + | Sealed type variable T cannot be instantiated to box (x$0: String) => String since + | that type captures the root capability `cap`. + | This is often caused by a local capability in the body of method local + | leaking as part of its result. diff --git a/tests/neg-custom-args/captures/vars.scala b/tests/neg-custom-args/captures/vars.scala index 2ad8fec53619..b7761952167e 100644 --- a/tests/neg-custom-args/captures/vars.scala +++ b/tests/neg-custom-args/captures/vars.scala @@ -1,20 +1,20 @@ class CC -type Cap = {*} CC +type Cap = CC^ def test(cap1: Cap, cap2: Cap) = def f(x: String): String = if cap1 == cap1 then "" else "a" var x = f val y = x val z = () => if x("") == "" then "a" else "b" - val zc: {cap1} () -> String = z + val zc: () ->{cap1} String = z val z2 = () => { x = identity } val z2c: () -> Unit = z2 // error - var a: String => String = f // was error, now OK - var b: List[String => String] = Nil // was error, now OK - val u = a // error - a("") // error - b.head // error + var a: String => String = f // error + var b: List[String => String] = Nil // error + val u = a // was error, now ok + a("") // was error, now ok + b.head // was error, now ok def scope = val cap3: Cap = CC() @@ -27,7 +27,7 @@ def test(cap1: Cap, cap2: Cap) = val s = scope val sc: String => String = scope - def local[T](op: Cap -> T): T = op(CC()) + def local[sealed T](op: Cap -> T): T = op(CC()) local { cap3 => // error def g(x: String): String = if cap3 == cap3 then "" else "a" @@ -35,7 +35,7 @@ def test(cap1: Cap, cap2: Cap) = } class Ref: - var elem: {cap1} String -> String = null + var elem: String ->{cap1} String = null val r = Ref() r.elem = f diff --git a/tests/neg-custom-args/deprecation/14034b.scala b/tests/neg-custom-args/deprecation/14034b.scala index d22a945fe10d..07960bba9574 100644 --- a/tests/neg-custom-args/deprecation/14034b.scala +++ 
b/tests/neg-custom-args/deprecation/14034b.scala @@ -9,6 +9,6 @@ type Foo0 = Exp // error type Foo = Option[Exp] // error type Bar = Option[exp.type] // error type Baz = Exp | Int // error -type Quux = [X] =>> X match // error - case Exp => Int +type Quux = [X] =>> X match + case Exp => Int // error type Quuz[A <: Exp] = Int // error diff --git a/tests/neg-custom-args/deprecation/i11022.check b/tests/neg-custom-args/deprecation/i11022.check new file mode 100644 index 000000000000..464f2827c49e --- /dev/null +++ b/tests/neg-custom-args/deprecation/i11022.check @@ -0,0 +1,20 @@ +-- Error: tests/neg-custom-args/deprecation/i11022.scala:8:7 ----------------------------------------------------------- +8 |val a: CaseClass = CaseClass(42) // error: deprecated type // error: deprecated apply method + | ^^^^^^^^^ + | class CaseClass is deprecated: no CaseClass +-- Error: tests/neg-custom-args/deprecation/i11022.scala:8:19 ---------------------------------------------------------- +8 |val a: CaseClass = CaseClass(42) // error: deprecated type // error: deprecated apply method + | ^^^^^^^^^ + | class CaseClass is deprecated: no CaseClass +-- Error: tests/neg-custom-args/deprecation/i11022.scala:9:7 ----------------------------------------------------------- +9 |val b: CaseClass = new CaseClass(42) // error: deprecated type // error: deprecated class + | ^^^^^^^^^ + | class CaseClass is deprecated: no CaseClass +-- Error: tests/neg-custom-args/deprecation/i11022.scala:9:23 ---------------------------------------------------------- +9 |val b: CaseClass = new CaseClass(42) // error: deprecated type // error: deprecated class + | ^^^^^^^^^ + | class CaseClass is deprecated: no CaseClass +-- Error: tests/neg-custom-args/deprecation/i11022.scala:10:14 --------------------------------------------------------- +10 |val c: Unit = CaseClass(42).magic() // error: deprecated apply method + | ^^^^^^^^^ + | class CaseClass is deprecated: no CaseClass diff --git 
a/tests/neg-custom-args/deprecation/i11022.scala b/tests/neg-custom-args/deprecation/i11022.scala new file mode 100644 index 000000000000..4608017eeed9 --- /dev/null +++ b/tests/neg-custom-args/deprecation/i11022.scala @@ -0,0 +1,11 @@ +@deprecated("no CaseClass") +case class CaseClass(rgb: Int): + def magic(): Unit = () + +object CaseClass: + def notDeprecated(): Unit = () + +val a: CaseClass = CaseClass(42) // error: deprecated type // error: deprecated apply method +val b: CaseClass = new CaseClass(42) // error: deprecated type // error: deprecated class +val c: Unit = CaseClass(42).magic() // error: deprecated apply method +val d: Unit = CaseClass.notDeprecated() // compiles diff --git a/tests/neg-custom-args/erased/by-name.scala b/tests/neg-custom-args/erased/by-name.scala new file mode 100644 index 000000000000..707cfd96734b --- /dev/null +++ b/tests/neg-custom-args/erased/by-name.scala @@ -0,0 +1,4 @@ +def f(x: => Int, erased y: => Int) = x // error +def g(erased x: => Int, y: => Int) = y // error + +val h: (erased => Int, Int) => Int = (erased x, y) => y // error diff --git a/tests/neg-custom-args/erased/erased-in-tuples.scala b/tests/neg-custom-args/erased/erased-in-tuples.scala new file mode 100644 index 000000000000..11a251c3bd4d --- /dev/null +++ b/tests/neg-custom-args/erased/erased-in-tuples.scala @@ -0,0 +1,16 @@ +@main def Test() = + val x = 5 + val y = 7 + + val t1 = (x, erased y) // error + val t2 = (erased x, y) // error + val t1a = (x: Int, erased y: Int) // error + val t2a = (erased x: Int, y: Int) // error + + val nest = (x, (x, erased y)) // error + + def use(f: (Int, Int) => Any) = f(5, 6) + + use((_, erased _)) // error + + (x, erased y) // error diff --git a/tests/neg-custom-args/erased/i4060.scala b/tests/neg-custom-args/erased/i4060.scala new file mode 100644 index 000000000000..a1a2eee68dc0 --- /dev/null +++ b/tests/neg-custom-args/erased/i4060.scala @@ -0,0 +1,21 @@ +// See 
https://github.com/lampepfl/dotty/issues/4060#issuecomment-445808377 + +object App { + trait A { type L >: Any} + def upcast(erased a: A)(x: Any): a.L = x + erased val p: A { type L <: Nothing } = p + def coerce(x: Any): Int = upcast(p)(x) // error + + def coerceInline(x: Any): Int = upcast(compiletime.erasedValue[A {type L <: Nothing}])(x) // error + + trait B { type L <: Nothing } + def upcast_dep_parameter(erased a: B)(x: a.L) : Int = x + erased val q : B { type L >: Any } = compiletime.erasedValue + + def coerceInlineWithB(x: Any): Int = upcast_dep_parameter(q)(x) // error + + def main(args: Array[String]): Unit = { + println(coerce("Uh oh!")) + println(coerceInlineWithB("Uh oh!")) + } +} diff --git a/tests/neg-custom-args/erased/lambda-infer.scala b/tests/neg-custom-args/erased/lambda-infer.scala new file mode 100644 index 000000000000..2eebf8186b0d --- /dev/null +++ b/tests/neg-custom-args/erased/lambda-infer.scala @@ -0,0 +1,23 @@ +type F = (Int, erased Int) => Int + +erased class A + +@main def Test() = + val a: F = (x, y) => x + 1 // error: Expected F got (Int, Int) => Int + val b: F = (x, erased y) => x + 1 // ok + val c: F = (_, _) => 5 // error: Expected F got (Int, Int) => Int + val d: F = (_, erased _) => 5 // ok + + def use(f: F) = f(5, 6) + + use { (x, y) => x } // error: Expected F got (Int, Int) => Int + + def singleParam(f: (erased Int) => Int) = f(5) + + singleParam(x => 5) // error: Expected (erased Int) => Int got Int => Int + singleParam((erased x) => 5) // ok + + def erasedClass(f: A => Int) = f(new A) + + erasedClass(_ => 5) // ok since A is implicitly erased + diff --git a/tests/neg-custom-args/erased/multiple-args-consume.scala b/tests/neg-custom-args/erased/multiple-args-consume.scala new file mode 100644 index 000000000000..e4aaacca8969 --- /dev/null +++ b/tests/neg-custom-args/erased/multiple-args-consume.scala @@ -0,0 +1,13 @@ +def foo(erased x: Int, y: Int) = y +def bar(x: Int, erased y: Int) = x + +def consumeFoo(f: (erased x: Int, 
y: Int) => Int) = f(0, 1) + +val fooF: (erased x: Int, y: Int) => Int = foo +val barF: (x: Int, erased y: Int) => Int = bar + +val a = consumeFoo(foo) // ok +val b = consumeFoo(bar) // error + +val c = consumeFoo(fooF) // ok +val d = consumeFoo(barF) // error diff --git a/tests/neg-custom-args/erased/multiple-args.scala b/tests/neg-custom-args/erased/multiple-args.scala new file mode 100644 index 000000000000..fb9bce8e4573 --- /dev/null +++ b/tests/neg-custom-args/erased/multiple-args.scala @@ -0,0 +1,11 @@ +def foo(x: Int, erased y: Int): Int = x +def bar(erased x: Int, y: Int): Int = y + +val fooF: (x: Int, erased y: Int) => Int = foo + +val fooG: (erased x: Int, y: Int) => Int = foo // error + +val barF: (x: Int, erased y: Int) => Int = bar // error + +val barG: (erased x: Int, y: Int) => Int = bar + diff --git a/tests/neg-custom-args/erased/poly-functions.scala b/tests/neg-custom-args/erased/poly-functions.scala new file mode 100644 index 000000000000..000a2ca49cc9 --- /dev/null +++ b/tests/neg-custom-args/erased/poly-functions.scala @@ -0,0 +1,16 @@ +object Test: + // Poly functions with erased parameters are disallowed as an implementation restriction + + type T1 = [X] => (erased x: X, y: Int) => Int // error + type T2 = [X] => (x: X, erased y: Int) => X // error + + val t1 = [X] => (erased x: X, y: Int) => y // error + val t2 = [X] => (x: X, erased y: Int) => x // error + + // Erased classes should be detected too + erased class A + + type T3 = [X] => (x: A, y: X) => X // error + + val t3 = [X] => (x: A, y: X) => y // error + diff --git a/tests/neg-custom-args/explain/constructor-proxy-shadowing.check b/tests/neg-custom-args/explain/constructor-proxy-shadowing.check new file mode 100644 index 000000000000..db223ba33640 --- /dev/null +++ b/tests/neg-custom-args/explain/constructor-proxy-shadowing.check @@ -0,0 +1,75 @@ +-- [E177] Reference Error: tests/neg-custom-args/explain/constructor-proxy-shadowing.scala:10:12 ----------------------- +10 | val x = 
A22("") // error: shadowing + | ^^^ + | Reference to constructor proxy for class A22 in class A + | shadows outer reference to method A22 in object Test + | + | The instance needs to be created with an explicit `new`. + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | There is an ambiguity in the meaning of the call + | + | A22(...) + | + | It could mean creating an instance of class A22 in class A with + | + | new A22(...) + | + | Or it could mean calling method A22 in object Test as in + | + | A22(...) + | + | To disambiguate, use an explicit `new` if you mean the former, + | or use a full prefix for A22 if you mean the latter. + -------------------------------------------------------------------------------------------------------------------- +-- [E177] Reference Error: tests/neg-custom-args/explain/constructor-proxy-shadowing.scala:11:12 ----------------------- +11 | val y = A33("") // error: shadowing + | ^^^ + | Reference to constructor proxy for class A33 in class A + | shadows outer reference to object A33 in object Test + | + | The instance needs to be created with an explicit `new`. + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | There is an ambiguity in the meaning of the call + | + | A33(...) + | + | It could mean creating an instance of class A33 in class A with + | + | new A33(...) + | + | Or it could mean calling the apply method of object A33 in object Test as in + | + | A33.apply(...) 
+ | + | To disambiguate, use an explicit `new` if you mean the former, + | or use a full prefix for A33 if you mean the latter. + -------------------------------------------------------------------------------------------------------------------- +-- [E177] Reference Error: tests/neg-custom-args/explain/constructor-proxy-shadowing.scala:16:8 ------------------------ +16 |val x = Seq(3) // error: shadowing + | ^^^ + | Reference to constructor proxy for class Seq + | shadows outer reference to getter Seq in package scala + | + | The instance needs to be created with an explicit `new`. + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | There is an ambiguity in the meaning of the call + | + | Seq(...) + | + | It could mean creating an instance of class Seq with + | + | new Seq(...) + | + | Or it could mean calling the apply method of getter Seq in package scala as in + | + | Seq.apply(...) + | + | To disambiguate, use an explicit `new` if you mean the former, + | or use a full prefix for Seq if you mean the latter. + -------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg-custom-args/explain/constructor-proxy-shadowing.scala b/tests/neg-custom-args/explain/constructor-proxy-shadowing.scala new file mode 100644 index 000000000000..c47fc2f4859b --- /dev/null +++ b/tests/neg-custom-args/explain/constructor-proxy-shadowing.scala @@ -0,0 +1,16 @@ + +object Test extends App { + def A22(s: String): String = s + class A33(s: String) + object A33: + def apply(s: String) = ??? 
+ class A { + class A22(s: String) + class A33(s: String) + val x = A22("") // error: shadowing + val y = A33("") // error: shadowing + } +} + +class Seq(n: Int) +val x = Seq(3) // error: shadowing diff --git a/tests/neg-custom-args/explain/hidden-type-errors.check b/tests/neg-custom-args/explain/hidden-type-errors.check new file mode 100644 index 000000000000..551d1d7b16ba --- /dev/null +++ b/tests/neg-custom-args/explain/hidden-type-errors.check @@ -0,0 +1,23 @@ +-- [E007] Type Mismatch Error: tests/neg-custom-args/explain/hidden-type-errors/Test.scala:6:24 ------------------------ +6 | val x = X.doSomething("XXX") // error + | ^^^^^^^^^^^^^^^^^^^^ + | Found: String + | Required: Int + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | + | Tree: t12717.A.bar("XXX") + | I tried to show that + | String + | conforms to + | Int + | but the comparison trace ended with `false`: + | + | ==> String <: Int + | ==> String <: Int + | <== String <: Int = false + | <== String <: Int = false + | + | The tests were made under the empty constraint + --------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg-custom-args/hidden-type-errors/Macro.scala b/tests/neg-custom-args/explain/hidden-type-errors/Macro.scala similarity index 100% rename from tests/neg-custom-args/hidden-type-errors/Macro.scala rename to tests/neg-custom-args/explain/hidden-type-errors/Macro.scala diff --git a/tests/neg-custom-args/hidden-type-errors/Test.scala b/tests/neg-custom-args/explain/hidden-type-errors/Test.scala similarity index 100% rename from tests/neg-custom-args/hidden-type-errors/Test.scala rename to tests/neg-custom-args/explain/hidden-type-errors/Test.scala diff --git 
a/tests/neg-custom-args/i11637.check b/tests/neg-custom-args/explain/i11637.check similarity index 92% rename from tests/neg-custom-args/i11637.check rename to tests/neg-custom-args/explain/i11637.check index 0664a05f4f86..82424396a43b 100644 --- a/tests/neg-custom-args/i11637.check +++ b/tests/neg-custom-args/explain/i11637.check @@ -1,4 +1,4 @@ --- [E057] Type Mismatch Error: tests/neg-custom-args/i11637.scala:11:33 ------------------------------------------------ +-- [E057] Type Mismatch Error: tests/neg-custom-args/explain/i11637.scala:11:33 ---------------------------------------- 11 | var h = new HKT3_1[FunctorImpl](); // error // error | ^ | Type argument test2.FunctorImpl does not conform to upper bound [Generic2[T <: String] <: Set[T]] =>> Any @@ -26,7 +26,7 @@ | | The tests were made under the empty constraint -------------------------------------------------------------------------------------------------------------------- --- [E057] Type Mismatch Error: tests/neg-custom-args/i11637.scala:11:21 ------------------------------------------------ +-- [E057] Type Mismatch Error: tests/neg-custom-args/explain/i11637.scala:11:21 ---------------------------------------- 11 | var h = new HKT3_1[FunctorImpl](); // error // error | ^ | Type argument test2.FunctorImpl does not conform to upper bound [Generic2[T <: String] <: Set[T]] =>> Any diff --git a/tests/neg-custom-args/i11637.scala b/tests/neg-custom-args/explain/i11637.scala similarity index 100% rename from tests/neg-custom-args/i11637.scala rename to tests/neg-custom-args/explain/i11637.scala diff --git a/tests/neg-custom-args/i15575.check b/tests/neg-custom-args/explain/i15575.check similarity index 87% rename from tests/neg-custom-args/i15575.check rename to tests/neg-custom-args/explain/i15575.check index f69111efeb96..e254e0a5e22e 100644 --- a/tests/neg-custom-args/i15575.check +++ b/tests/neg-custom-args/explain/i15575.check @@ -1,4 +1,4 @@ --- [E057] Type Mismatch Error: 
tests/neg-custom-args/i15575.scala:3:27 ------------------------------------------------- +-- [E057] Type Mismatch Error: tests/neg-custom-args/explain/i15575.scala:3:27 ----------------------------------------- 3 | def bar[T]: Unit = foo[T & Any] // error | ^ | Type argument T & Any does not conform to lower bound Any @@ -18,7 +18,7 @@ | | The tests were made under the empty constraint --------------------------------------------------------------------------------------------------------------------- --- [E057] Type Mismatch Error: tests/neg-custom-args/i15575.scala:7:14 ------------------------------------------------- +-- [E057] Type Mismatch Error: tests/neg-custom-args/explain/i15575.scala:7:14 ----------------------------------------- 7 | val _ = foo[String] // error | ^ | Type argument String does not conform to lower bound CharSequence diff --git a/tests/neg-custom-args/i15575.scala b/tests/neg-custom-args/explain/i15575.scala similarity index 100% rename from tests/neg-custom-args/i15575.scala rename to tests/neg-custom-args/explain/i15575.scala diff --git a/tests/neg-custom-args/explain/i16601a.check b/tests/neg-custom-args/explain/i16601a.check new file mode 100644 index 000000000000..63be0d2cd2b2 --- /dev/null +++ b/tests/neg-custom-args/explain/i16601a.check @@ -0,0 +1,18 @@ +-- [E042] Type Error: tests/neg-custom-args/explain/i16601a.scala:1:27 ------------------------------------------------- +1 |@main def Test: Unit = new concurrent.ExecutionContext // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | ExecutionContext is a trait; it cannot be instantiated + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | Abstract classes and traits need to be extended by a concrete class or object + | to make their functionality accessible. 
+ | + | You may want to create an anonymous class extending ExecutionContext with + | class ExecutionContext { } + | + | or add a companion object with + | object ExecutionContext extends ExecutionContext + | + | You need to implement any abstract members in both cases. + --------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg-custom-args/explain/i16601a.scala b/tests/neg-custom-args/explain/i16601a.scala new file mode 100644 index 000000000000..2e058db0093c --- /dev/null +++ b/tests/neg-custom-args/explain/i16601a.scala @@ -0,0 +1 @@ +@main def Test: Unit = new concurrent.ExecutionContext // error \ No newline at end of file diff --git a/tests/neg-custom-args/explain/i16888.check b/tests/neg-custom-args/explain/i16888.check new file mode 100644 index 000000000000..53103576d158 --- /dev/null +++ b/tests/neg-custom-args/explain/i16888.check @@ -0,0 +1,14 @@ +-- [E172] Type Error: tests/neg-custom-args/explain/i16888.scala:1:38 -------------------------------------------------- +1 |def test = summon[scala.quoted.Quotes] // error + | ^ + | No given instance of type quoted.Quotes was found for parameter x of method summon in object Predef + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | Maybe this method is missing a `(using Quotes)` parameter. + | + | Maybe that splice `$ { ... }` is missing? + | Given instances of `Quotes` are generated from an enclosing splice `$ { ... }` (or `scala.staging.run` call). + | A splice can be thought as a method with the following signature. 
+ | def $[T](body: Quotes ?=> Expr[T]): T + --------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg-custom-args/explain/i16888.scala b/tests/neg-custom-args/explain/i16888.scala new file mode 100644 index 000000000000..9d3fd0f2f57e --- /dev/null +++ b/tests/neg-custom-args/explain/i16888.scala @@ -0,0 +1 @@ +def test = summon[scala.quoted.Quotes] // error diff --git a/tests/neg-custom-args/explain/labelNotFound.check b/tests/neg-custom-args/explain/labelNotFound.check new file mode 100644 index 000000000000..594a838aeeed --- /dev/null +++ b/tests/neg-custom-args/explain/labelNotFound.check @@ -0,0 +1,10 @@ +-- [E172] Type Error: tests/neg-custom-args/explain/labelNotFound.scala:2:30 ------------------------------------------- +2 | scala.util.boundary.break(1) // error + | ^ + |No given instance of type scala.util.boundary.Label[Int] was found for parameter label of method break in object boundary + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | A Label is generated from an enclosing `scala.util.boundary` call. + | Maybe that boundary is missing? 
+ --------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg-custom-args/explain/labelNotFound.scala b/tests/neg-custom-args/explain/labelNotFound.scala new file mode 100644 index 000000000000..2618600702da --- /dev/null +++ b/tests/neg-custom-args/explain/labelNotFound.scala @@ -0,0 +1,2 @@ +object Test: + scala.util.boundary.break(1) // error diff --git a/tests/neg-custom-args/fatal-warnings/i10994.scala b/tests/neg-custom-args/fatal-warnings/i10994.scala new file mode 100644 index 000000000000..ce5cb2cf3df9 --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/i10994.scala @@ -0,0 +1,2 @@ +def foo = true match + case (b: Boolean): Boolean => () // error diff --git a/tests/neg-custom-args/fatal-warnings/i15503-scala2/scala2-t11681.scala b/tests/neg-custom-args/fatal-warnings/i15503-scala2/scala2-t11681.scala new file mode 100644 index 000000000000..13d540dc2a5d --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/i15503-scala2/scala2-t11681.scala @@ -0,0 +1,110 @@ +// scalac: -Wunused:params +// + +import Answers._ + +trait InterFace { + /** Call something. */ + def call(a: Int, b: String, c: Double): Int +} + +trait BadAPI extends InterFace { + private def f(a: Int, + b: String, // error + c: Double): Int = { + println(c) + a + } + @deprecated("no warn in deprecated API", since="yesterday") + def g(a: Int, + b: String, // OK + c: Double): Int = { + println(c) + a + } + override def call(a: Int, + b: String, // OK + c: Double): Int = { + println(c) + a + } + + def meth(x: Int) = x + + override def equals(other: Any): Boolean = true // OK + + def i(implicit s: String) = answer // ok + + /* + def future(x: Int): Int = { + val y = 42 + val x = y // maybe option to warn only if shadowed + x + } + */ +} + +// mustn't alter warnings in super +trait PoorClient extends BadAPI { + override def meth(x: Int) = ??? 
// OK + override def f(a: Int, b: String, c: Double): Int = a + b.toInt + c.toInt +} + +class Unusing(u: Int) { // error + def f = ??? +} + +class Valuing(val u: Int) // OK + +class Revaluing(u: Int) { def f = u } // OK + +case class CaseyKasem(k: Int) // OK + +case class CaseyAtTheBat(k: Int)(s: String) // ok + +trait Ignorance { + def f(readResolve: Int) = answer // ok +} + +class Reusing(u: Int) extends Unusing(u) // OK + +// TODO: check +// class Main { +// def main(args: Array[String]): Unit = println("hello, args") // OK +// } + +trait Unimplementation { + def f(u: Int): Int = ??? // OK +} + +trait DumbStuff { + def f(implicit dummy: DummyImplicit) = answer // ok + def g(dummy: DummyImplicit) = answer // ok +} +trait Proofs { + def f[A, B](implicit ev: A =:= B) = answer // ok + def g[A, B](implicit ev: A <:< B) = answer // ok + def f2[A, B](ev: A =:= B) = answer // ok + def g2[A, B](ev: A <:< B) = answer // ok +} + +trait Anonymous { + def f = (i: Int) => answer // ok + + def f1 = (_: Int) => answer // OK + + def f2: Int => Int = _ + 1 // OK + + def g = for (i <- List(1)) yield answer // ok +} +trait Context[A] +trait Implicits { + def f[A](implicit ctx: Context[A]) = answer // ok + def g[A: Context] = answer // OK +} +class Bound[A: Context] // OK +object Answers { + def answer: Int = 42 +} + +val a$1 = 2 \ No newline at end of file diff --git a/tests/neg-custom-args/fatal-warnings/i15503a.scala b/tests/neg-custom-args/fatal-warnings/i15503a.scala new file mode 100644 index 000000000000..cd7282490fc9 --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/i15503a.scala @@ -0,0 +1,268 @@ +// scalac: -Wunused:imports + + +object FooUnused: + import collection.mutable.Set // error + import collection.mutable.{Map => MutMap} // error + import collection.mutable._ // error + +object FooWildcardUnused: + import collection.mutable._ // error + +object Foo: + import collection.mutable.Set // OK + import collection.mutable.{Map => MutMap} // OK + + val bar = Set() 
// OK + val baz = MutMap() // OK + +object FooWildcard: + import collection.mutable._ // OK + + val bar = Set() // OK + +object FooNestedUnused: + import collection.mutable.Set // error + object Nested: + def hello = 1 + +object FooNested: + import collection.mutable.Set // OK + object Nested: + def hello = Set() + +object FooGivenUnused: + import SomeGivenImports.given // error + +object FooGiven: + import SomeGivenImports.given // OK + import SomeGivenImports._ // error + + val foo = summon[Int] + +/** + * Import used as type name are considered + * as used. + * + * Import here are only used as types, not as + * Term + */ +object FooTypeName: + import collection.mutable.Set // OK + import collection.mutable.Map // OK + import collection.mutable.Seq // OK + import collection.mutable.ArrayBuilder // OK + import collection.mutable.ListBuffer // error + + def checkImplicit[A](using Set[A]) = () + def checkParamType[B](a: Map[B,B]): Seq[B] = ??? + def checkTypeParam[A] = () + + checkTypeParam[ArrayBuilder[Int]] + + +object InlineChecks: + object InlineFoo: + import collection.mutable.Set // ok + import collection.mutable.Map // error + inline def getSet = Set(1) + + object InlinedBar: + import collection.mutable.Set // ok + import collection.mutable.Map // error + val a = InlineFoo.getSet + +object MacroChecks: + object StringInterpol: + import collection.mutable.Set // OK + import collection.mutable.Map // OK + println(s"This is a mutableSet : ${Set[Map[Int,Int]]()}") + + +object InnerMostCheck: + import collection.mutable.* // error + def check = + import collection.mutable.* //OK + val a = Set(1) + +object IgnoreExclusion: + import collection.mutable.{Set => _} // OK + import collection.mutable.{Map => _} // OK + import collection.mutable.{ListBuffer} // error + def check = + val a = Set(1) + val b = Map(1 -> 2) +/** + * Some given values for the test + */ +object SomeGivenImports: + given Int = 0 + given String = "foo" + +/* BEGIN : Check on packages*/ +package 
testsamepackageimport: + package p { + class C + } + + package p { + import p._ // error + package q { + class U { + def f = new C + } + } + } +// ----------------------- + +package testpackageimport: + package a: + val x: Int = 0 + + package b: + import a._ // error + + +/* END : Check on packages*/ + +/* BEGIN : tests on meta-language features */ +object TestGivenCoversionScala2: + /* note: scala3 Conversion[U,T] do not require an import */ + import language.implicitConversions // OK + + implicit def doubleToInt(d:Double):Int = d.toInt + + def idInt(i:Int):Int = i + val someInt = idInt(1.0) + +object TestTailrecImport: + import annotation.tailrec // OK + @tailrec + def fac(x:Int, acc:Int = 1): Int = + if x == 0 then acc else fac(x - 1, acc * x) +/* END : tests on meta-language features */ + +/* BEGIN : tests on given import order */ +object GivenImportOrderAtoB: + class X + class Y extends X + object A { implicit val x: X = new X } + object B { implicit val y: Y = new Y } + class C { + import A._ // error + import B._ // OK + def t = implicitly[X] + } + +object GivenImportOrderBtoA: + class X + class Y extends X + object A { implicit val x: X = new X } + object B { implicit val y: Y = new Y } + class C { + import B._ // OK + import A._ // error + def t = implicitly[X] + } +/* END : tests on given import order */ + +/* Scala 2 implicits */ +object Scala2ImplicitsGiven: + object A: + implicit val x: Int = 1 + object B: + import A.given // OK + val b = summon[Int] + object C: + import A.given // error + val b = 1 + object D: + import A._ // OK + val b = summon[Int] + object E: + import A._ // error + val b = 1 + object F: + import A.x // OK + val b = summon[Int] + object G: + import A.x // error + val b = 1 + +// ------------------------------------- +object TestNewKeyword: + object Foo: + class Aa[T](val x: T) + object Bar: + import Foo.Aa // OK + val v = 1 + val a = new Aa(v) + +// ------------------------------------- +object testAnnotatedType: + import 
annotation.switch // OK + val a = (??? : @switch) match + case _ => ??? + + +//------------------------------------- +package testImportsInImports: + package a: + package b: + val x = 1 + package c: + import a.b // OK + import b.x // OK + val y = x + +//------------------------------------- +package testOnOverloadedMethodsImports: + package a: + trait A + trait B + trait C: + def foo(x: A):A = ??? + def foo(x: B):B = ??? + package b: + object D extends a.C + package c: + import b.D.foo // error + package d: + import b.D.foo // OK + def bar = foo((??? : a.A)) + package e: + import b.D.foo // OK + def bar = foo((??? : a.B)) + package f: + import b.D.foo // OK + def bar = foo((??? : a.A)) + def baz = foo((??? : a.B)) + +//------------------------------------- +package foo.testing.rename.imports: + import collection.mutable.{Set => MutSet1} // OK + import collection.mutable.{Set => MutSet2} // OK + import collection.mutable.{Set => MutSet3} // error + type A[X] = MutSet1[X] + val a = MutSet2(1) + +//------------------------------------- +package foo.testing.imports.precedence: + import scala.collection.immutable.{BitSet => _, _} // error + import scala.collection.immutable.BitSet // OK + def t = BitSet.empty + +package foo.test.enums: + enum A: // OK + case B extends A // OK + case C extends A // OK + +package foo.test.typeapply.hklamdba.i16680: + package foo: + trait IO[A] + + package bar: + import foo.IO // OK + + def f[F[_]]: String = "hello" + def go = f[IO] \ No newline at end of file diff --git a/tests/neg-custom-args/fatal-warnings/i15503b.scala b/tests/neg-custom-args/fatal-warnings/i15503b.scala new file mode 100644 index 000000000000..c8a2d6bc2074 --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/i15503b.scala @@ -0,0 +1,144 @@ +// scalac: -Wunused:locals + +val a = 1 // OK + +var cs = 3 // OK + +val b = // OK + var e3 = 2 // error + val e1 = 1 // error + def e2 = 2 // error + 1 + +val c = // OK + var e1 = 1 // error not set + def e2 = e1 // OK + val 
e3 = e2 // OK + e3 + +val g = // OK + var e1 = 1 // OK + def e2 = e1 // OK + val e3 = e2 // OK + e1 = e3 // OK + e3 + +def d = 1 // OK + +def e = // OK + val e1 = 1 // error + def e2 = 2 // error + var e3 = 4 // error + 1 + +def f = // OK + val f1 = 1 // OK + var f2 = f1 // error not set + def f3 = f2 // OK + f3 + +def h = // OK + val f1 = 1 // OK + var f2 = f1 // OK + def f3 = f2 // OK + f2 = f3 // OK + f2 + +class Foo { + val a = 1 // OK + + var cs = 3 // OK + + val b = // OK + var e3 = 2 // error + val e1 = 1 // error + def e2 = 2 // error + 1 + + val c = // OK + var e1 = 1 // error not set + def e2 = e1 // OK + val e3 = e2 // OK + e3 + + val g = // OK + var e1 = 1 // OK + def e2 = e1 // OK + val e3 = e2 // OK + e1 = e3 // OK + e3 + + def d = 1 // OK + + def e = // OK + val e1 = 1 // error + def e2 = 2 // error + var e3 = 4 // error + 1 + + def f = // OK + val f1 = 1 // OK + var f2 = f1 // error not set + def f3 = f2 // OK + f3 + + def h = // OK + val f1 = 1 // OK + var f2 = f1 // OK + def f3 = f2 // OK + f2 = f3 // OK + f2 +} + +// ---- SCALA 2 tests ---- + +package foo.scala2.tests: + class Outer { + class Inner + } + + trait Locals { + def f0 = { + var x = 1 // error + var y = 2 // OK + y = 3 + y + y + } + def f1 = { + val a = new Outer // OK + val b = new Outer // error + new a.Inner + } + def f2 = { + var x = 100 // error not set + x + } + } + + object Types { + def l1() = { + object HiObject { def f = this } // OK + class Hi { // error + def f1: Hi = new Hi + def f2(x: Hi) = x + } + class DingDongDoobie // error + class Bippy // OK + type Something = Bippy // OK + type OtherThing = String // error + (new Bippy): Something + } + } + +package test.foo.twisted.i16682: + def myPackage = + object IntExtractor: // OK + def unapply(s: String): Option[Int] = s.toIntOption + + def isInt(s: String) = s match { // OK + case IntExtractor(i) => println(s"Number $i") + case _ => println("NaN") + } + isInt + + def f = myPackage("42") diff --git 
a/tests/neg-custom-args/fatal-warnings/i15503c.scala b/tests/neg-custom-args/fatal-warnings/i15503c.scala new file mode 100644 index 000000000000..e4e15116bf0d --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/i15503c.scala @@ -0,0 +1,57 @@ +// scalac: -Wunused:privates +trait C +class A: + self: C => // OK + class B: + private[A] val a = 1 // OK + private[B] val b = 1 // OK + private[this] val c = 1 // error + private val d = 1 // error + + private[A] val e = 1 // OK + private[this] val f = e // OK + private val g = f // OK + + private[A] var h = 1 // OK + private[this] var i = h // error not set + private var j = i // error not set + + private[this] var k = 1 // OK + private var l = 2 // OK + private val m = // error + k = l + l = k + l + + private def fac(x: Int): Int = // error + if x == 0 then 1 else x * fac(x - 1) + + val x = 1 // OK + def y = 2 // OK + def z = g // OK + var w = 2 // OK + +package foo.test.contructors: + case class A private (x:Int) // OK + class B private (val x: Int) // OK + class C private (private val x: Int) // error + class D private (private val x: Int): // OK + def y = x + class E private (private var x: Int): // error not set + def y = x + class F private (private var x: Int): // OK + def y = + x = 3 + x + +package test.foo.i16682: + object myPackage: + private object IntExtractor: // OK + def unapply(s: String): Option[Int] = s.toIntOption + + def isInt(s: String) = s match { + case IntExtractor(i) => println(s"Number $i") + case _ => println("NaN") + } + + def f = myPackage.isInt("42") diff --git a/tests/neg-custom-args/fatal-warnings/i15503d.scala b/tests/neg-custom-args/fatal-warnings/i15503d.scala new file mode 100644 index 000000000000..6c5973c66a3a --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/i15503d.scala @@ -0,0 +1,30 @@ +// scalac: -Wunused:unsafe-warn-patvars +// todo : change to :patvars + +sealed trait Calc +sealed trait Const extends Calc +case class Sum(a: Calc, b: Calc) extends Calc +case class 
S(pred: Const) extends Const +case object Z extends Const + +val a = Sum(S(S(Z)),Z) match { + case Sum(a,Z) => Z // error + // case Sum(a @ _,Z) => Z // todo : this should pass in the future + case Sum(a@S(_),Z) => Z // error + case Sum(a@S(_),Z) => a // OK + case Sum(a@S(b@S(_)), Z) => a // error + case Sum(a@S(b@S(_)), Z) => a // error + case Sum(a@S(b@(S(_))), Z) => Sum(a,b) // OK + case Sum(_,_) => Z // OK + case _ => Z // OK +} + +// todo : This should pass in the future +// val b = for { +// case Some(x) <- Option(Option(1)) +// } println(s"$x") + +// todo : This should *NOT* pass in the future +// val c = for { +// case Some(x) <- Option(Option(1)) +// } println(s"hello world") diff --git a/tests/neg-custom-args/fatal-warnings/i15503e.scala b/tests/neg-custom-args/fatal-warnings/i15503e.scala new file mode 100644 index 000000000000..57664cd08dcd --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/i15503e.scala @@ -0,0 +1,71 @@ +// scalac: -Wunused:explicits + +object Foo { + /* This goes around the "trivial method" detection */ + val default_val = 1 + + private def f1(a: Int) = a // OK + private def f2(a: Int) = default_val // error + private def f3(a: Int)(using Int) = a // OK + private def f4(a: Int)(using Int) = default_val // error + private def f6(a: Int)(using Int) = summon[Int] // error + private def f7(a: Int)(using Int) = summon[Int] + a // OK +} + +package scala2main.unused.args: + object happyBirthday { + def main(args: Array[String]): Unit = println("Hello World") // ok + } + +package scala2main: + object happyBirthday { + def main(args: Array[String]): Unit = // OK + println(s"Hello World, there are ${args.size} arguments") + } + +package scala3main: + /* This goes around the "trivial method" detection */ + val default_unit = () + @main def hello = println("Hello World") // OK + +package foo.test.lambda.param: + val default_val = 1 + val a = (i: Int) => i // OK + val b = (i: Int) => default_val // OK + val c = (_: Int) => default_val // OK 
+ +package foo.test.trivial: + /* A twisted test from Scala 2 */ + class C { + def answer: 42 = 42 + object X + private def g0(x: Int) = ??? // OK + private def f0(x: Int) = () // OK + private def f1(x: Int) = throw new RuntimeException // OK + private def f2(x: Int) = 42 // OK + private def f3(x: Int): Option[Int] = None // OK + private def f4(x: Int) = classOf[Int] // OK + private def f5(x: Int) = answer + 27 // OK + private def f6(x: Int) = X // OK + private def f7(x: Int) = Y // OK + private def f8(x: Int): List[C] = Nil // OK + private def f9(x: Int): List[Int] = List(1,2,3,4) // error + private def foo:Int = 32 // OK + private def f77(x: Int) = foo // error + } + object Y + +package foo.test.i16955: + class S(var r: String) // OK + +package foo.test.i16865: + trait Foo: + def fn(a: Int, b: Int): Int // OK + trait Bar extends Foo + + object Ex extends Bar: + def fn(a: Int, b: Int): Int = b + 3 // OK + + object Ex2 extends Bar: + override def fn(a: Int, b: Int): Int = b + 3 // OK + diff --git a/tests/neg-custom-args/fatal-warnings/i15503f.scala b/tests/neg-custom-args/fatal-warnings/i15503f.scala new file mode 100644 index 000000000000..f909272af732 --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/i15503f.scala @@ -0,0 +1,14 @@ +// scalac: -Wunused:implicits + +/* This goes around the "trivial method" detection */ +val default_int = 1 + +object Xd { + private def f1(a: Int) = a // OK + private def f2(a: Int) = 1 // OK + private def f3(a: Int)(using Int) = a // OK + private def f4(a: Int)(using Int) = default_int // OK + private def f6(a: Int)(using Int) = summon[Int] // OK + private def f7(a: Int)(using Int) = summon[Int] + a // OK + private def f8(a: Int)(using foo: Int) = a // error +} diff --git a/tests/neg-custom-args/fatal-warnings/i15503g.scala b/tests/neg-custom-args/fatal-warnings/i15503g.scala new file mode 100644 index 000000000000..2185bfed711d --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/i15503g.scala @@ -0,0 +1,23 @@ +// scalac: 
-Wunused:params + +/* This goes around the "trivial method" detection */ +object Foo { + val default_int = 1 + + private def f1(a: Int) = a // OK + private def f2(a: Int) = default_int // error + private def f3(a: Int)(using Int) = a // OK + private def f4(a: Int)(using Int) = default_int // error + private def f6(a: Int)(using Int) = summon[Int] // error + private def f7(a: Int)(using Int) = summon[Int] + a // OK + /* --- Trivial method check --- */ + private def g1(x: Int) = 1 // OK + private def g2(x: Int) = ??? // OK +} + +package foo.test.i17101: + type Test[A] = A + extension[A] (x: Test[A]) { // OK + def value: A = x + def causesIssue: Unit = println("oh no") + } diff --git a/tests/neg-custom-args/fatal-warnings/i15503h.scala b/tests/neg-custom-args/fatal-warnings/i15503h.scala new file mode 100644 index 000000000000..3bab6cdbd098 --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/i15503h.scala @@ -0,0 +1,20 @@ +// scalac: -Wunused:linted + +import collection.mutable.Set // error + +class A { + private val a = 1 // error + val b = 2 // OK + + private def c = 2 // error + def d(using x:Int): Int = b // ok + def e(x: Int) = 1 // OK + def f = + val x = 1 // error + def f = 2 // error + 3 + + def g(x: Int): Int = x match + case x:1 => 0 // OK + case _ => 1 +} \ No newline at end of file diff --git a/tests/neg-custom-args/fatal-warnings/i15503i.scala b/tests/neg-custom-args/fatal-warnings/i15503i.scala new file mode 100644 index 000000000000..768e4d5c3ce0 --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/i15503i.scala @@ -0,0 +1,315 @@ +// scalac: -Wunused:all + +import collection.mutable.{Map => MutMap} // error +import collection.mutable.Set // error + +class A { + import collection.mutable.{Map => MutMap} // OK + private val a = 1 // error + val b = 2 // OK + + /* This goes around the trivial method detection */ + val default_int = 12 + + val someMap = MutMap() + + private def c1 = 2 // error + private def c2 = 2 // OK + def c3 = c2 + + def 
d1(using x:Int): Int = default_int // ok + def d2(using x:Int): Int = x // OK + + def e1(x: Int) = default_int // ok + def e2(x: Int) = x // OK + def f = + val x = 1 // error + def f = 2 // error + val y = 3 // OK + def g = 4 // OK + y + g + + // todo : uncomment once patvars is fixed + // def g(x: Int): Int = x match + // case x:1 => 0 // ?error + // case x:2 => x // ?OK + // case _ => 1 // ?OK +} + +/* ---- CHECK scala.annotation.unused ---- */ +package foo.test.scala.annotation: + import annotation.unused // OK + + /* This goes around the trivial method detection */ + val default_int = 12 + + def a1(a: Int) = a // OK + def a2(a: Int) = default_int // ok + + def a3(@unused a: Int) = default_int //OK + + def b1 = + def f = 1 // error + 1 + + def b2 = + def f = 1 // OK + f + + def b3 = + @unused def f = 1 // OK + 1 + + object Foo: + private def a = 1 // error + private def b = 2 // OK + @unused private def c = 3 // OK + + def other = b + +package foo.test.companionprivate: + class A: + import A.b // OK + def a = b // OK + + object A: + private def b = c // OK + def c = List(1,2,3) // OK + +package foo.test.i16678: + def foo(func: Int => String, value: Int): String = func(value) // OK + + def run = + println(foo(number => number.toString, value = 5)) // OK + println(foo(number => "", value = 5)) // error + println(foo(func = number => "", value = 5)) // error + println(foo(func = number => number.toString, value = 5)) // OK + println(foo(func = _.toString, value = 5)) // OK + +package foo.test.possibleclasses: + case class AllCaseClass( + k: Int, // OK + private val y: Int // OK /* Kept as it can be taken from pattern */ + )( + s: Int, + val t: Int, // OK + private val z: Int // error + ) + + case class AllCaseUsed( + k: Int, // OK + private val y: Int // OK + )( + s: Int, // OK + val t: Int, // OK + private val z: Int // OK + ) { + def a = k + y + s + t + z + } + + class AllClass( + k: Int, // error + private val y: Int // error + )( + s: Int, // error + val t: 
Int, // OK + private val z: Int // error + ) + + class AllUsed( + k: Int, // OK + private val y: Int // OK + )( + s: Int, // OK + val t: Int, // OK + private val z: Int // OK + ) { + def a = k + y + s + t + z + } + +package foo.test.possibleclasses.withvar: + case class AllCaseClass( + k: Int, // OK + private var y: Int // OK /* Kept as it can be taken from pattern */ + )( + s: Int, + var t: Int, // OK + private var z: Int // error + ) + + case class AllCaseUsed( + k: Int, // OK + private var y: Int // OK + )( + s: Int, // OK + var t: Int, // OK global scope can be set somewhere else + private var z: Int // error not set + ) { + def a = k + y + s + t + z + } + + class AllClass( + k: Int, // error + private var y: Int // error + )( + s: Int, // error + var t: Int, // OK + private var z: Int // error + ) + + class AllUsed( + k: Int, // OK + private var y: Int // error not set + )( + s: Int, // OK + var t: Int, // OK global scope can be set somewhere else + private var z: Int // error not set + ) { + def a = k + y + s + t + z + } + + + +package foo.test.from.i16675: + case class PositiveNumber private (i: Int) // OK + object PositiveNumber: + def make(i: Int): Option[PositiveNumber] = //OK + Option.when(i >= 0)(PositiveNumber(i)) // OK + +package foo.test.i16822: + enum ExampleEnum { + case Build(context: String) // OK + case List // OK + } + + def demo = { + val x = ExampleEnum.List // OK + println(x) // OK + } + +package foo.test.i16877: + import scala.collection.immutable.HashMap // OK + import scala.annotation.StaticAnnotation // OK + + class ExampleAnnotation(val a: Object) extends StaticAnnotation // OK + + @ExampleAnnotation(new HashMap()) // OK + class Test //OK + +package foo.test.i16926: + def hello(): Unit = + for { + i <- (0 to 10).toList + (a, b) = "hello" -> "world" // OK + } yield println(s"$a $b") + +package foo.test.i16925: + def hello = + for { + i <- 1 to 2 if true + _ = println(i) // OK + } yield () + +package foo.test.i16863a: + import 
scala.quoted.* + def fn(using Quotes) = + val x = Expr(1) + '{ $x + 2 } // OK + +package foo.test.i16863b: + import scala.quoted.* + def fn[A](using Quotes, Type[A]) = // OK + val numeric = Expr.summon[Numeric[A]].getOrElse(???) + '{ $numeric.fromInt(3) } // OK + +package foo.test.i16863c: + import scala.quoted.* + def fn[A](expr: Expr[Any])(using Quotes) = + val imp = expr match + case '{ ${ _ }: a } => Expr.summon[Numeric[a]] // OK + println(imp) + +package foo.test.i16863d: + import scala.quoted.* + import scala.compiletime.asMatchable // OK + def fn[A](using Quotes, Type[A]) = + import quotes.reflect.* + val imp = TypeRepr.of[A].widen.asMatchable match + case Refinement(_,_,_) => () + println(imp) + +package foo.test.i16679a: + object myPackage: + trait CaseClassName[A]: + def name: String + object CaseClassName: + trait CaseClassByStringName[A] extends CaseClassName[A] + import scala.deriving.Mirror + object CaseClassByStringName: + inline final def derived[A](using inline A: Mirror.Of[A]): CaseClassByStringName[A] = + new CaseClassByStringName[A]: + def name: String = A.toString + + object secondPackage: + import myPackage.CaseClassName // OK + case class CoolClass(i: Int) derives CaseClassName.CaseClassByStringName + println(summon[CaseClassName[CoolClass]].name) + +package foo.test.i16679b: + object myPackage: + trait CaseClassName[A]: + def name: String + + object CaseClassName: + import scala.deriving.Mirror + inline final def derived[A](using inline A: Mirror.Of[A]): CaseClassName[A] = + new CaseClassName[A]: + def name: String = A.toString + + object Foo: + given x: myPackage.CaseClassName[secondPackage.CoolClass] = null + + object secondPackage: + import myPackage.CaseClassName // OK + import Foo.x + case class CoolClass(i: Int) + println(summon[myPackage.CaseClassName[CoolClass]]) + +package foo.test.i17156: + package a: + trait Foo[A] + object Foo: + inline def derived[T]: Foo[T] = new Foo{} + + package b: + import a.Foo + type Xd[A] = Foo[A] + + 
package c: + import b.Xd + trait Z derives Xd + + +package foo.test.i17175: + val continue = true + def foo = + for { + i <- 1.until(10) // OK + if continue + } { + println(i) + } + +package foo.test.i17117: + package example { + object test1 { + val test = "test" + } + + object test2 { + + import example.test1 as t1 + + val test = t1.test + } + } diff --git a/tests/neg-custom-args/fatal-warnings/i15503j.scala b/tests/neg-custom-args/fatal-warnings/i15503j.scala new file mode 100644 index 000000000000..51c1fa6fda0c --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/i15503j.scala @@ -0,0 +1,59 @@ +// scalac: -Wunused:strict-no-implicit-warn + +package foo.unused.strict.test: + package a: + given x: Int = 0 + implicit val y: Int = 1 + val z: Int = 2 + def f: Int = 3 + package b: + import a.given // OK + import a._ // OK + import a.* // OK + import a.x // OK + import a.y // OK + import a.z // error + import a.f // error + package c: + import a.given // OK + import a.x // OK + import a.y // OK + import a.z // OK + import a.f // OK + def g = f + z + y + x + +package foo.implicits.resolution: + class X + class Y extends X + object A { implicit val x: X = new X } + object B { implicit val y: Y = new Y } + class C { + import A._ // OK + import B._ // OK + def t = implicitly[X] + } + +package foo.unused.summon.inlines: + package lib: + trait A + trait B + trait C + trait X + + given willBeUnused: (A & X) = new A with X {} + given willBeUsed: (A & B) = new A with B {} + + package use: + import lib.{A, B, C, willBeUnused, willBeUsed} //OK + import compiletime.summonInline //OK + + transparent inline given conflictInside: C = + summonInline[A] + new {} + + transparent inline given potentialConflict: C = + summonInline[B] + new {} + + val b: B = summon[B] + val c: C = summon[C] \ No newline at end of file diff --git a/tests/neg-custom-args/fatal-warnings/i15662.scala b/tests/neg-custom-args/fatal-warnings/i15662.scala index 1d5ff21eb3ba..afe505922603 100644 --- 
a/tests/neg-custom-args/fatal-warnings/i15662.scala +++ b/tests/neg-custom-args/fatal-warnings/i15662.scala @@ -3,7 +3,6 @@ case class Composite[T](v: T) def m(composite: Composite[_]): Unit = composite match { case Composite[Int](v) => println(v) // error: cannot be checked at runtime - case _ => println("OTHER") } def m2(composite: Composite[_]): Unit = diff --git a/tests/neg-custom-args/fatal-warnings/i15893.scala b/tests/neg-custom-args/fatal-warnings/i15893.scala new file mode 100644 index 000000000000..f23e6150106a --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/i15893.scala @@ -0,0 +1,61 @@ +sealed trait NatT +case class Zero() extends NatT +case class Succ[+N <: NatT](n: N) extends NatT + +type Mod2[N <: NatT] <: NatT = N match + case Zero => Zero + case Succ[Zero] => Succ[Zero] + case Succ[Succ[predPredN]] => Mod2[predPredN] + +def mod2(n: NatT): NatT = n match + case Zero() => Zero() + case Succ(Zero()) => Succ(Zero()) + case Succ(Succ(predPredN)) => mod2(predPredN) + +inline def inlineMod2(inline n: NatT): NatT = inline n match + case Zero() => Zero() + case Succ(Zero()) => Succ(Zero()) + case Succ(Succ(predPredN)) => inlineMod2(predPredN) + +transparent inline def transparentInlineMod2(inline n: NatT): NatT = inline n match + case Zero() => Zero() + case Succ(Zero()) => Succ(Zero()) + case Succ(Succ(predPredN)) => transparentInlineMod2(predPredN) + +def dependentlyTypedMod2[N <: NatT](n: N): Mod2[N] = n match + case Zero(): Zero => Zero() // error + case Succ(Zero()): Succ[Zero] => Succ(Zero()) // error + case Succ(Succ(predPredN)): Succ[Succ[_]] => dependentlyTypedMod2(predPredN) // error + +inline def inlineDependentlyTypedMod2[N <: NatT](inline n: N): Mod2[N] = inline n match + case Zero(): Zero => Zero() // error + case Succ(Zero()): Succ[Zero] => Succ(Zero()) // error + case Succ(Succ(predPredN)): Succ[Succ[_]] => inlineDependentlyTypedMod2(predPredN) // error + +transparent inline def transparentInlineDependentlyTypedMod2[N <: 
NatT](inline n: N): Mod2[N] = inline n match + case Zero(): Zero => Zero() // error + case Succ(Zero()): Succ[Zero] => Succ(Zero()) // error + case Succ(Succ(predPredN)): Succ[Succ[_]] => transparentInlineDependentlyTypedMod2(predPredN) // error + +def foo(n: NatT): NatT = mod2(n) match + case Succ(Zero()) => Zero() + case _ => n + +inline def inlineFoo(inline n: NatT): NatT = inline inlineMod2(n) match + case Succ(Zero()) => Zero() + case _ => n + +inline def transparentInlineFoo(inline n: NatT): NatT = inline transparentInlineMod2(n) match + case Succ(Zero()) => Zero() + case _ => n + +@main def main(): Unit = + println(mod2(Succ(Succ(Succ(Zero()))))) // prints Succ(Zero()), as expected + println(foo(Succ(Succ(Succ(Zero()))))) // prints Zero(), as expected + println(inlineMod2(Succ(Succ(Succ(Zero()))))) // prints Succ(Zero()), as expected + println(inlineFoo(Succ(Succ(Succ(Zero()))))) // prints Succ(Succ(Succ(Zero()))); unexpected + println(transparentInlineMod2(Succ(Succ(Succ(Zero()))))) // prints Succ(Zero()), as expected + println(transparentInlineFoo(Succ(Succ(Succ(Zero()))))) // prints Zero(), as expected + println(dependentlyTypedMod2(Succ(Succ(Succ(Zero()))))) // runtime error; unexpected + println(inlineDependentlyTypedMod2(Succ(Succ(Succ(Zero()))))) // prints Succ(Zero()), as expected + println(transparentInlineDependentlyTypedMod2(Succ(Succ(Succ(Zero()))))) // prints Succ(Zero()), as expected diff --git a/tests/neg-custom-args/fatal-warnings/i16639a.scala b/tests/neg-custom-args/fatal-warnings/i16639a.scala new file mode 100644 index 000000000000..c62910b7f566 --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/i16639a.scala @@ -0,0 +1,207 @@ +// scalac: -Wunused:all +// +class Bippy(a: Int, b: Int) { + private def this(c: Int) = this(c, c) // warn /Dotty:NoWarn + private def boop(x: Int) = x+a+b // error + private def bippy(x: Int): Int = bippy(x) // error TODO: could warn + final private val MILLIS1 = 2000 // error no warn, /Dotty:Warn + final 
private val MILLIS2: Int = 1000 // error + final private val HI_COMPANION: Int = 500 // no warn, accessed from companion + def hi() = Bippy.HI_INSTANCE +} +object Bippy { + def hi(x: Bippy) = x.HI_COMPANION + private val HI_INSTANCE: Int = 500 // no warn, accessed from instance + private val HEY_INSTANCE: Int = 1000 // error warn + private lazy val BOOL: Boolean = true // error warn +} + +class A(val msg: String) +class B1(msg: String) extends A(msg) +class B2(msg0: String) extends A(msg0) +class B3(msg0: String) extends A("msg") // error /Dotty: unused explicit parameter + +trait Bing + +trait Accessors { + private var v1: Int = 0 // error warn + private var v2: Int = 0 // error warn, never set + private var v3: Int = 0 // warn, never got /Dotty: no warn even if not used + private var v4: Int = 0 // no warn + + private[this] var v5 = 0 // error warn, never set + private[this] var v6 = 0 // warn, never got /Dotty: no warn even if not used + private[this] var v7 = 0 // no warn + + def bippy(): Int = { + v3 = 3 + v4 = 4 + v6 = 6 + v7 = 7 + v2 + v4 + v5 + v7 + } +} + +class StableAccessors { + private var s1: Int = 0 // error warn + private var s2: Int = 0 // error warn, never set + private var s3: Int = 0 // warn, never got /Dotty: no warn even if not usued + private var s4: Int = 0 // no warn + + private[this] var s5 = 0 // error warn, never set + private[this] var s6 = 0 // no warn, limitation /Dotty: Why limitation ? 
+ private[this] var s7 = 0 // no warn + + def bippy(): Int = { + s3 = 3 + s4 = 4 + s6 = 6 + s7 = 7 + s2 + s4 + s5 + s7 + } +} + +trait DefaultArgs { + // warn about default getters for x2 and x3 + private def bippy(x1: Int, x2: Int = 10, x3: Int = 15): Int = x1 + x2 + x3 // no more warn warn since #17061 + + def boppy() = bippy(5, 100, 200) +} + + +class Outer { + class Inner +} + +trait Locals { + def f0 = { + var x = 1 // error warn + var y = 2 + y = 3 + y + y + } + def f1 = { + val a = new Outer // no warn + val b = new Outer // error warn + new a.Inner + } + def f2 = { + var x = 100 // error warn about it being a var, var not set + x + } +} + +object Types { + private object Dongo { def f = this } // no more warn since #17061 + private class Bar1 // error warn + private class Bar2 // no warn + private type Alias1 = String // error warn + private type Alias2 = String // no warn + def bippo = (new Bar2).toString + + def f(x: Alias2) = x.length + + def l1() = { + object HiObject { def f = this } // no more warn since #17061 + class Hi { // error warn + def f1: Hi = new Hi + def f2(x: Hi) = x + } + class DingDongDoobie // error warn + class Bippy // no warn + type Something = Bippy // no warn + type OtherThing = String // error warn + (new Bippy): Something + } +} + +trait Underwarn { + def f(): Seq[Int] + + def g() = { + val Seq(_, _) = f() // no warn + true + } +} + +class OtherNames { + private def x_=(i: Int): Unit = () // no more warn since #17061 + private def x: Int = 42 // error Dotty triggers unused private member : To investigate + private def y_=(i: Int): Unit = () // // no more warn since #17061 + private def y: Int = 42 + + def f = y +} + + +trait Forever { + def f = { + val t = Option((17, 42)) + for { + ns <- t + (i, j) = ns // no warn + } yield (i + j) + } + def g = { + val t = Option((17, 42)) + for { + ns <- t + (i, j) = ns // no warn + } yield 42 // val emitted only if needed, hence nothing unused + } +} + +trait Ignorance { + private val 
readResolve = 42 // error ignore /dotty triggers unused private member/ why should we ignore ? +} + +trait CaseyKasem { + def f = 42 match { + case x if x < 25 => "no warn" + case y if toString.nonEmpty => "no warn" + y + case z => "warn" + } +} +trait CaseyAtTheBat { + def f = Option(42) match { + case Some(x) if x < 25 => "no warn" + case Some(y @ _) if toString.nonEmpty => "no warn" + case Some(z) => "warn" + case None => "no warn" + } +} + +class `not even using companion privates` + +object `not even using companion privates` { + private implicit class `for your eyes only`(i: Int) { // no more warn since #17061 + def f = i + } +} + +class `no warn in patmat anonfun isDefinedAt` { + def f(pf: PartialFunction[String, Int]) = pf("42") + def g = f { + case s => s.length // no warn (used to warn case s => true in isDefinedAt) + } +} + +// this is the ordinary case, as AnyRef is an alias of Object +class `nonprivate alias is enclosing` { + class C + type C2 = C + private class D extends C2 // error warn +} + +object `classof something` { + private class intrinsically + def f = classOf[intrinsically].toString() +} + +trait `short comings` { + def f: Int = { + val x = 42 // error /Dotty only triggers in dotty + 17 + } +} + diff --git a/tests/neg-custom-args/fatal-warnings/i16649-refutable.check b/tests/neg-custom-args/fatal-warnings/i16649-refutable.check new file mode 100644 index 000000000000..5b3d460c7f09 --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/i16649-refutable.check @@ -0,0 +1,8 @@ +-- Error: tests/neg-custom-args/fatal-warnings/i16649-refutable.scala:4:6 ---------------------------------------------- +4 | val '{ ($y: Int) + ($z: Int) } = x // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | pattern binding uses refutable extractor `'{...}` + | + | If this usage is intentional, this can be communicated by adding `: @unchecked` after the expression, + | which may result in a MatchError at runtime. 
+ | This patch can be rewritten automatically under -rewrite -source 3.2-migration. diff --git a/tests/neg-custom-args/fatal-warnings/i16649-refutable.scala b/tests/neg-custom-args/fatal-warnings/i16649-refutable.scala new file mode 100644 index 000000000000..2a42f652e093 --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/i16649-refutable.scala @@ -0,0 +1,4 @@ +import quoted.* + +def foo(using Quotes)(x: Expr[Int]) = + val '{ ($y: Int) + ($z: Int) } = x // error diff --git a/tests/neg-custom-args/fatal-warnings/i16728.check b/tests/neg-custom-args/fatal-warnings/i16728.check new file mode 100644 index 000000000000..a797baf19be0 --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/i16728.check @@ -0,0 +1,4 @@ +-- Error: tests/neg-custom-args/fatal-warnings/i16728.scala:16:11 ------------------------------------------------------ +16 | case tx : C[Int]#X => // error + | ^ + | the type test for C[Int] cannot be checked at runtime because its type arguments can't be determined from A diff --git a/tests/neg-custom-args/fatal-warnings/i16728.scala b/tests/neg-custom-args/fatal-warnings/i16728.scala new file mode 100644 index 000000000000..42c860cc40b2 --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/i16728.scala @@ -0,0 +1,32 @@ +class A[T] { + class X { + def outer : A.this.type = A.this + } +} + +class B extends A[Int] +class C[T] extends A[T] + +object Test { + def main(args: Array[String]) : Unit = { + val b0 = new B + val b0x : A[?]#X = new b0.X + + def test = b0x match { + case tx : C[Int]#X => // error + val c : C[Int] = tx.outer + c + case _ => + "no match" + } + + def test2 = b0x match { + case tx : C[Int]#X @unchecked => // ok + val c : C[Int] = tx.outer + c + case _ => + "no match" + } + + } +} \ No newline at end of file diff --git a/tests/neg-custom-args/fatal-warnings/i16876/Macro.scala b/tests/neg-custom-args/fatal-warnings/i16876/Macro.scala new file mode 100644 index 000000000000..2823de1f72c5 --- /dev/null +++ 
b/tests/neg-custom-args/fatal-warnings/i16876/Macro.scala @@ -0,0 +1,23 @@ +import scala.quoted.* + +def findMethodSymbol(using q: Quotes)(s: quotes.reflect.Symbol): quotes.reflect.Symbol = + if s.isDefDef then + s + else + findMethodSymbol(using q)(s.owner) +end findMethodSymbol + + +inline def adder: Int = ${ + adderImpl +} + +def adderImpl(using q: Quotes): Expr[Int] = + import quotes.reflect.* + + val inputs = findMethodSymbol(using q)(q.reflect.Symbol.spliceOwner).tree match + case DefDef(_, params, _, _) => + params.last match + case TermParamClause(valDefs) => + valDefs.map(vd => Ref(vd.symbol).asExprOf[Int]) + inputs.reduce((exp1, exp2) => '{ $exp1 + $exp2 }) \ No newline at end of file diff --git a/tests/neg-custom-args/fatal-warnings/i16876/Test.scala b/tests/neg-custom-args/fatal-warnings/i16876/Test.scala new file mode 100644 index 000000000000..d9229d31cd6d --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/i16876/Test.scala @@ -0,0 +1,11 @@ +// scalac: -Wunused:all + +object Foo { + private def myMethod(a: Int, b: Int, c: Int) = adder // ok + myMethod(1, 2, 3) + + private def myMethodFailing(a: Int, b: Int, c: Int) = a + 0 // error // error + myMethodFailing(1, 2, 3) +} + + diff --git a/tests/neg-custom-args/fatal-warnings/i16930.scala b/tests/neg-custom-args/fatal-warnings/i16930.scala new file mode 100644 index 000000000000..1f6c5bf1a09f --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/i16930.scala @@ -0,0 +1,22 @@ +// scalac: -Wunused:imports + +trait Outer: + trait Used + trait Unused + +object Test { + val outer: Outer = ??? + import outer.{Used, Unused} // error + def foo(x: Any): Used = x.asInstanceOf[Used] +} + +trait Outer1: + trait UnusedToo1 + trait Unused1 + def unusedToo1: UnusedToo1 + +object Test1 { + val outer1: Outer1 = ??? 
+ import outer1.{Unused1, UnusedToo1} // error // error + def foo() = outer1.unusedToo1 // in this case UnusedToo1 is not used explicitly, only inferred +} diff --git a/tests/neg-custom-args/fatal-warnings/i17314b.scala b/tests/neg-custom-args/fatal-warnings/i17314b.scala new file mode 100644 index 000000000000..384767765cf4 --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/i17314b.scala @@ -0,0 +1,14 @@ +// scalac: -Wunused:all + +package foo: + class Foo[T] + given Foo[Int] = new Foo[Int] + + +package bar: + import foo.{given foo.Foo[Int]} // error + import foo.Foo + + given Foo[Int] = ??? + + val repro: Foo[Int] = summon[Foo[Int]] diff --git a/tests/neg-custom-args/fatal-warnings/i17335.scala b/tests/neg-custom-args/fatal-warnings/i17335.scala new file mode 100644 index 000000000000..6629e2f151c9 --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/i17335.scala @@ -0,0 +1,4 @@ +// scalac: -Wunused:all + +def aMethod() = + doStuff { (x) => x } // error diff --git a/tests/neg-custom-args/fatal-warnings/i8711.check b/tests/neg-custom-args/fatal-warnings/i8711.check index 0035af0755d4..491d1678b5ac 100644 --- a/tests/neg-custom-args/fatal-warnings/i8711.check +++ b/tests/neg-custom-args/fatal-warnings/i8711.check @@ -6,3 +6,7 @@ 12 | case x: C => x // error | ^^^^ | Unreachable case +-- [E030] Match case Unreachable Error: tests/neg-custom-args/fatal-warnings/i8711.scala:17:9 -------------------------- +17 | case x: (B | C) => x // error + | ^^^^^^^^^^ + | Unreachable case diff --git a/tests/neg-custom-args/fatal-warnings/i8711.scala b/tests/neg-custom-args/fatal-warnings/i8711.scala index e37f7a8b039f..46fc5a85c90a 100644 --- a/tests/neg-custom-args/fatal-warnings/i8711.scala +++ b/tests/neg-custom-args/fatal-warnings/i8711.scala @@ -12,4 +12,9 @@ object Test { case x: C => x // error case _ => } + + def baz(x: A) = x match { + case x: (B | C) => x // error + case _ => + } } diff --git a/tests/neg-custom-args/fatal-warnings/inline-givens.scala 
b/tests/neg-custom-args/fatal-warnings/inline-givens.scala new file mode 100644 index 000000000000..eae50bca45cf --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/inline-givens.scala @@ -0,0 +1,15 @@ + +class Item(x: String) + +inline given a: Conversion[String, Item] = + Item(_) // error + +inline given b: Conversion[String, Item] = + (x => Item(x)) // error + +inline given c: Conversion[String, Item] = + { x => Item(x) } // error + +inline given d: Conversion[String, Item] with + def apply(x: String) = Item(x) // ok + diff --git a/tests/neg-custom-args/fatal-warnings/nonunit-statement.scala b/tests/neg-custom-args/fatal-warnings/nonunit-statement.scala new file mode 100644 index 000000000000..399d132edfae --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/nonunit-statement.scala @@ -0,0 +1,198 @@ +// scalac: -Wnonunit-statement -Wvalue-discard +import collection.ArrayOps +import collection.mutable.{ArrayBuilder, LinkedHashSet, ListBuffer} +import concurrent._ +import scala.reflect.ClassTag + +class C { + import ExecutionContext.Implicits._ + def c = { + def improved = Future(42) + def stale = Future(27) + improved // error + stale + } +} +class D { + def d = { + class E + new E().toString // error + new E().toString * 2 + } +} +class F { + import ExecutionContext.Implicits._ + Future(42) // error +} +// unused template expression uses synthetic method of class +case class K(s: String) { + copy() // error +} +// mutations returning this are ok +class Mutate { + val b = ListBuffer.empty[Int] + b += 42 // nowarn, returns this.type + val xs = List(42) + 27 +: xs // error + + def f(x: Int): this.type = this + def g(): Unit = f(42) // nowarn +} +// some uninteresting expressions may warn for other reasons +class WhoCares { + null // error for purity + ??? 
// nowarn for impurity +} +// explicit Unit ascription to opt out of warning, even for funky applies +class Absolution { + def f(i: Int): Int = i+1 + import ExecutionContext.Implicits._ + // Future(42): Unit // nowarn { F(42)(ctx) }: Unit where annot is on F(42) + // f(42): Unit // nowarn +} +// warn uni-branched unless user disables it with -Wnonunit-if:false +class Boxed[A](a: A) { + def isEmpty = false + def foreach[U](f: A => U): Unit = + if (!isEmpty) f(a) // error (if) + def forall(f: A => Boolean): Unit = + if (!isEmpty) { + println(".") + f(a) // error (if) + } + def take(p: A => Boolean): Option[A] = { + while (isEmpty || !p(a)) () + Some(a).filter(p) + } +} +class Unibranch[A, B] { + def runWith[U](action: B => U): A => Boolean = { x => + val z = null.asInstanceOf[B] + val fellback = false + if (!fellback) action(z) // error (if) + !fellback + } + def f(i: Int): Int = { + def g = 17 + if (i < 42) { + g // error block statement + println("uh oh") + g // error (if) + } + while (i < 42) { + g // error + println("uh oh") + g // error + } + 42 + } +} +class Dibranch { + def i: Int = ??? + def j: Int = ??? 
+ def f(b: Boolean): Int = { + // if-expr might have an uninteresting LUB + if (b) { // error, at least one branch looks interesting + println("true") + i + } + else { + println("false") + j + } + 42 + } +} +class Next[A] { + val all = ListBuffer.empty[A] + def f(it: Iterator[A], g: A => A): Unit = + while (it.hasNext) + all += g(it.next()) // nowarn +} +class Setting[A] { + def set = LinkedHashSet.empty[A] + def f(a: A): Unit = { + set += a // error because cannot know whether the `set` was supposed to be consumed or assigned + println(set) + } +} +// neither StringBuilder warns, because either append is Java method or returns this.type +// while loop looks like if branch with block1(block2, jump to label), where block2 typed as non-unit +class Strung { + def iterator = Iterator.empty[String] + def addString(b: StringBuilder, start: String, sep: String, end: String): StringBuilder = { + val jsb = b.underlying + if (start.length != 0) jsb.append(start) // error (value-discard) + val it = iterator + if (it.hasNext) { + jsb.append(it.next()) + while (it.hasNext) { + jsb.append(sep) // nowarn (java) + jsb.append(it.next()) // error (value-discard) + } + } + if (end.length != 0) jsb.append(end) // error (value-discard) + b + } + def f(b: java.lang.StringBuilder, it: Iterator[String]): String = { + while (it.hasNext) { + b.append("\n") // nowarn (java) + b.append(it.next()) // error (value-discard) + } + b.toString + } + def g(b: java.lang.StringBuilder, it: Iterator[String]): String = { + while (it.hasNext) it.next() // error + b.toString + } +} +class J { + import java.util.Collections + def xs: java.util.List[Int] = ??? 
+ def f(): Int = { + Collections.checkedList[Int](xs, classOf[Int]) + 42 + } +} +class Variant { + var bs = ListBuffer.empty[Int] + val xs = ListBuffer.empty[Int] + private[this] val ys = ListBuffer.empty[Int] + private[this] var zs = ListBuffer.empty[Int] + def f(i: Int): Unit = { + bs.addOne(i) + xs.addOne(i) + ys.addOne(i) + zs.addOne(i) + println("done") + } +} +final class ArrayOops[A](private val xs: Array[A]) extends AnyVal { + def other: ArrayOps[A] = ??? + def transpose[B](implicit asArray: A => Array[B]): Array[Array[B]] = { + val aClass = xs.getClass.getComponentType + val bb = new ArrayBuilder.ofRef[Array[B]]()(ClassTag[Array[B]](aClass)) + if (xs.length == 0) bb.result() + else { + def mkRowBuilder() = ArrayBuilder.make[B](ClassTag[B](aClass.getComponentType)) + val bs = new ArrayOps(asArray(xs(0))).map((x: B) => mkRowBuilder()) + for (xs <- other) { + var i = 0 + for (x <- new ArrayOps(asArray(xs))) { + bs(i) += x + i += 1 + } + } + for (b <- new ArrayOps(bs)) bb += b.result() + bb.result() + } + } +} +class Depends { + def f[A](a: A): a.type = a + def g() = { + val d = new Depends + f(d) + () + } +} diff --git a/tests/neg-custom-args/feature/convertible.scala b/tests/neg-custom-args/feature/convertible.scala new file mode 100644 index 000000000000..1b9e1c79f011 --- /dev/null +++ b/tests/neg-custom-args/feature/convertible.scala @@ -0,0 +1,29 @@ +import language.experimental.into + +class Text(val str: String) + +object Test: + + given Conversion[String, Text] = Text(_) + + def f(x: Text, y: => Text, zs: Text*) = + println(s"${x.str} ${y.str} ${zs.map(_.str).mkString(" ")}") + + f("abc", "def") // error // error + f("abc", "def", "xyz", "uvw") // error // error // error // error + f("abc", "def", "xyz", Text("uvw")) // error // error // error + + def g(x: into Text) = + println(x.str) + + + g("abc") // OK + val gg = g + gg("abc") // straight eta expansion is also OK + + def h1[X](x: X)(y: X): Unit = () + + def h(x: into Text) = + val y = h1(x) + 
y("abc") // error, inference through type variable does not propagate + diff --git a/tests/neg-custom-args/feature-shadowing.scala b/tests/neg-custom-args/feature/feature-shadowing.scala similarity index 100% rename from tests/neg-custom-args/feature-shadowing.scala rename to tests/neg-custom-args/feature/feature-shadowing.scala diff --git a/tests/neg-custom-args/i13946/BadPrinter.scala b/tests/neg-custom-args/feature/i13946/BadPrinter.scala similarity index 100% rename from tests/neg-custom-args/i13946/BadPrinter.scala rename to tests/neg-custom-args/feature/i13946/BadPrinter.scala diff --git a/tests/neg-custom-args/i13946/Printer.scala b/tests/neg-custom-args/feature/i13946/Printer.scala similarity index 100% rename from tests/neg-custom-args/i13946/Printer.scala rename to tests/neg-custom-args/feature/i13946/Printer.scala diff --git a/tests/neg-custom-args/impl-conv/A.scala b/tests/neg-custom-args/feature/impl-conv/A.scala similarity index 100% rename from tests/neg-custom-args/impl-conv/A.scala rename to tests/neg-custom-args/feature/impl-conv/A.scala diff --git a/tests/neg-custom-args/impl-conv/B.scala b/tests/neg-custom-args/feature/impl-conv/B.scala similarity index 100% rename from tests/neg-custom-args/impl-conv/B.scala rename to tests/neg-custom-args/feature/impl-conv/B.scala diff --git a/tests/neg-custom-args/implicit-conversions-old.scala b/tests/neg-custom-args/feature/implicit-conversions-old.scala similarity index 100% rename from tests/neg-custom-args/implicit-conversions-old.scala rename to tests/neg-custom-args/feature/implicit-conversions-old.scala diff --git a/tests/neg-custom-args/implicit-conversions.scala b/tests/neg-custom-args/feature/implicit-conversions.scala similarity index 100% rename from tests/neg-custom-args/implicit-conversions.scala rename to tests/neg-custom-args/feature/implicit-conversions.scala diff --git a/tests/neg-custom-args/hidden-type-errors.check b/tests/neg-custom-args/hidden-type-errors.check deleted file mode 100644 
index a373e409af2f..000000000000 --- a/tests/neg-custom-args/hidden-type-errors.check +++ /dev/null @@ -1,28 +0,0 @@ --- [E007] Type Mismatch Error: tests/neg-custom-args/hidden-type-errors/Test.scala:6:24 -------------------------------- -6 | val x = X.doSomething("XXX") // error - | ^^^^^^^^^^^^^^^^^^^^ - | Found: String - | Required: Int - | This location contains code that was inlined from Test.scala:6 - -Explanation -=========== - -Tree: t12717.A.bar("XXX") - -I tried to show that - String -conforms to - Int -but the comparison trace ended with `false`: - - ==> String <: Int - ==> String <: Int (recurring) - ==> String <: Int (recurring) - <== String <: Int (recurring) = false - <== String <: Int (recurring) = false - <== String <: Int = false - -The tests were made under the empty constraint - -1 error found diff --git a/tests/neg-custom-args/i10994.check b/tests/neg-custom-args/i10994.check new file mode 100644 index 000000000000..c540a04657c3 --- /dev/null +++ b/tests/neg-custom-args/i10994.check @@ -0,0 +1,7 @@ +-- Error: tests/neg-custom-args/i10994.scala:2:19 ---------------------------------------------------------------------- +2 | case (b: Boolean): Boolean => () // error + | ^ + | Type ascriptions after patterns other than: + | * variable pattern, e.g. `case x: String =>` + | * number literal pattern, e.g. `case 10.5: Double =>` + | are no longer supported. Remove the type ascription or move it to a separate variable pattern. 
diff --git a/tests/neg-custom-args/i10994.scala b/tests/neg-custom-args/i10994.scala new file mode 100644 index 000000000000..65695ccf4352 --- /dev/null +++ b/tests/neg-custom-args/i10994.scala @@ -0,0 +1,2 @@ +def foo = true match + case (b: Boolean): Boolean => () // error diff --git a/tests/neg-custom-args/i13838.check b/tests/neg-custom-args/i13838.check index 2c93e4001461..5e62779f3238 100644 --- a/tests/neg-custom-args/i13838.check +++ b/tests/neg-custom-args/i13838.check @@ -1,15 +1,15 @@ --- Error: tests/neg-custom-args/i13838.scala:10:5 ---------------------------------------------------------------------- +-- [E172] Type Error: tests/neg-custom-args/i13838.scala:10:5 ---------------------------------------------------------- 10 | foo // error | ^ - |No given instance of type Order[X] was found for parameter x$1 of method foo in object FooT - | - |where: X is a type variable - |. + |No given instance of type Order[X] was found for parameter x$1 of method foo in object FooT. |I found: | | FooT.OrderFFooA[F, A](FooT.OrderFFooA[F, A](/* missing */summon[Order[F[Foo[A]]]])) | - |But given instance OrderFFooA in object FooT produces a diverging implicit search when trying to match type Order[F[Foo[A]]]. + |But given instance OrderFFooA in object FooT produces a diverging implicit search when trying to match type Order[F[Foo[A]]] + | + |where: X is a type variable + |. 
-- [E168] Type Warning: tests/neg-custom-args/i13838.scala:10:5 -------------------------------------------------------- 10 | foo // error | ^ diff --git a/tests/neg-custom-args/i4060.scala b/tests/neg-custom-args/i4060.scala deleted file mode 100644 index 3d5c180b5d7b..000000000000 --- a/tests/neg-custom-args/i4060.scala +++ /dev/null @@ -1,22 +0,0 @@ -class X { type R } -class T(erased val a: X)(val value: a.R) - -object App { - def coerce[U, V](u: U): V = { - trait X { type R >: U } - trait Y { type R = V } - - class T[A <: X](erased val a: A)(val value: a.R) // error - - object O { lazy val x : Y & X = ??? } - - val a = new T[Y & X](O.x)(u) - a.value - } - - def main(args: Array[String]): Unit = { - val x: Int = coerce[String, Int]("a") - println(x + 1) - - } -} diff --git a/tests/neg-custom-args/isInstanceOf/enum-approx2.scala b/tests/neg-custom-args/isInstanceOf/enum-approx2.scala index 516b765ec64b..c7c8a6c4e1fb 100644 --- a/tests/neg-custom-args/isInstanceOf/enum-approx2.scala +++ b/tests/neg-custom-args/isInstanceOf/enum-approx2.scala @@ -7,4 +7,4 @@ class Test { case Fun(x: Exp[Int => String]) => ??? // error case _ => } -} \ No newline at end of file +} diff --git a/tests/neg-custom-args/isInstanceOf/i11178.scala b/tests/neg-custom-args/isInstanceOf/i11178.scala index 0d6867eba75f..47e8b4c3acab 100644 --- a/tests/neg-custom-args/isInstanceOf/i11178.scala +++ b/tests/neg-custom-args/isInstanceOf/i11178.scala @@ -36,4 +36,4 @@ object Test3 { case _: Bar[Boolean] => ??? // error case _ => ??? 
} -} \ No newline at end of file +} diff --git a/tests/neg-custom-args/isInstanceOf/i17435.scala b/tests/neg-custom-args/isInstanceOf/i17435.scala new file mode 100644 index 000000000000..e32149db3137 --- /dev/null +++ b/tests/neg-custom-args/isInstanceOf/i17435.scala @@ -0,0 +1,23 @@ +import scala.collection.mutable + +object Test: + type JsonPrimitive = String | Int | Double | Boolean | None.type + + type Rec[JA[_], JO[_], A] = A match + case JsonPrimitive => JsonPrimitive | JA[Rec[JA, JO, JsonPrimitive]] | JO[Rec[JA, JO, JsonPrimitive]] + case _ => A | JA[Rec[JA, JO, A]] | JO[Rec[JA, JO, A]] + + type Json = Rec[[A] =>> mutable.Buffer[A], [A] =>> mutable.Map[String, A], JsonPrimitive] + + type JsonObject = mutable.Map[String, Json] + + type JsonArray = mutable.Buffer[Json] + + def encode(x: Json): Int = x match + case str: String => 1 + case b: Boolean => 2 + case i: Int => 3 + case d: Double => 4 + case arr: JsonArray => 5 // error + case obj: JsonObject => 6 // error + case _ => 7 diff --git a/tests/neg-custom-args/isInstanceOf/i5826.scala b/tests/neg-custom-args/isInstanceOf/i5826.scala index bff95e740b4f..c63bf3ab4aef 100644 --- a/tests/neg-custom-args/isInstanceOf/i5826.scala +++ b/tests/neg-custom-args/isInstanceOf/i5826.scala @@ -1,6 +1,6 @@ class Foo { - def test[A]: List[Int] | A => Int = { - case ls: List[Int] => ls.head // error + def test[A]: (List[Int] | A) => Int = { + case ls: List[Int] => ls.head // error, A = List[String] case _ => 0 } @@ -17,4 +17,25 @@ class Foo { case ls: A[X] => 4 // error case _ => 0 } + + def test4[A](x: List[Int] | (A => Int)) = x match { + case ls: List[Int] => ls.head // error, List extends Int => T + case _ => 0 + } + + final class C[T] extends A[T] + + def test5[T](x: A[T] | B[T] | Option[T]): Boolean = x.isInstanceOf[C[String]] // error + + def test6[T](x: A[T] | B[T] | Option[T]): Boolean = x.isInstanceOf[C[T]] + + def test7[A](x: Option[Int] | (A => Int)) = x match { + case ls: Option[Int] => ls.head // OK, Option 
decomposes to Some and None + case _ => 0 + } + + def test8(x: List[Int] | A[String]) = x match { + case ls: List[Int] => ls.head // OK, List decomposes to :: and Nil + case _ => 0 + } } diff --git a/tests/neg-custom-args/isInstanceOf/i8932.scala b/tests/neg-custom-args/isInstanceOf/i8932.scala index f77c28c7b0a7..84d2f7d4990a 100644 --- a/tests/neg-custom-args/isInstanceOf/i8932.scala +++ b/tests/neg-custom-args/isInstanceOf/i8932.scala @@ -9,4 +9,4 @@ def bugReport[A](foo: Foo[A]): Foo[A] = case dummy: Dummy => ??? } -def test = bugReport(new Dummy: Foo[String]) \ No newline at end of file +def test = bugReport(new Dummy: Foo[String]) diff --git a/tests/neg-custom-args/no-experimental/14034.scala b/tests/neg-custom-args/no-experimental/14034.scala index c0b4cc6899db..ab824c43395e 100644 --- a/tests/neg-custom-args/no-experimental/14034.scala +++ b/tests/neg-custom-args/no-experimental/14034.scala @@ -7,6 +7,6 @@ type Foo0 = Exp // error type Foo = Option[Exp] // error type Bar = Option[exp.type] // error type Baz = Exp | Int // error -type Quux = [X] =>> X match // error - case Exp => Int +type Quux = [X] =>> X match + case Exp => Int // error type Quuz[A <: Exp] = Int // error diff --git a/tests/neg-custom-args/no-experimental/experimental-nested-imports-2.scala b/tests/neg-custom-args/no-experimental/experimental-nested-imports-2.scala index 85076cca723a..a4962c6153a0 100644 --- a/tests/neg-custom-args/no-experimental/experimental-nested-imports-2.scala +++ b/tests/neg-custom-args/no-experimental/experimental-nested-imports-2.scala @@ -1,7 +1,6 @@ import annotation.experimental class Class1: - import language.experimental.fewerBraces // error import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error import language.experimental.erasedDefinitions // ok: only check at erased definition @@ -9,7 +8,6 @@ class Class1: def g = 1 object Object1: - import language.experimental.fewerBraces // error import 
language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error import language.experimental.erasedDefinitions // ok: only check at erased definition @@ -17,7 +15,6 @@ object Object1: def g = 1 def fun1 = - import language.experimental.fewerBraces // error import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error import language.experimental.erasedDefinitions // ok: only check at erased definition @@ -25,7 +22,6 @@ def fun1 = def g = 1 val value1 = - import language.experimental.fewerBraces // error import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error import language.experimental.erasedDefinitions // ok: only check at erased definition diff --git a/tests/neg-custom-args/no-experimental/experimental-nested-imports-3.scala b/tests/neg-custom-args/no-experimental/experimental-nested-imports-3.scala index 1af04918b1d9..77fbe41479d2 100644 --- a/tests/neg-custom-args/no-experimental/experimental-nested-imports-3.scala +++ b/tests/neg-custom-args/no-experimental/experimental-nested-imports-3.scala @@ -1,25 +1,21 @@ import annotation.experimental class Class1: - import language.experimental.fewerBraces // error import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error import language.experimental.erasedDefinitions // ok: only check at erased definition object Object1: - import language.experimental.fewerBraces // error import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error import language.experimental.erasedDefinitions // ok: only check at erased definition def fun1 = - import language.experimental.fewerBraces // error import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error import language.experimental.erasedDefinitions // ok: 
only check at erased definition val value1 = - import language.experimental.fewerBraces // error import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error import language.experimental.erasedDefinitions // ok: only check at erased definition diff --git a/tests/neg-custom-args/no-experimental/experimental-nested-imports.scala b/tests/neg-custom-args/no-experimental/experimental-nested-imports.scala index b9fc38dc4915..180c43b9f671 100644 --- a/tests/neg-custom-args/no-experimental/experimental-nested-imports.scala +++ b/tests/neg-custom-args/no-experimental/experimental-nested-imports.scala @@ -1,28 +1,24 @@ import annotation.experimental class Class1: - import language.experimental.fewerBraces // error import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error import language.experimental.erasedDefinitions // ok: only check at erased definition @experimental def f = 1 object Object1: - import language.experimental.fewerBraces // error import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error import language.experimental.erasedDefinitions // ok: only check at erased definition @experimental def f = 1 def fun1 = - import language.experimental.fewerBraces // error import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error import language.experimental.erasedDefinitions // ok: only check at erased definition @experimental def f = 1 val value1 = - import language.experimental.fewerBraces // error import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error import language.experimental.erasedDefinitions // ok: only check at erased definition diff --git a/tests/neg-custom-args/no-experimental/experimental-package-imports.scala 
b/tests/neg-custom-args/no-experimental/experimental-package-imports.scala index 90ec387b1036..047b3eb61e82 100644 --- a/tests/neg-custom-args/no-experimental/experimental-package-imports.scala +++ b/tests/neg-custom-args/no-experimental/experimental-package-imports.scala @@ -1,7 +1,6 @@ import annotation.experimental package foo { - import language.experimental.fewerBraces // error import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error import language.experimental.erasedDefinitions // ok: only check at erased definition @@ -13,7 +12,6 @@ package foo { package foo2 { // ok: all definitions are top-level @experimental - import language.experimental.fewerBraces import language.experimental.namedTypeArguments import language.experimental.genericNumberLiterals import language.experimental.erasedDefinitions diff --git a/tests/neg-custom-args/no-experimental/experimentalInheritance.scala b/tests/neg-custom-args/no-experimental/experimentalInheritance.scala deleted file mode 100644 index f6eab1224310..000000000000 --- a/tests/neg-custom-args/no-experimental/experimentalInheritance.scala +++ /dev/null @@ -1,14 +0,0 @@ -import scala.annotation.experimental - -@experimental def x = 2 - -@experimental class A1(x: Any) -class A2(x: Any) - - -@experimental class B1 extends A1(1) -class B2 // error: extension of experimental class A1 must have @experimental annotation -extends A1(1) // error: class A1 is marked @experimental ... 
- -@experimental class C1 extends A2(x) -class C2 extends A2(x) // error def x is marked @experimental and therefore diff --git a/tests/neg-custom-args/no-experimental/experimentalInline.scala b/tests/neg-custom-args/no-experimental/experimentalInline.scala index 8827fd42e36a..eb49bf15d11a 100644 --- a/tests/neg-custom-args/no-experimental/experimentalInline.scala +++ b/tests/neg-custom-args/no-experimental/experimentalInline.scala @@ -4,5 +4,5 @@ import scala.annotation.experimental inline def g() = () def test: Unit = - g() // errors + g() // error () diff --git a/tests/neg-custom-args/no-experimental/experimentalInline2.scala b/tests/neg-custom-args/no-experimental/experimentalInline2.scala new file mode 100644 index 000000000000..c40eb050a832 --- /dev/null +++ b/tests/neg-custom-args/no-experimental/experimentalInline2.scala @@ -0,0 +1,8 @@ +import scala.annotation.experimental + +@experimental +transparent inline def g() = () + +def test: Unit = + g() // error + () diff --git a/tests/neg-custom-args/no-experimental/i17292.scala b/tests/neg-custom-args/no-experimental/i17292.scala new file mode 100644 index 000000000000..381d252dbea8 --- /dev/null +++ b/tests/neg-custom-args/no-experimental/i17292.scala @@ -0,0 +1,7 @@ +import annotation.experimental + +class Foo { @experimental type Bar = (Int, String) } + +val f: Foo = Foo() + +def g: Tuple.Elem[f.Bar, 0] = ??? // error diff --git a/tests/neg-custom-args/no-experimental/i17292b.scala b/tests/neg-custom-args/no-experimental/i17292b.scala new file mode 100644 index 000000000000..f644dd60ecd5 --- /dev/null +++ b/tests/neg-custom-args/no-experimental/i17292b.scala @@ -0,0 +1,21 @@ +import annotation.experimental +type A[T] = Int +class Foo { + @experimental type Bar = (Int, String) +} + +type Elem1[X <: Tuple, N <: Int] = X match { case x *: xs => N match { case 0 => x } } +type Elem2[X <: Tuple, N <: Int] + +val f: Foo = Foo() + +def bar1: f.Bar = ??? // error +def bar2 = // error + ??? 
: f.Bar // error + +def g0: Elem1[f.Bar, 0] = ??? // error +def g1(a: Elem1[f.Bar, 0]) = ??? // error +def g2 = + ??? : Elem1[f.Bar, 0] // error + +def h: Elem2[f.Bar, 0] = ??? // error diff --git a/tests/neg-macros/annot-MacroAnnotation-direct.check b/tests/neg-macros/annot-MacroAnnotation-direct.check new file mode 100644 index 000000000000..580b2bcc7639 --- /dev/null +++ b/tests/neg-macros/annot-MacroAnnotation-direct.check @@ -0,0 +1,6 @@ +-- [E042] Type Error: tests/neg-macros/annot-MacroAnnotation-direct.scala:3:0 ------------------------------------------ +3 |@MacroAnnotation // error + |^^^^^^^^^^^^^^^^ + |MacroAnnotation is a trait; it cannot be instantiated + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-macros/annot-MacroAnnotation-direct.scala b/tests/neg-macros/annot-MacroAnnotation-direct.scala new file mode 100644 index 000000000000..a0024457dc48 --- /dev/null +++ b/tests/neg-macros/annot-MacroAnnotation-direct.scala @@ -0,0 +1,4 @@ +import scala.annotation.MacroAnnotation + +@MacroAnnotation // error +def test = () diff --git a/tests/neg-macros/annot-accessIndirect/Macro_1.scala b/tests/neg-macros/annot-accessIndirect/Macro_1.scala new file mode 100644 index 000000000000..8679edcfc0c3 --- /dev/null +++ b/tests/neg-macros/annot-accessIndirect/Macro_1.scala @@ -0,0 +1,11 @@ +import scala.annotation.{experimental, MacroAnnotation} +import scala.quoted._ + +@experimental +class hello extends MacroAnnotation { + def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + import quotes.reflect._ + val helloSymbol = Symbol.newVal(Symbol.spliceOwner, Symbol.freshName("hello"), TypeRepr.of[String], Flags.EmptyFlags, Symbol.noSymbol) + val helloVal = ValDef(helloSymbol, Some(Literal(StringConstant("Hello, World!")))) + List(helloVal, tree) +} diff --git a/tests/neg-macros/annot-accessIndirect/Macro_2.scala b/tests/neg-macros/annot-accessIndirect/Macro_2.scala new file mode 100644 
index 000000000000..d069175ce166 --- /dev/null +++ b/tests/neg-macros/annot-accessIndirect/Macro_2.scala @@ -0,0 +1,18 @@ +import scala.annotation.{experimental, MacroAnnotation} +import scala.quoted._ + +@experimental +class foo extends MacroAnnotation { + def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + import quotes.reflect._ + val s = '{@hello def foo1(x: Int): Int = x + 1;()}.asTerm + val fooDef = s.asInstanceOf[Inlined].body.asInstanceOf[Block].statements.head.asInstanceOf[DefDef] + val hello = Ref(Symbol.spliceOwner.declaredFields("hello").head).asExprOf[String] // error + tree match + case DefDef(name, params, tpt, Some(t)) => + val rhs = '{ + ${t.asExprOf[String]} + $hello + }.asTerm + val newDef = DefDef.copy(tree)(name, params, tpt, Some(rhs)) + List(fooDef, newDef) +} diff --git a/tests/neg-macros/annot-accessIndirect/Test.scala b/tests/neg-macros/annot-accessIndirect/Test.scala new file mode 100644 index 000000000000..6e2bbd3d3361 --- /dev/null +++ b/tests/neg-macros/annot-accessIndirect/Test.scala @@ -0,0 +1,3 @@ +class Bar: + @foo def bar(x: String): String = x // error + bar("a") diff --git a/tests/neg-macros/annot-crash.check b/tests/neg-macros/annot-crash.check new file mode 100644 index 000000000000..16eb0f68bc44 --- /dev/null +++ b/tests/neg-macros/annot-crash.check @@ -0,0 +1,8 @@ + +-- Error: tests/neg-macros/annot-crash/Test_2.scala:1:0 ---------------------------------------------------------------- +1 |@crash // error + |^^^^^^ + |Failed to evaluate macro. 
+ | Caused by class scala.NotImplementedError: an implementation is missing + | scala.Predef$.$qmark$qmark$qmark(Predef.scala:344) + | crash.transform(Macro_1.scala:7) diff --git a/tests/neg-macros/annot-crash/Macro_1.scala b/tests/neg-macros/annot-crash/Macro_1.scala new file mode 100644 index 000000000000..f3d5b3f602f8 --- /dev/null +++ b/tests/neg-macros/annot-crash/Macro_1.scala @@ -0,0 +1,8 @@ +import scala.annotation.{experimental, MacroAnnotation} +import scala.quoted._ + +@experimental +class crash extends MacroAnnotation { + def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + ??? +} diff --git a/tests/neg-macros/annot-crash/Test_2.scala b/tests/neg-macros/annot-crash/Test_2.scala new file mode 100644 index 000000000000..3e8fd3cf785f --- /dev/null +++ b/tests/neg-macros/annot-crash/Test_2.scala @@ -0,0 +1,2 @@ +@crash // error +def test = () diff --git a/tests/neg-macros/annot-empty-result.check b/tests/neg-macros/annot-empty-result.check new file mode 100644 index 000000000000..6d43c19664cb --- /dev/null +++ b/tests/neg-macros/annot-empty-result.check @@ -0,0 +1,13 @@ + +-- Error: tests/neg-macros/annot-empty-result/Test_2.scala:5:2 --------------------------------------------------------- +5 | @nilAnnot // error + | ^^^^^^^^^ + | Unexpected `Nil` returned by `(new nilAnnot()).transform(..)` during macro expansion +-- Error: tests/neg-macros/annot-empty-result/Test_2.scala:9:4 --------------------------------------------------------- +9 | @nilAnnot // error + | ^^^^^^^^^ + | Unexpected `Nil` returned by `(new nilAnnot()).transform(..)` during macro expansion +-- Error: tests/neg-macros/annot-empty-result/Test_2.scala:1:0 --------------------------------------------------------- +1 |@nilAnnot // error + |^^^^^^^^^ + |Unexpected `Nil` returned by `(new nilAnnot()).transform(..)` during macro expansion diff --git a/tests/neg-macros/annot-empty-result/Macro_1.scala 
b/tests/neg-macros/annot-empty-result/Macro_1.scala new file mode 100644 index 000000000000..ff3be61c05d2 --- /dev/null +++ b/tests/neg-macros/annot-empty-result/Macro_1.scala @@ -0,0 +1,8 @@ +import scala.annotation.{experimental, MacroAnnotation} +import scala.quoted._ + +@experimental +class nilAnnot extends MacroAnnotation { + def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + Nil +} diff --git a/tests/neg-macros/annot-empty-result/Test_2.scala b/tests/neg-macros/annot-empty-result/Test_2.scala new file mode 100644 index 000000000000..84beeafecc24 --- /dev/null +++ b/tests/neg-macros/annot-empty-result/Test_2.scala @@ -0,0 +1,11 @@ +@nilAnnot // error +def f1 = 1 + +class B: + @nilAnnot // error + def f2 = 2 + + def test = + @nilAnnot // error + def f3 = 2 + () diff --git a/tests/neg-macros/annot-error-annot.check b/tests/neg-macros/annot-error-annot.check new file mode 100644 index 000000000000..f150b4561e2c --- /dev/null +++ b/tests/neg-macros/annot-error-annot.check @@ -0,0 +1,127 @@ + +-- Error: tests/neg-macros/annot-error-annot/Test_2.scala:17:6 --------------------------------------------------------- +16 |@error +17 |class cGlobal // error + |^ + |MACRO ERROR +-- Error: tests/neg-macros/annot-error-annot/Test_2.scala:20:7 --------------------------------------------------------- +19 |@error +20 |object oGlobal // error + |^ + |MACRO ERROR +-- Error: tests/neg-macros/annot-error-annot/Test_2.scala:24:6 --------------------------------------------------------- +23 | @error +24 | val vMember: Int = 1 // error + | ^ + | MACRO ERROR +-- Error: tests/neg-macros/annot-error-annot/Test_2.scala:26:11 -------------------------------------------------------- +25 | @error +26 | lazy val lvMember: Int = 1 // error + | ^ + | MACRO ERROR +-- Error: tests/neg-macros/annot-error-annot/Test_2.scala:28:6 --------------------------------------------------------- +27 | @error +28 | def dMember: Int = 1 // error + | ^ + | MACRO 
ERROR +-- Error: tests/neg-macros/annot-error-annot/Test_2.scala:30:8 --------------------------------------------------------- +29 | @error +30 | given gMember: Int = 1 // error + | ^ + | MACRO ERROR +-- Error: tests/neg-macros/annot-error-annot/Test_2.scala:32:8 --------------------------------------------------------- +31 | @error +32 | given gMember2: Num[Int] with // error + | ^ + | MACRO ERROR +33 | def zero = 0 +-- Error: tests/neg-macros/annot-error-annot/Test_2.scala:35:8 --------------------------------------------------------- +34 | @error +35 | given gMember3(using DummyImplicit): Num[Int] with // error + | ^ + | MACRO ERROR +36 | def zero = 0 +-- Error: tests/neg-macros/annot-error-annot/Test_2.scala:39:8 --------------------------------------------------------- +38 | @error +39 | class cMember // error + | ^ + | MACRO ERROR +-- Error: tests/neg-macros/annot-error-annot/Test_2.scala:42:9 --------------------------------------------------------- +41 | @error +42 | object oMember // error + | ^ + | MACRO ERROR +-- Error: tests/neg-macros/annot-error-annot/Test_2.scala:46:8 --------------------------------------------------------- +45 | @error +46 | val vLocal: Int = 1 // error + | ^ + | MACRO ERROR +-- Error: tests/neg-macros/annot-error-annot/Test_2.scala:48:13 -------------------------------------------------------- +47 | @error +48 | lazy val lvLocal: Int = 1 // error + | ^ + | MACRO ERROR +-- Error: tests/neg-macros/annot-error-annot/Test_2.scala:50:8 --------------------------------------------------------- +49 | @error +50 | def dLocal: Int = 1 // error + | ^ + | MACRO ERROR +-- Error: tests/neg-macros/annot-error-annot/Test_2.scala:52:10 -------------------------------------------------------- +51 | @error +52 | given gLocal: Int = 1 // error + | ^ + | MACRO ERROR +-- Error: tests/neg-macros/annot-error-annot/Test_2.scala:54:10 -------------------------------------------------------- +53 | @error +54 | given gLocal2: Num[Int] with // error + | ^ + 
| MACRO ERROR +55 | def zero = 0 +-- Error: tests/neg-macros/annot-error-annot/Test_2.scala:57:10 -------------------------------------------------------- +56 | @error +57 | given gLocal3(using DummyImplicit): Num[Int] with // error + | ^ + | MACRO ERROR +58 | def zero = 0 +-- Error: tests/neg-macros/annot-error-annot/Test_2.scala:61:10 -------------------------------------------------------- +60 | @error +61 | class cLocal // error + | ^ + | MACRO ERROR +-- Error: tests/neg-macros/annot-error-annot/Test_2.scala:63:11 -------------------------------------------------------- +62 | @error +63 | object oLocal // error + | ^ + | MACRO ERROR +-- Error: tests/neg-macros/annot-error-annot/Test_2.scala:2:4 ---------------------------------------------------------- +1 |@error +2 |val vGlobal: Int = 1 // error + |^ + |MACRO ERROR +-- Error: tests/neg-macros/annot-error-annot/Test_2.scala:4:9 ---------------------------------------------------------- +3 |@error +4 |lazy val lvGlobal: Int = 1 // error + |^ + |MACRO ERROR +-- Error: tests/neg-macros/annot-error-annot/Test_2.scala:6:4 ---------------------------------------------------------- +5 |@error +6 |def dGlobal: Int = 1 // error + |^ + |MACRO ERROR +-- Error: tests/neg-macros/annot-error-annot/Test_2.scala:8:6 ---------------------------------------------------------- +7 |@error +8 |given gGlobal: Int = 1 // error + |^ + |MACRO ERROR +-- Error: tests/neg-macros/annot-error-annot/Test_2.scala:10:6 --------------------------------------------------------- + 9 |@error +10 |given gGlobal2: Num[Int] with // error + |^ + |MACRO ERROR +11 | def zero = 0 +-- Error: tests/neg-macros/annot-error-annot/Test_2.scala:13:6 --------------------------------------------------------- +12 |@error +13 |given gGlobal3(using DummyImplicit): Num[Int] with // error + |^ + |MACRO ERROR +14 | def zero = 0 diff --git a/tests/neg-macros/annot-error-annot/Macro_1.scala b/tests/neg-macros/annot-error-annot/Macro_1.scala new file mode 100644 index 
000000000000..d54b69903e02 --- /dev/null +++ b/tests/neg-macros/annot-error-annot/Macro_1.scala @@ -0,0 +1,9 @@ +import scala.annotation.{experimental, MacroAnnotation} +import scala.quoted._ + +@experimental +class error extends MacroAnnotation { + def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + quotes.reflect.report.error("MACRO ERROR", tree.pos) + List(tree) +} diff --git a/tests/neg-macros/annot-error-annot/Test_2.scala b/tests/neg-macros/annot-error-annot/Test_2.scala new file mode 100644 index 000000000000..3325ba431127 --- /dev/null +++ b/tests/neg-macros/annot-error-annot/Test_2.scala @@ -0,0 +1,67 @@ +@error +val vGlobal: Int = 1 // error +@error +lazy val lvGlobal: Int = 1 // error +@error +def dGlobal: Int = 1 // error +@error +given gGlobal: Int = 1 // error +@error +given gGlobal2: Num[Int] with // error + def zero = 0 +@error +given gGlobal3(using DummyImplicit): Num[Int] with // error + def zero = 0 + +@error +class cGlobal // error + +@error +object oGlobal // error + +class B: + @error + val vMember: Int = 1 // error + @error + lazy val lvMember: Int = 1 // error + @error + def dMember: Int = 1 // error + @error + given gMember: Int = 1 // error + @error + given gMember2: Num[Int] with // error + def zero = 0 + @error + given gMember3(using DummyImplicit): Num[Int] with // error + def zero = 0 + + @error + class cMember // error + + @error + object oMember // error + + def locals: Unit = + @error + val vLocal: Int = 1 // error + @error + lazy val lvLocal: Int = 1 // error + @error + def dLocal: Int = 1 // error + @error + given gLocal: Int = 1 // error + @error + given gLocal2: Num[Int] with // error + def zero = 0 + @error + given gLocal3(using DummyImplicit): Num[Int] with // error + def zero = 0 + + @error + class cLocal // error + @error + object oLocal // error + () + +trait Num[T]: + def zero: T diff --git a/tests/neg-macros/annot-ill-abort.check b/tests/neg-macros/annot-ill-abort.check new 
file mode 100644 index 000000000000..b969b3ad4313 --- /dev/null +++ b/tests/neg-macros/annot-ill-abort.check @@ -0,0 +1,5 @@ + +-- Error: tests/neg-macros/annot-ill-abort/Test_2.scala:1:0 ------------------------------------------------------------ +1 |@crash // error + |^^^^^^ + |Macro expansion was aborted by the macro without any errors reported. Macros should issue errors to end-users when aborting a macro expansion with StopMacroExpansion. diff --git a/tests/neg-macros/annot-ill-abort/Macro_1.scala b/tests/neg-macros/annot-ill-abort/Macro_1.scala new file mode 100644 index 000000000000..446ce0a5331b --- /dev/null +++ b/tests/neg-macros/annot-ill-abort/Macro_1.scala @@ -0,0 +1,8 @@ +import scala.annotation.{experimental, MacroAnnotation} +import scala.quoted._ + +@experimental +class crash extends MacroAnnotation { + def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + throw new scala.quoted.runtime.StopMacroExpansion +} diff --git a/tests/neg-macros/annot-ill-abort/Test_2.scala b/tests/neg-macros/annot-ill-abort/Test_2.scala new file mode 100644 index 000000000000..3e8fd3cf785f --- /dev/null +++ b/tests/neg-macros/annot-ill-abort/Test_2.scala @@ -0,0 +1,2 @@ +@crash // error +def test = () diff --git a/tests/neg-macros/annot-mod-class-add-top-method.check b/tests/neg-macros/annot-mod-class-add-top-method.check new file mode 100644 index 000000000000..28fb93bb29db --- /dev/null +++ b/tests/neg-macros/annot-mod-class-add-top-method.check @@ -0,0 +1,9 @@ + +-- Error: tests/neg-macros/annot-mod-class-add-top-method/Test_2.scala:1:0 --------------------------------------------- +1 |@addTopLevelMethod // error + |^^^^^^^^^^^^^^^^^^ + |macro annotation can not add top-level method. @addTopLevelMethod tried to add method toLevelMethod$macro$1. 
+-- Error: tests/neg-macros/annot-mod-class-add-top-method/Test_2.scala:4:0 --------------------------------------------- +4 |@addTopLevelMethod // error + |^^^^^^^^^^^^^^^^^^ + |macro annotation can not add top-level method. @addTopLevelMethod tried to add method toLevelMethod$macro$2. diff --git a/tests/neg-macros/annot-mod-class-add-top-method/Macro_1.scala b/tests/neg-macros/annot-mod-class-add-top-method/Macro_1.scala new file mode 100644 index 000000000000..b5c49695ad2a --- /dev/null +++ b/tests/neg-macros/annot-mod-class-add-top-method/Macro_1.scala @@ -0,0 +1,17 @@ +import scala.annotation.{experimental, MacroAnnotation} +import scala.quoted._ +import scala.collection.mutable + +@experimental +class addTopLevelMethod extends MacroAnnotation: + def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + import quotes.reflect._ + tree match + case ClassDef(name, ctr, parents, self, body) => + val methType = MethodType(Nil)(_ => Nil, _ => TypeRepr.of[Int]) + val methSym = Symbol.newMethod(Symbol.spliceOwner, Symbol.freshName("toLevelMethod"), methType, Flags.EmptyFlags, Symbol.noSymbol) + val methDef = DefDef(methSym, _ => Some(Literal(IntConstant(1)))) + List(methDef, tree) + case _ => + report.error("Annotation only supports `class`") + List(tree) diff --git a/tests/neg-macros/annot-mod-class-add-top-method/Test_2.scala b/tests/neg-macros/annot-mod-class-add-top-method/Test_2.scala new file mode 100644 index 000000000000..eadeff0f060c --- /dev/null +++ b/tests/neg-macros/annot-mod-class-add-top-method/Test_2.scala @@ -0,0 +1,5 @@ +@addTopLevelMethod // error +class Foo + +@addTopLevelMethod // error +object Foo diff --git a/tests/neg-macros/annot-mod-class-add-top-val.check b/tests/neg-macros/annot-mod-class-add-top-val.check new file mode 100644 index 000000000000..bc21173923f1 --- /dev/null +++ b/tests/neg-macros/annot-mod-class-add-top-val.check @@ -0,0 +1,9 @@ + +-- Error: 
tests/neg-macros/annot-mod-class-add-top-val/Test_2.scala:1:0 ------------------------------------------------ +1 |@addTopLevelVal // error + |^^^^^^^^^^^^^^^ + |macro annotation can not add top-level value. @addTopLevelVal tried to add value toLevelVal$macro$1. +-- Error: tests/neg-macros/annot-mod-class-add-top-val/Test_2.scala:4:0 ------------------------------------------------ +4 |@addTopLevelVal // error + |^^^^^^^^^^^^^^^ + |macro annotation can not add top-level value. @addTopLevelVal tried to add value toLevelVal$macro$2. diff --git a/tests/neg-macros/annot-mod-class-add-top-val/Macro_1.scala b/tests/neg-macros/annot-mod-class-add-top-val/Macro_1.scala new file mode 100644 index 000000000000..c6f21e181879 --- /dev/null +++ b/tests/neg-macros/annot-mod-class-add-top-val/Macro_1.scala @@ -0,0 +1,16 @@ +import scala.annotation.{experimental, MacroAnnotation} +import scala.quoted._ +import scala.collection.mutable + +@experimental +class addTopLevelVal extends MacroAnnotation: + def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + import quotes.reflect._ + tree match + case ClassDef(name, ctr, parents, self, body) => + val valSym = Symbol.newVal(Symbol.spliceOwner, Symbol.freshName("toLevelVal"), TypeRepr.of[Int], Flags.EmptyFlags, Symbol.noSymbol) + val valDef = ValDef(valSym, Some(Literal(IntConstant(1)))) + List(valDef, tree) + case _ => + report.error("Annotation only supports `class`") + List(tree) diff --git a/tests/neg-macros/annot-mod-class-add-top-val/Test_2.scala b/tests/neg-macros/annot-mod-class-add-top-val/Test_2.scala new file mode 100644 index 000000000000..440e90bc1652 --- /dev/null +++ b/tests/neg-macros/annot-mod-class-add-top-val/Test_2.scala @@ -0,0 +1,5 @@ +@addTopLevelVal // error +class Foo + +@addTopLevelVal // error +object Foo diff --git a/tests/neg-macros/annot-mod-top-method-add-top-method/Macro_1.scala b/tests/neg-macros/annot-mod-top-method-add-top-method/Macro_1.scala new file mode 
100644 index 000000000000..45679b65c03b --- /dev/null +++ b/tests/neg-macros/annot-mod-top-method-add-top-method/Macro_1.scala @@ -0,0 +1,13 @@ +import scala.annotation.{experimental, MacroAnnotation} +import scala.quoted._ +import scala.collection.mutable + +@experimental +// Assumes annotation is on top level def or val +class addTopLevelMethodOutsidePackageObject extends MacroAnnotation: + def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + import quotes.reflect._ + val methType = MethodType(Nil)(_ => Nil, _ => TypeRepr.of[Int]) + val methSym = Symbol.newMethod(Symbol.spliceOwner.owner, Symbol.freshName("toLevelMethod"), methType, Flags.EmptyFlags, Symbol.noSymbol) + val methDef = DefDef(methSym, _ => Some(Literal(IntConstant(1)))) + List(methDef, tree) diff --git a/tests/neg-macros/annot-mod-top-method-add-top-method/Test_2.scala b/tests/neg-macros/annot-mod-top-method-add-top-method/Test_2.scala new file mode 100644 index 000000000000..151b722a0dda --- /dev/null +++ b/tests/neg-macros/annot-mod-top-method-add-top-method/Test_2.scala @@ -0,0 +1,5 @@ +@addTopLevelMethodOutsidePackageObject // error +def foo = 1 + +@addTopLevelMethodOutsidePackageObject // error +val bar = 1 diff --git a/tests/neg-macros/annot-mod-top-method-add-top-val/Macro_1.scala b/tests/neg-macros/annot-mod-top-method-add-top-val/Macro_1.scala new file mode 100644 index 000000000000..c6c4c32afcb8 --- /dev/null +++ b/tests/neg-macros/annot-mod-top-method-add-top-val/Macro_1.scala @@ -0,0 +1,12 @@ +import scala.annotation.{experimental, MacroAnnotation} +import scala.quoted._ +import scala.collection.mutable + +@experimental +// Assumes annotation is on top level def or val +class addTopLevelValOutsidePackageObject extends MacroAnnotation: + def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + import quotes.reflect._ + val valSym = Symbol.newVal(Symbol.spliceOwner.owner, Symbol.freshName("toLevelVal"), 
TypeRepr.of[Int], Flags.EmptyFlags, Symbol.noSymbol) + val valDef = ValDef(valSym, Some(Literal(IntConstant(1)))) + List(valDef, tree) diff --git a/tests/neg-macros/annot-mod-top-method-add-top-val/Test_2.scala b/tests/neg-macros/annot-mod-top-method-add-top-val/Test_2.scala new file mode 100644 index 000000000000..076a636267ab --- /dev/null +++ b/tests/neg-macros/annot-mod-top-method-add-top-val/Test_2.scala @@ -0,0 +1,5 @@ +@addTopLevelValOutsidePackageObject // error +def foo = 1 + +@addTopLevelValOutsidePackageObject // error +val bar = 1 diff --git a/tests/neg-macros/annot-nested.scala b/tests/neg-macros/annot-nested.scala new file mode 100644 index 000000000000..4365e41eefff --- /dev/null +++ b/tests/neg-macros/annot-nested.scala @@ -0,0 +1,42 @@ +import scala.annotation.{experimental, MacroAnnotation} +import scala.quoted._ + +class Foo: + @experimental + class void extends MacroAnnotation: // error + def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = List(tree) + + object Bar: + @experimental + class void extends MacroAnnotation: // error + def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = List(tree) + +class Foo2: + @experimental + trait void extends MacroAnnotation // error + + object Bar: + @experimental + trait void extends MacroAnnotation // error + +def test: Unit = + @experimental + class void extends MacroAnnotation: // error + def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = List(tree) + + trait void2 extends MacroAnnotation // error + + new MacroAnnotation {} // error + + () + +val test2: Unit = + @experimental + class void extends MacroAnnotation: // error + def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = List(tree) + + trait void2 extends MacroAnnotation // error + + new MacroAnnotation {} // error + + () diff --git a/tests/neg-macros/annot-on-type.check 
b/tests/neg-macros/annot-on-type.check new file mode 100644 index 000000000000..3844c3eeebe9 --- /dev/null +++ b/tests/neg-macros/annot-on-type.check @@ -0,0 +1,16 @@ + +-- Error: tests/neg-macros/annot-on-type/Test_2.scala:6:7 -------------------------------------------------------------- +5 | @voidAnnot +6 | type C // error + | ^ + | macro annotations are not supported on type +-- Error: tests/neg-macros/annot-on-type/Test_2.scala:10:9 ------------------------------------------------------------- + 9 | @voidAnnot +10 | type D // error + | ^ + | macro annotations are not supported on type +-- Error: tests/neg-macros/annot-on-type/Test_2.scala:2:5 -------------------------------------------------------------- +1 |@voidAnnot +2 |type A // error + |^ + |macro annotations are not supported on type diff --git a/tests/neg-macros/annot-on-type/Macro_1.scala b/tests/neg-macros/annot-on-type/Macro_1.scala new file mode 100644 index 000000000000..7468c5a200a6 --- /dev/null +++ b/tests/neg-macros/annot-on-type/Macro_1.scala @@ -0,0 +1,8 @@ +import scala.annotation.{experimental, MacroAnnotation} +import scala.quoted._ + +@experimental +class voidAnnot extends MacroAnnotation { + def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + List(tree) +} diff --git a/tests/neg-macros/annot-on-type/Test_2.scala b/tests/neg-macros/annot-on-type/Test_2.scala new file mode 100644 index 000000000000..4dfe1cc76d42 --- /dev/null +++ b/tests/neg-macros/annot-on-type/Test_2.scala @@ -0,0 +1,11 @@ +@voidAnnot +type A // error + +object B: + @voidAnnot + type C // error + + def test = + @voidAnnot + type D // error + () diff --git a/tests/neg-macros/annot-result-owner.check b/tests/neg-macros/annot-result-owner.check new file mode 100644 index 000000000000..5d67be058fdf --- /dev/null +++ b/tests/neg-macros/annot-result-owner.check @@ -0,0 +1,9 @@ + +-- Error: tests/neg-macros/annot-result-owner/Test_2.scala:1:0 
--------------------------------------------------------- +1 |@insertVal // error + |^^^^^^^^^^ + |macro annotation @insertVal added value definitionWithWrongOwner$macro$1 with an inconsistent owner. Expected it to be owned by package object Test_2$package but was owned by method foo. +-- Error: tests/neg-macros/annot-result-owner/Test_2.scala:5:2 --------------------------------------------------------- +5 | @insertVal // error + | ^^^^^^^^^^ + |macro annotation @insertVal added value definitionWithWrongOwner$macro$2 with an inconsistent owner. Expected it to be owned by method bar but was owned by method foo. diff --git a/tests/neg-macros/annot-result-owner/Macro_1.scala b/tests/neg-macros/annot-result-owner/Macro_1.scala new file mode 100644 index 000000000000..34f7541f726b --- /dev/null +++ b/tests/neg-macros/annot-result-owner/Macro_1.scala @@ -0,0 +1,11 @@ +import scala.annotation.{experimental, MacroAnnotation} +import scala.quoted._ + +@experimental +class insertVal extends MacroAnnotation: + def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + import quotes.reflect._ + // Use of wrong owner + val valSym = Symbol.newVal(tree.symbol, Symbol.freshName("definitionWithWrongOwner"), TypeRepr.of[Unit], Flags.Private, Symbol.noSymbol) + val valDef = ValDef(valSym, Some('{}.asTerm)) + List(valDef, tree) diff --git a/tests/neg-macros/annot-result-owner/Test_2.scala b/tests/neg-macros/annot-result-owner/Test_2.scala new file mode 100644 index 000000000000..5bcebb1ecf76 --- /dev/null +++ b/tests/neg-macros/annot-result-owner/Test_2.scala @@ -0,0 +1,6 @@ +@insertVal // error +def foo(): Unit = () + +def bar = + @insertVal // error + def foo(): Unit = () diff --git a/tests/neg-macros/annot-suspend-cycle.check b/tests/neg-macros/annot-suspend-cycle.check new file mode 100644 index 000000000000..237cbe4188b2 --- /dev/null +++ b/tests/neg-macros/annot-suspend-cycle.check @@ -0,0 +1,12 @@ +-- [E129] Potential Issue Warning: 
tests/neg-macros/annot-suspend-cycle/Macro.scala:7:4 -------------------------------- +7 | new Foo + | ^^^^^^^ + | A pure expression does nothing in statement position; you may be omitting necessary parentheses + | + | longer explanation available when compiling with `-explain` +Cyclic macro dependencies in tests/neg-macros/annot-suspend-cycle/Test.scala. +Compilation stopped since no further progress can be made. + +To fix this, place macros in one set of files and their callers in another. + +Compiling with -Xprint-suspension gives more information. diff --git a/tests/neg-macros/annot-suspend-cycle/Macro.scala b/tests/neg-macros/annot-suspend-cycle/Macro.scala new file mode 100644 index 000000000000..4143e2c32062 --- /dev/null +++ b/tests/neg-macros/annot-suspend-cycle/Macro.scala @@ -0,0 +1,9 @@ +import scala.annotation.{experimental, MacroAnnotation} +import scala.quoted._ + +@experimental +class cycle extends MacroAnnotation { + def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + new Foo + List(tree) +} diff --git a/tests/neg-macros/annot-suspend-cycle/Test.scala b/tests/neg-macros/annot-suspend-cycle/Test.scala new file mode 100644 index 000000000000..c1e1289742c1 --- /dev/null +++ b/tests/neg-macros/annot-suspend-cycle/Test.scala @@ -0,0 +1,5 @@ +// nopos-error +class Foo + +@cycle +def test = () diff --git a/tests/neg-macros/i10127-a.scala b/tests/neg-macros/i10127-a.scala index 3e23cf10bd30..2da4d0924870 100644 --- a/tests/neg-macros/i10127-a.scala +++ b/tests/neg-macros/i10127-a.scala @@ -1,7 +1,7 @@ import scala.quoted.* object T { - def impl[A](using t: Type[A])(using Quotes): Expr[Unit] = { + def impl[A](t: Type[A])(using Quotes): Expr[Unit] = { Expr.summon[t.Underlying] // error '{} } diff --git a/tests/neg-macros/i10127-b.scala b/tests/neg-macros/i10127-b.scala index 2e87e92efa63..13992bf95362 100644 --- a/tests/neg-macros/i10127-b.scala +++ b/tests/neg-macros/i10127-b.scala @@ -4,7 +4,7 @@ case class 
T(x: Type[_ <: Any]) object T { def impl[A](t: T)(using ctx: Quotes): Expr[Unit] = { - Expr.summon[t.x.Underlying] // error // error + Expr.summon[t.x.Underlying] // error '{} } } \ No newline at end of file diff --git a/tests/neg-macros/i13376a.scala b/tests/neg-macros/i13376a.scala new file mode 100644 index 000000000000..563513eed232 --- /dev/null +++ b/tests/neg-macros/i13376a.scala @@ -0,0 +1,6 @@ +import scala.quoted.* +trait C: + type T + def foo: T +inline def makro(inline x: C): x.T = ${ impl[x.type]('x) } // error // error +def impl[CC <: C](xp: Expr[CC])(using Quotes): Expr[CC#T] = '{ $xp.foo } diff --git a/tests/neg-macros/i13376b.scala b/tests/neg-macros/i13376b.scala new file mode 100644 index 000000000000..8866c24102fd --- /dev/null +++ b/tests/neg-macros/i13376b.scala @@ -0,0 +1,6 @@ +import scala.quoted.* +trait C: + type T + def foo: T +inline def makro(x: C): x.T = ${ impl[x.type]('x) } +def impl[CC <: C](xp: Expr[CC])(using Quotes): Expr[CC#T] = '{ $xp.foo } // error diff --git a/tests/neg-macros/i14123a.scala b/tests/neg-macros/i14123a.scala new file mode 100644 index 000000000000..29978f85102c --- /dev/null +++ b/tests/neg-macros/i14123a.scala @@ -0,0 +1,4 @@ +import scala.quoted._ + +def f(foo: Any => Any)(using Quotes): Expr[Any] = + '{ println(${ foo[Int]('{???}); ??? 
}) } // error diff --git a/tests/neg-macros/i14123b.scala b/tests/neg-macros/i14123b.scala new file mode 100644 index 000000000000..80cadf518766 --- /dev/null +++ b/tests/neg-macros/i14123b.scala @@ -0,0 +1,23 @@ +package x + +import scala.quoted._ + +object Impl { + + sealed trait UpdateOp[+T] + case class Assignment[T](value:Expr[T]) extends UpdateOp[T] + case class Update(operation:Expr[Unit]) extends UpdateOp[Nothing] + + def genRead[B:Type](newBuilder: Expr[B], + readVal: (Expr[B]) => UpdateOp[B] + )(using Quotes): Expr[B] = + '{ + var x = $newBuilder + ${readVal[B]('x) match { // error: method apply in trait Function1 does not take type parameters + case Assignment(value) => '{ x = $value } // error + case Update(operation) => operation // error + }} + x + } + +} diff --git a/tests/neg-macros/i15917.scala b/tests/neg-macros/i15917.scala new file mode 100644 index 000000000000..3eecc38b21f9 --- /dev/null +++ b/tests/neg-macros/i15917.scala @@ -0,0 +1,6 @@ +import scala.quoted.* + +def m(using Quotes): Expr[Option[_]] = + val s = 3 + type st = s.type + '{ Some(${ Expr(s) }: st) } // error diff --git a/tests/neg-macros/i16355a.scala b/tests/neg-macros/i16355a.scala new file mode 100644 index 000000000000..8870b7777263 --- /dev/null +++ b/tests/neg-macros/i16355a.scala @@ -0,0 +1,35 @@ +//> using scala "3.2.1" +import scala.quoted.Expr +import scala.quoted.Type +import scala.quoted.quotes +import scala.quoted.Quotes + +object macros { + + inline transparent def mkNames[A]: List[Any] = ${ mkNamesImpl[A] } + + def mkNamesImpl[A: Type](using Quotes): Expr[List[Any]] = { + import quotes.reflect._ + + val fieldNames = TypeRepr.of[A].typeSymbol.declaredFields.map(_.name) + + val types = fieldNames + .map { f => + val t1 = ConstantType(StringConstant(f)) + t1.asType match { + case '[t1Type] => TypeRepr.of[(t1Type, "aa")] + } + } + .reduceLeft[TypeRepr](OrType(_, _)) + + types.asType match { + case '[ttt] => + Expr.ofList[ttt]( + fieldNames.map { v => + Expr[(v.type, 
"aa")](v -> "aa").asExprOf[ttt] // error + } + ) + } + } + +} diff --git a/tests/neg-macros/i16355b.scala b/tests/neg-macros/i16355b.scala new file mode 100644 index 000000000000..763810979ddf --- /dev/null +++ b/tests/neg-macros/i16355b.scala @@ -0,0 +1,4 @@ +import scala.quoted._ +def test(v: String)(using Quotes): Any = + Type.of : Type[v.type] // error + Type.of[v.type] // error diff --git a/tests/neg-macros/i16532.check b/tests/neg-macros/i16532.check new file mode 100644 index 000000000000..45dc9d07dcaf --- /dev/null +++ b/tests/neg-macros/i16532.check @@ -0,0 +1,8 @@ +-- Error: tests/neg-macros/i16532.scala:7:13 --------------------------------------------------------------------------- +7 | val x2 = recurseII($a, $b) // error + | ^^^^^^^^^ + |access to method recurseII from wrong staging level: + | - the definition is at level 0, + | - but the access is at level 1. + | + |Hint: Staged references to inline definition in quotes are only inlined after the quote is spliced into level 0 code by a macro. Try moving this inline definition in a statically accessible location such as an object (this definition can be private). diff --git a/tests/neg-macros/i16532.scala b/tests/neg-macros/i16532.scala new file mode 100644 index 000000000000..d1edfdd80088 --- /dev/null +++ b/tests/neg-macros/i16532.scala @@ -0,0 +1,9 @@ +import scala.quoted.* + +def power0Impl(a: Expr[Int], b: Expr[Int])(using Quotes): Expr[Int] = + inline def recurseII(a:Int, n:Int): Int = ??? + + '{ + val x2 = recurseII($a, $b) // error + x2 + } diff --git a/tests/neg-macros/i16582.check b/tests/neg-macros/i16582.check new file mode 100644 index 000000000000..c06fe0d9829f --- /dev/null +++ b/tests/neg-macros/i16582.check @@ -0,0 +1,15 @@ + +-- Error: tests/neg-macros/i16582/Test_2.scala:5:27 -------------------------------------------------------------------- +5 | val o2 = ownerDoesNotWork(2) // error + | ^^^^^^^^^^^^^^^^^^^ + | Exception occurred while executing macro expansion. 
+ | dotty.tools.dotc.core.CyclicReference: Recursive value o2 needs type + | + | See full stack trace using -Ydebug + |--------------------------------------------------------------------------------------------------------------------- + |Inline stack trace + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from Macro_1.scala:7 +7 | ${ownerWorksImpl('in)} + | ^^^^^^^^^^^^^^^^^^^^^^ + --------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg-macros/i16582/Macro_1.scala b/tests/neg-macros/i16582/Macro_1.scala new file mode 100644 index 000000000000..c28c83166607 --- /dev/null +++ b/tests/neg-macros/i16582/Macro_1.scala @@ -0,0 +1,28 @@ +import scala.quoted.* + +inline def ownerWorks(in: Int): Any = + ${ownerWorksImpl('in)} + +transparent inline def ownerDoesNotWork(in: Int): Any = + ${ownerWorksImpl('in)} + +def ownerWorksImpl(in: Expr[Int])(using Quotes): Expr[String] = + import quotes.reflect.* + val position = Position.ofMacroExpansion + val file = position.sourceFile + val owner0 = Symbol.spliceOwner.maybeOwner + val ownerName = owner0.tree match { + case ValDef(name, _, _) => + name + case DefDef(name, _, _, _) => + name + case t => report.errorAndAbort(s"unexpected tree shape: ${t.show}") + } + val path = file.path + val line = position.startLine + val column = position.startColumn + val v = in.valueOrAbort + val out = Expr(s"val $ownerName $v: $file @ ${position.startLine}") + out + + diff --git a/tests/neg-macros/i16582/Test_2.scala b/tests/neg-macros/i16582/Test_2.scala new file mode 100644 index 000000000000..7cfd65febd00 --- /dev/null +++ b/tests/neg-macros/i16582/Test_2.scala @@ -0,0 +1,6 @@ +def test= + val o1 = ownerWorks(1) + println(o1) + + val o2 = ownerDoesNotWork(2) // error + println(o2) diff --git a/tests/neg-macros/i16835.check 
b/tests/neg-macros/i16835.check new file mode 100644 index 000000000000..fb02f3c7f13f --- /dev/null +++ b/tests/neg-macros/i16835.check @@ -0,0 +1,6 @@ + +-- Error: tests/neg-macros/i16835/Test_2.scala:1:17 -------------------------------------------------------------------- +1 |def test: Unit = foo // error + | ^^^ + | my error + | my second error diff --git a/tests/neg-macros/i16835/Macro_1.scala b/tests/neg-macros/i16835/Macro_1.scala new file mode 100644 index 000000000000..ddee5dbecb4e --- /dev/null +++ b/tests/neg-macros/i16835/Macro_1.scala @@ -0,0 +1,21 @@ +import scala.quoted.* + +class Bar + +inline def foo: Unit = ${ fooExpr } + +def fooExpr(using Quotes): Expr[Unit] = + import quotes.reflect.* + Implicits.search(TypeRepr.of[Bar]) match + case res: ImplicitSearchSuccess => '{} + case failure: ImplicitSearchFailure => + report.errorAndAbort(failure.explanation) + + +inline given bar: Bar = ${ barExpr } + +def barExpr(using Quotes): Expr[Bar] = + import quotes.reflect.* + report.error(s"my error") + report.error(s"my second error") + '{ new Bar } diff --git a/tests/neg-macros/i16835/Test_2.scala b/tests/neg-macros/i16835/Test_2.scala new file mode 100644 index 000000000000..0dc2d39d6c3d --- /dev/null +++ b/tests/neg-macros/i16835/Test_2.scala @@ -0,0 +1 @@ +def test: Unit = foo // error diff --git a/tests/neg-macros/i17103.scala b/tests/neg-macros/i17103.scala new file mode 100644 index 000000000000..bd4b41d8b559 --- /dev/null +++ b/tests/neg-macros/i17103.scala @@ -0,0 +1,16 @@ +import scala.quoted.* + +def test(using Quotes): Expr[Unit] = + '{ + trait C: + def d: Int + val c: C = ??? + ${ + val expr = '{ + val cRef: c.type = ??? 
+ cRef.d // error + () + } + expr + } + } \ No newline at end of file diff --git a/tests/neg-macros/i17351/Macro_1.scala b/tests/neg-macros/i17351/Macro_1.scala new file mode 100644 index 000000000000..b80999e1bce5 --- /dev/null +++ b/tests/neg-macros/i17351/Macro_1.scala @@ -0,0 +1,16 @@ +import scala.quoted.* + +inline def gen: Unit = ${ genImpl } + +def genImpl(using Quotes): Expr[Unit] = { + import quotes.reflect.* + + val valDefSymbol = Symbol.newVal(Symbol.spliceOwner, "bar", TypeRepr.of[Unit], Flags.EmptyFlags, Symbol.spliceOwner) + + val valDef = ValDef(valDefSymbol, Some('{ () }.asTerm)) + + Block( + List(valDef), + '{ () }.asTerm + ).asExprOf[Unit] +} diff --git a/tests/neg-macros/i17351/Test_2.scala b/tests/neg-macros/i17351/Test_2.scala new file mode 100644 index 000000000000..209c23204ad3 --- /dev/null +++ b/tests/neg-macros/i17351/Test_2.scala @@ -0,0 +1 @@ +val foo = gen // error diff --git a/tests/neg-macros/i6436.check b/tests/neg-macros/i6436.check index 9c422fa11b99..d563abb5424c 100644 --- a/tests/neg-macros/i6436.check +++ b/tests/neg-macros/i6436.check @@ -1,7 +1,9 @@ --- Error: tests/neg-macros/i6436.scala:5:9 ----------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg-macros/i6436.scala:5:9 ----------------------------------------------------------------- 5 | case '{ StringContext(${Varargs(parts)}*) } => // error | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | No given instance of type scala.quoted.Quotes was found + | + | longer explanation available when compiling with `-explain` -- [E006] Not Found Error: tests/neg-macros/i6436.scala:6:34 ----------------------------------------------------------- 6 | val ps: Seq[Expr[String]] = parts // error | ^^^^^ diff --git a/tests/neg-macros/i6762.scala b/tests/neg-macros/i6762.scala index a8df289b26c2..054945e213d6 100644 --- a/tests/neg-macros/i6762.scala +++ b/tests/neg-macros/i6762.scala @@ -2,4 +2,4 @@ import scala.quoted.* type G[X] case class 
Foo[T](x: T) -def f(word: String)(using Quotes): Expr[Foo[G[String]]] = '{Foo(${Expr(word)})} // error // error +def f(word: String)(using Quotes): Expr[Foo[G[String]]] = '{Foo(${Expr(word)})} // error diff --git a/tests/neg-macros/i6991.check b/tests/neg-macros/i6991.check new file mode 100644 index 000000000000..57d611a09053 --- /dev/null +++ b/tests/neg-macros/i6991.check @@ -0,0 +1,10 @@ +-- [E050] Type Error: tests/neg-macros/i6991.scala:11:14 --------------------------------------------------------------- +11 | case '{($x: Foo)($bar: String)} => '{"Hello World"} // error + | ^^^^^^^ + | expression does not take parameters + | + | longer explanation available when compiling with `-explain` +-- [E008] Not Found Error: tests/neg-macros/i6991.scala:12:23 ---------------------------------------------------------- +12 | case '{($x: Foo).apply($bar: String)} => '{"Hello World"} // error + | ^^^^^^^^^^^^^^^ + | value apply is not a member of macros.Foo diff --git a/tests/neg-macros/i6991.scala b/tests/neg-macros/i6991.scala new file mode 100644 index 000000000000..c6838261ed7a --- /dev/null +++ b/tests/neg-macros/i6991.scala @@ -0,0 +1,16 @@ +import scala.quoted._ + +object macros { + inline def mcr(x: => Any): Any = ${mcrImpl('x)} + + class Foo // { def apply(str: String) = "100" } + class Bar { def apply(str: String) = "100" } + + def mcrImpl(body: Expr[Any])(using ctx: Quotes): Expr[Any] = { + body match { + case '{($x: Foo)($bar: String)} => '{"Hello World"} // error + case '{($x: Foo).apply($bar: String)} => '{"Hello World"} // error + case '{($x: Bar)($bar: String)} => '{"Hello World"} + } + } +} diff --git a/tests/neg-macros/i8887.scala b/tests/neg-macros/i8887.scala new file mode 100644 index 000000000000..944544ba33dc --- /dev/null +++ b/tests/neg-macros/i8887.scala @@ -0,0 +1,5 @@ +import scala.quoted._ + +def expr[X](x: Any)(using Quotes): Expr[Any] = + '{ foo[x.type] } // error +def foo[X]: Any = ??? 
diff --git a/tests/neg-macros/i9014b.check b/tests/neg-macros/i9014b.check index 0d972e123a30..de0be2d5c1fa 100644 --- a/tests/neg-macros/i9014b.check +++ b/tests/neg-macros/i9014b.check @@ -1,5 +1,5 @@ --- Error: tests/neg-macros/i9014b/Test_2.scala:1:23 -------------------------------------------------------------------- +-- [E172] Type Error: tests/neg-macros/i9014b/Test_2.scala:1:23 -------------------------------------------------------- 1 |val tests = summon[Bar] // error | ^ | No given instance of type Bar was found for parameter x of method summon in object Predef. diff --git a/tests/neg-macros/ill-abort.check b/tests/neg-macros/ill-abort.check index 2f76c89d88dd..c267c2e79ecf 100644 --- a/tests/neg-macros/ill-abort.check +++ b/tests/neg-macros/ill-abort.check @@ -2,7 +2,7 @@ -- Error: tests/neg-macros/ill-abort/quoted_2.scala:1:15 --------------------------------------------------------------- 1 |def test = fail() // error | ^^^^^^ - |Macro expansion was aborted by the macro without any errors reported. Macros should issue errors to end-users to facilitate debugging when aborting a macro expansion. + |Macro expansion was aborted by the macro without any errors reported. Macros should issue errors to end-users when aborting a macro expansion with StopMacroExpansion. 
|--------------------------------------------------------------------------------------------------------------------- |Inline stack trace |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tests/neg-macros/quote-this-a.scala b/tests/neg-macros/quote-this-a.scala index 11621176526b..9f71aca0a7fb 100644 --- a/tests/neg-macros/quote-this-a.scala +++ b/tests/neg-macros/quote-this-a.scala @@ -4,9 +4,7 @@ class Foo { def f(using Quotes): Unit = '{ def bar[T](x: T): T = x - bar[ - this.type // error - ] { + bar[this.type] { this // error } } diff --git a/tests/neg-scalajs/js-trait-ctor-param.check b/tests/neg-scalajs/js-trait-ctor-param.check new file mode 100644 index 000000000000..bc5296b3c76f --- /dev/null +++ b/tests/neg-scalajs/js-trait-ctor-param.check @@ -0,0 +1,4 @@ +-- Error: tests/neg-scalajs/js-trait-ctor-param.scala:9:34 ------------------------------------------------------------- +9 |trait NonNativeBagHolderTrait(val bag: Bag) extends js.Any // error + | ^^^^^^^^^^^^ + | A non-native JS trait cannot have constructor parameters diff --git a/tests/neg-scalajs/js-trait-ctor-param.scala b/tests/neg-scalajs/js-trait-ctor-param.scala new file mode 100644 index 000000000000..c907b0d9b606 --- /dev/null +++ b/tests/neg-scalajs/js-trait-ctor-param.scala @@ -0,0 +1,9 @@ +import scala.scalajs.js +import scala.scalajs.js.annotation.* + +@js.native +trait Bag extends js.Any { + val str: String +} + +trait NonNativeBagHolderTrait(val bag: Bag) extends js.Any // error diff --git a/tests/neg-scalajs/jsconstructorof-error-in-prepjsinterop.check b/tests/neg-scalajs/jsconstructorof-error-in-prepjsinterop.check index 301111860aa7..7687543ea75f 100644 --- a/tests/neg-scalajs/jsconstructorof-error-in-prepjsinterop.check +++ b/tests/neg-scalajs/jsconstructorof-error-in-prepjsinterop.check @@ -13,7 +13,7 @@ -- [E170] Type Error: tests/neg-scalajs/jsconstructorof-error-in-prepjsinterop.scala:17:27 
----------------------------- 17 | val d = js.constructorOf[NativeJSClass { def bar: Int }] // error | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | NativeJSClass{bar: Int} is not a class type + | NativeJSClass{def bar: Int} is not a class type -- Error: tests/neg-scalajs/jsconstructorof-error-in-prepjsinterop.scala:19:27 ----------------------------------------- 19 | val e = js.constructorOf[JSTrait] // error | ^^^^^^^ @@ -29,7 +29,7 @@ -- [E170] Type Error: tests/neg-scalajs/jsconstructorof-error-in-prepjsinterop.scala:23:27 ----------------------------- 23 | val h = js.constructorOf[JSClass { def bar: Int }] // error | ^^^^^^^^^^^^^^^^^^^^^^^^ - | JSClass{bar: Int} is not a class type + | JSClass{def bar: Int} is not a class type -- [E170] Type Error: tests/neg-scalajs/jsconstructorof-error-in-prepjsinterop.scala:25:42 ----------------------------- 25 | def foo[A <: js.Any] = js.constructorOf[A] // error | ^ diff --git a/tests/neg-scalajs/jsconstructortag-error-in-prepjsinterop.check b/tests/neg-scalajs/jsconstructortag-error-in-prepjsinterop.check index c4ce18b2e57c..142de318efd3 100644 --- a/tests/neg-scalajs/jsconstructortag-error-in-prepjsinterop.check +++ b/tests/neg-scalajs/jsconstructortag-error-in-prepjsinterop.check @@ -9,11 +9,11 @@ -- [E170] Type Error: tests/neg-scalajs/jsconstructortag-error-in-prepjsinterop.scala:16:61 ---------------------------- 16 | val c = js.constructorTag[NativeJSClass with NativeJSTrait] // error | ^ - | (NativeJSClass & NativeJSTrait) is not a class type + | NativeJSClass & NativeJSTrait is not a class type -- [E170] Type Error: tests/neg-scalajs/jsconstructortag-error-in-prepjsinterop.scala:17:59 ---------------------------- 17 | val d = js.constructorTag[NativeJSClass { def bar: Int }] // error | ^ - | NativeJSClass{bar: Int} is not a class type + | NativeJSClass{def bar: Int} is not a class type -- Error: tests/neg-scalajs/jsconstructortag-error-in-prepjsinterop.scala:19:36 ---------------------------------------- 19 | val e = 
js.constructorTag[JSTrait] // error | ^ @@ -25,11 +25,11 @@ -- [E170] Type Error: tests/neg-scalajs/jsconstructortag-error-in-prepjsinterop.scala:22:49 ---------------------------- 22 | val g = js.constructorTag[JSClass with JSTrait] // error | ^ - | (JSClass & JSTrait) is not a class type + | JSClass & JSTrait is not a class type -- [E170] Type Error: tests/neg-scalajs/jsconstructortag-error-in-prepjsinterop.scala:23:53 ---------------------------- 23 | val h = js.constructorTag[JSClass { def bar: Int }] // error | ^ - | JSClass{bar: Int} is not a class type + | JSClass{def bar: Int} is not a class type -- [E170] Type Error: tests/neg-scalajs/jsconstructortag-error-in-prepjsinterop.scala:25:45 ---------------------------- 25 | def foo[A <: js.Any] = js.constructorTag[A] // error | ^ diff --git a/tests/neg-scalajs/jsconstructortag-error-in-typer.check b/tests/neg-scalajs/jsconstructortag-error-in-typer.check index ba845de39231..888fa163e81c 100644 --- a/tests/neg-scalajs/jsconstructortag-error-in-typer.check +++ b/tests/neg-scalajs/jsconstructortag-error-in-typer.check @@ -1,4 +1,4 @@ --- Error: tests/neg-scalajs/jsconstructortag-error-in-typer.scala:9:39 ------------------------------------------------- +-- [E172] Type Error: tests/neg-scalajs/jsconstructortag-error-in-typer.scala:9:39 ------------------------------------- 9 | val a = js.constructorTag[ScalaClass] // error | ^ |No given instance of type scala.scalajs.js.ConstructorTag[ScalaClass] was found for parameter tag of method constructorTag in package scala.scalajs.js. @@ -7,7 +7,7 @@ | scala.scalajs.js.ConstructorTag.materialize[T] | |But method materialize in object ConstructorTag does not match type scala.scalajs.js.ConstructorTag[ScalaClass]. 
--- Error: tests/neg-scalajs/jsconstructortag-error-in-typer.scala:10:39 ------------------------------------------------ +-- [E172] Type Error: tests/neg-scalajs/jsconstructortag-error-in-typer.scala:10:39 ------------------------------------ 10 | val b = js.constructorTag[ScalaTrait] // error | ^ |No given instance of type scala.scalajs.js.ConstructorTag[ScalaTrait] was found for parameter tag of method constructorTag in package scala.scalajs.js. @@ -16,7 +16,7 @@ | scala.scalajs.js.ConstructorTag.materialize[T] | |But method materialize in object ConstructorTag does not match type scala.scalajs.js.ConstructorTag[ScalaTrait]. --- Error: tests/neg-scalajs/jsconstructortag-error-in-typer.scala:11:45 ------------------------------------------------ +-- [E172] Type Error: tests/neg-scalajs/jsconstructortag-error-in-typer.scala:11:45 ------------------------------------ 11 | val c = js.constructorTag[ScalaObject.type] // error | ^ |No given instance of type scala.scalajs.js.ConstructorTag[ScalaObject.type] was found for parameter tag of method constructorTag in package scala.scalajs.js. 
diff --git a/tests/neg/17077.scala b/tests/neg/17077.scala new file mode 100644 index 000000000000..26a052a7bf97 --- /dev/null +++ b/tests/neg/17077.scala @@ -0,0 +1,14 @@ +case class IsIntResult() + +object IsInt: + def unapply(x: Int): IsIntResult = IsIntResult() + +@main def test = + val v: String | Int = "Blop" + val res = + v match + case IsInt() => 43 // error: cannot use a product of arity zero as a return type for unapply + // see UnapplyInvalidReturnType in messages.scala + // and https://docs.scala-lang.org/scala3/reference/changed-features/pattern-matching.html#fixed-arity-extractors + case _ => 42 + println(res) diff --git a/tests/neg/6570-1.check b/tests/neg/6570-1.check index fa53e71cbb6b..bdbadd0f752a 100644 --- a/tests/neg/6570-1.check +++ b/tests/neg/6570-1.check @@ -7,7 +7,7 @@ | Note: a match type could not be fully reduced: | | trying to reduce N[Box[Int & String]] - | failed since selector Box[Int & String] + | failed since selector Box[Int & String] | is uninhabited (there are no values of that type). | | longer explanation available when compiling with `-explain` @@ -23,7 +23,7 @@ | Note: a match type could not be fully reduced: | | trying to reduce M[T] - | failed since selector T + | failed since selector T | does not uniquely determine parameter x in | case Cov[x] => N[x] | The computed bounds for the parameter are: diff --git a/tests/neg/6571.check b/tests/neg/6571.check index 42997407765f..4172abb2919b 100644 --- a/tests/neg/6571.check +++ b/tests/neg/6571.check @@ -7,7 +7,7 @@ | Note: a match type could not be fully reduced: | | trying to reduce Test.M[Test.Inv[Int] & Test.Inv[String]] - | failed since selector Test.Inv[Int] & Test.Inv[String] + | failed since selector Test.Inv[Int] & Test.Inv[String] | is uninhabited (there are no values of that type). 
| | longer explanation available when compiling with `-explain` @@ -20,7 +20,7 @@ | Note: a match type could not be fully reduced: | | trying to reduce Test.M[Test.Inv[String] & Test.Inv[Int]] - | failed since selector Test.Inv[String] & Test.Inv[Int] + | failed since selector Test.Inv[String] & Test.Inv[Int] | is uninhabited (there are no values of that type). | | longer explanation available when compiling with `-explain` diff --git a/tests/neg/abstract-givens.check b/tests/neg/abstract-givens.check index a74d0097b091..022c454c31f1 100644 --- a/tests/neg/abstract-givens.check +++ b/tests/neg/abstract-givens.check @@ -7,7 +7,7 @@ | ^ | error overriding given instance y in trait T of type (using x$1: Int): String; | given instance y of type (using x$1: Int): String cannot override final member given instance y in trait T --- [E163] Declaration Error: tests/neg/abstract-givens.scala:9:8 ------------------------------------------------------- +-- [E164] Declaration Error: tests/neg/abstract-givens.scala:9:8 ------------------------------------------------------- 9 | given z[T](using T): Seq[T] = List(summon[T]) // error | ^ | error overriding given instance z in trait T of type [T](using x$1: T): List[T]; diff --git a/tests/neg/ambiref.check b/tests/neg/ambiref.check index 95b542c7aae3..5d701b3b3b71 100644 --- a/tests/neg/ambiref.check +++ b/tests/neg/ambiref.check @@ -1,32 +1,32 @@ -- [E049] Reference Error: tests/neg/ambiref.scala:8:14 ---------------------------------------------------------------- 8 | println(x) // error | ^ - | Reference to x is ambiguous, - | it is both defined in object Test + | Reference to x is ambiguous. 
+ | It is both defined in object Test | and inherited subsequently in class D | | longer explanation available when compiling with `-explain` -- [E049] Reference Error: tests/neg/ambiref.scala:10:14 --------------------------------------------------------------- 10 | println(x) // error | ^ - | Reference to x is ambiguous, - | it is both defined in object Test + | Reference to x is ambiguous. + | It is both defined in object Test | and inherited subsequently in anonymous class test1.C {...} | | longer explanation available when compiling with `-explain` -- [E049] Reference Error: tests/neg/ambiref.scala:17:14 --------------------------------------------------------------- 17 | println(y) // error | ^ - | Reference to y is ambiguous, - | it is both defined in method c + | Reference to y is ambiguous. + | It is both defined in method c | and inherited subsequently in anonymous class D {...} | | longer explanation available when compiling with `-explain` -- [E049] Reference Error: tests/neg/ambiref.scala:25:16 --------------------------------------------------------------- 25 | println(y) // error | ^ - | Reference to y is ambiguous, - | it is both defined in method c + | Reference to y is ambiguous. 
+ | It is both defined in method c | and inherited subsequently in class E | | longer explanation available when compiling with `-explain` diff --git a/tests/neg/classOf.check b/tests/neg/classOf.check index c3873aff7391..e3be3ca17026 100644 --- a/tests/neg/classOf.check +++ b/tests/neg/classOf.check @@ -11,4 +11,4 @@ -- [E170] Type Error: tests/neg/classOf.scala:9:18 --------------------------------------------------------------------- 9 | val y = classOf[C { type I = String }] // error | ^^^^^^^^^^^^^^^^^^^^^ - | Test.C{I = String} is not a class type + | Test.C{type I = String} is not a class type diff --git a/tests/neg/closure-args.scala b/tests/neg/closure-args.scala index 3b166c81c61c..76e590ad28b9 100644 --- a/tests/neg/closure-args.scala +++ b/tests/neg/closure-args.scala @@ -1,4 +1,4 @@ -import language.experimental.fewerBraces +import language.`3.3` val x = List(1).map: (x: => Int) => // error ??? diff --git a/tests/neg/constructor-proxy-shadowing.scala b/tests/neg/constructor-proxy-shadowing.scala deleted file mode 100644 index 857ef986cb79..000000000000 --- a/tests/neg/constructor-proxy-shadowing.scala +++ /dev/null @@ -1,10 +0,0 @@ - -object Test extends App { - def A22(s: String): String = s - class A { - class A22(s: String) { - def run = s - } - val x = A22("") // error: shadowing - } -} \ No newline at end of file diff --git a/tests/neg/enum-values.check b/tests/neg/enum-values.check index 84df5889b500..23337de1b2c4 100644 --- a/tests/neg/enum-values.check +++ b/tests/neg/enum-values.check @@ -6,7 +6,9 @@ | meaning a values array is not defined. | An extension method was tried, but could not be fully constructed: | - | example.Extensions.values(Tag) failed with + | example.Extensions.values(Tag) + | + | failed with: | | Found: example.Tag.type | Required: Nothing @@ -18,10 +20,12 @@ | meaning a values array is not defined. 
| An extension method was tried, but could not be fully constructed: | - | example.Extensions.values(ListLike) failed with + | example.Extensions.values(ListLike) + | + | failed with: | - | Found: Array[example.Tag[?]] - | Required: Array[example.ListLike[?]] + | Found: example.ListLike.type + | Required: Nothing -- [E008] Not Found Error: tests/neg/enum-values.scala:34:52 ----------------------------------------------------------- 34 | val typeCtorsK: Array[TypeCtorsK[?]] = TypeCtorsK.values // error | ^^^^^^^^^^^^^^^^^ @@ -30,10 +34,12 @@ | meaning a values array is not defined. | An extension method was tried, but could not be fully constructed: | - | example.Extensions.values(TypeCtorsK) failed with + | example.Extensions.values(TypeCtorsK) | - | Found: Array[example.Tag[?]] - | Required: Array[example.TypeCtorsK[?[_$1]]] + | failed with: + | + | Found: example.TypeCtorsK.type + | Required: Nothing -- [E008] Not Found Error: tests/neg/enum-values.scala:36:6 ------------------------------------------------------------ 36 | Tag.valueOf("Int") // error | ^^^^^^^^^^^ @@ -63,7 +69,9 @@ | value values is not a member of object example.NotAnEnum. 
| An extension method was tried, but could not be fully constructed: | - | example.Extensions.values(NotAnEnum) failed with + | example.Extensions.values(NotAnEnum) + | + | failed with: | | Found: example.NotAnEnum.type | Required: Nothing diff --git a/tests/neg/enumsAccess.scala b/tests/neg/enumsAccess.scala index 18b91b346b6a..8a8e9af8910f 100644 --- a/tests/neg/enumsAccess.scala +++ b/tests/neg/enumsAccess.scala @@ -63,7 +63,7 @@ object test5 { enum E5[T](x: T) { case C3() extends E5[INT](defaultX)// error: illegal reference // error: illegal reference case C4 extends E5[INT](defaultX) // error: illegal reference // error: illegal reference - case C5 extends E5[E5[_]](E5.this) // error: type mismatch + case C5 extends E5[E5[_]](E5.this) // error: cannot be instantiated // error: conflicting base types // error: type mismatch } object E5 { diff --git a/tests/neg/equality1.scala b/tests/neg/equality1.scala index 74bd45b18c12..cbd962a32bf6 100644 --- a/tests/neg/equality1.scala +++ b/tests/neg/equality1.scala @@ -132,4 +132,9 @@ object equality1 { println("empty") } + Map("k1" -> 1) == Map("k2" -> 2, "k3" -> 3) + Map(Color.Red -> Status.Inactive) == Map(Color.Green -> Status.Active(5)) + + Map("k1" -> 5) == Map('k' -> 5) // error + Map("k1" -> new A) == Map("k2" -> new B) // error } diff --git a/tests/neg/experimentalInheritance.scala b/tests/neg/experimentalInheritance.scala deleted file mode 100644 index 8b6c0b11afa3..000000000000 --- a/tests/neg/experimentalInheritance.scala +++ /dev/null @@ -1,44 +0,0 @@ -import scala.annotation.experimental - -@experimental -class A - -@experimental -trait T - -class B extends A // error - -@experimental -class B2 extends A - -class C extends T // error - -@experimental -class C2 extends T - -@experimental -class O: - class X - - @experimental - class Y - - object Z - -@experimental -object O: - class A - - @experimental - class B - - object C - -class OA extends O.A // error -class OB extends O.B // error - -@experimental 
-class OA2 extends O.A - -@experimental -class OB2 extends O.B diff --git a/tests/neg/experimentalInheritance2.scala b/tests/neg/experimentalInheritance2.scala deleted file mode 100644 index 84668ac5850f..000000000000 --- a/tests/neg/experimentalInheritance2.scala +++ /dev/null @@ -1,6 +0,0 @@ -import scala.annotation.experimental - -@experimental class A - -class B // // error: extension of experimental class A1 must have @experimental annotation - extends A diff --git a/tests/neg/exports.check b/tests/neg/exports.check index 49d8cdf0654b..79951cebfc39 100644 --- a/tests/neg/exports.check +++ b/tests/neg/exports.check @@ -11,7 +11,7 @@ 25 | export printUnit.bitmap // error: no eligible member | ^ | non-private given instance bitmap in class Copier refers to private value printUnit - | in its type signature => Copier.this.printUnit.bitmap + | in its type signature => object Copier.this.printUnit.bitmap -- [E120] Naming Error: tests/neg/exports.scala:23:33 ------------------------------------------------------------------ 23 | export printUnit.{stat => _, _} // error: double definition | ^ diff --git a/tests/neg/extension-methods.scala b/tests/neg/extension-methods.scala index e075105762f9..a11b2cca5add 100644 --- a/tests/neg/extension-methods.scala +++ b/tests/neg/extension-methods.scala @@ -15,4 +15,4 @@ object Test { def f2[T]: T = ??? // error: T is already defined as type T def f3(xs: List[T]) = ??? 
// error: xs is already defined as value xs } -} \ No newline at end of file +} diff --git a/tests/neg/harmonize.scala b/tests/neg/harmonize.scala index 0fe03d2d7600..72275a8f68fc 100644 --- a/tests/neg/harmonize.scala +++ b/tests/neg/harmonize.scala @@ -79,9 +79,9 @@ object Test { val a4 = ArrayBuffer(1.0f, 1L) val b4: ArrayBuffer[Double] = a4 // error: no widening val a5 = ArrayBuffer(1.0f, 1L, f()) - val b5: ArrayBuffer[AnyVal] = a5 + val b5: ArrayBuffer[Float | Long | Int] = a5 val a6 = ArrayBuffer(1.0f, 1234567890) - val b6: ArrayBuffer[AnyVal] = a6 + val b6: ArrayBuffer[Float | Int] = a6 def totalDuration(results: List[Long], cond: Boolean): Long = results.map(r => if (cond) r else 0).sum diff --git a/tests/neg/i10098.check b/tests/neg/i10098.check index 06d0c62b69c0..94cc911b7753 100644 --- a/tests/neg/i10098.check +++ b/tests/neg/i10098.check @@ -1,16 +1,16 @@ --- Error: tests/neg/i10098.scala:20:32 --------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i10098.scala:20:32 --------------------------------------------------------------------- 20 | implicitly[Bar12[Int, String]] // error | ^ | There's no Foo2[String, Int] --- Error: tests/neg/i10098.scala:21:32 --------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i10098.scala:21:32 --------------------------------------------------------------------- 21 | implicitly[Bar21[Int, String]] // error | ^ | There's no Foo1[String, Int] --- Error: tests/neg/i10098.scala:22:32 --------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i10098.scala:22:32 --------------------------------------------------------------------- 22 | implicitly[Baz12[Int, String]] // error | ^ | There's no Baz12[Int, String] --- Error: tests/neg/i10098.scala:23:32 --------------------------------------------------------------------------------- +-- [E172] Type Error: 
tests/neg/i10098.scala:23:32 --------------------------------------------------------------------- 23 | implicitly[Baz21[Int, String]] // error | ^ | There's no Baz21[Int, String] diff --git a/tests/neg/i10603a.check b/tests/neg/i10603a.check index 578b942f6023..1d885dfdb762 100644 --- a/tests/neg/i10603a.check +++ b/tests/neg/i10603a.check @@ -1,4 +1,4 @@ --- Error: tests/neg/i10603a.scala:2:35 --------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i10603a.scala:2:35 --------------------------------------------------------------------- 2 | val x = implicitly[List[Boolean]] // error | ^ | No given instance of type List[Boolean] was found for parameter e of method implicitly in object Predef diff --git a/tests/neg/i10603b.check b/tests/neg/i10603b.check index 14a03fc9d3d7..cd230c44538b 100644 --- a/tests/neg/i10603b.check +++ b/tests/neg/i10603b.check @@ -1,4 +1,4 @@ --- Error: tests/neg/i10603b.scala:4:35 --------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i10603b.scala:4:35 --------------------------------------------------------------------- 4 | val x = implicitly[List[Boolean]] // error | ^ | No implicit view available from Int => Boolean. diff --git a/tests/neg/i10715a.scala b/tests/neg/i10715a.scala new file mode 100644 index 000000000000..b5794c46d22c --- /dev/null +++ b/tests/neg/i10715a.scala @@ -0,0 +1,22 @@ +class Parent: + def f(x: Int): Parent = ??? + def f: Int = 0 + + def g[A](x: Int): Parent = ??? + def g[A]: Int = 0 + +class Sub extends Parent: + override def f(x: Int): Parent = ??? + override def g[A](x: Int): Parent = ??? 
+ +def bad(c: Sub): Unit = + c.f: String // error + c.g: String // error + c.f.bad // error + c.g.bad // error + + c.f("") // error + c.g("") // error + c.g[Int]("") // error + c.g[Int]: (String => String) // error + c.g[Int]: (Int => Parent) // ok diff --git a/tests/neg/i10715b.scala b/tests/neg/i10715b.scala new file mode 100644 index 000000000000..922b80cf727b --- /dev/null +++ b/tests/neg/i10715b.scala @@ -0,0 +1,10 @@ +class Parent: + def f(x: Int): Unit = () + def f: Int = 0 + +class Sub extends Parent: + override def f(x: Int): Unit = () + def f(x: Int)(using String): Unit = () + +def bad(c: Sub): Unit = + c.f(1) // error: ambiguous overload diff --git a/tests/neg/i10901.check b/tests/neg/i10901.check index 26270ced338b..e055bed7dd3a 100644 --- a/tests/neg/i10901.check +++ b/tests/neg/i10901.check @@ -4,7 +4,9 @@ | value º is not a member of object BugExp4Point2D.IntT. | An extension method was tried, but could not be fully constructed: | - | º(x) failed with + | º(x) + | + | failed with: | | Ambiguous overload. The overloaded alternatives of method º in object dsl with types | [T1, T2] @@ -22,7 +24,9 @@ |value º is not a member of object BugExp4Point2D.IntT. |An extension method was tried, but could not be fully constructed: | - | º(x) failed with + | º(x) + | + | failed with: | | Ambiguous overload. The overloaded alternatives of method º in object dsl with types | [T1, T2] @@ -36,6 +40,8 @@ | value foo is not a member of String. 
| An extension method was tried, but could not be fully constructed: | - | Test.foo("abc")(/* missing */summon[C]) failed with + | Test.foo("abc")(/* missing */summon[C]) + | + | failed with: | | No given instance of type C was found for parameter x$2 of method foo in object Test diff --git a/tests/neg/i10943.scala b/tests/neg/i10943.scala index 4a9697c31874..09a42ce66cc4 100644 --- a/tests/neg/i10943.scala +++ b/tests/neg/i10943.scala @@ -1,4 +1,4 @@ -import language.experimental.fewerBraces +import language.`3.3` object T: class A diff --git a/tests/neg/i11066.check b/tests/neg/i11066.check index a73cb5566439..24b91b2f69ee 100644 --- a/tests/neg/i11066.check +++ b/tests/neg/i11066.check @@ -1,4 +1,4 @@ --- Error: tests/neg/i11066.scala:15:37 --------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i11066.scala:15:37 --------------------------------------------------------------------- 15 |val x = Greeter.greet("Who's there?") // error | ^ |Ambiguous given instances: both given instance joesPrompt in object JoesPrefs and given instance jillsPrompt in object JillsPrefs match type PreferredPrompt of parameter prompt of method greet in object Greeter diff --git a/tests/neg/i11118.check b/tests/neg/i11118.check new file mode 100644 index 000000000000..0af98c7f580a --- /dev/null +++ b/tests/neg/i11118.check @@ -0,0 +1,12 @@ +-- Warning: tests/neg/i11118.scala:2:12 -------------------------------------------------------------------------------- +2 |val (a,b) = (1,2,3) // error // warning + | ^^^^^^^ + | pattern's type (Any, Any) does not match the right hand side expression's type (Int, Int, Int) + | + | If the narrowing is intentional, this can be communicated by adding `: @unchecked` after the expression, + | which may result in a MatchError at runtime. + | This patch can be rewritten automatically under -rewrite -source 3.2-migration. 
+-- Error: tests/neg/i11118.scala:2:4 ----------------------------------------------------------------------------------- +2 |val (a,b) = (1,2,3) // error // warning + | ^ + | this case is unreachable since type (Int, Int, Int) is not a subclass of class Tuple2 diff --git a/tests/neg/i11118.scala b/tests/neg/i11118.scala new file mode 100644 index 000000000000..23d9b2b604b6 --- /dev/null +++ b/tests/neg/i11118.scala @@ -0,0 +1,2 @@ +// https://github.com/lampepfl/dotty/issues/11118 +val (a,b) = (1,2,3) // error // warning diff --git a/tests/neg/i11797.check b/tests/neg/i11797.check index 80090b6b2faf..62b8a8828069 100644 --- a/tests/neg/i11797.check +++ b/tests/neg/i11797.check @@ -1,4 +1,4 @@ --- Error: tests/neg/i11797.scala:6:17 ---------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i11797.scala:6:17 ---------------------------------------------------------------------- 6 | summon[Foo.Bar] // error | ^ | Oops diff --git a/tests/neg/i11897.check b/tests/neg/i11897.check index 4b001fadc606..67de6dbab37d 100644 --- a/tests/neg/i11897.check +++ b/tests/neg/i11897.check @@ -23,23 +23,23 @@ | ^^^^^^^^^^^ | given patterns are not allowed in a val definition, | please bind to an identifier and use an alias given. 
--- Error: tests/neg/i11897.scala:16:18 --------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i11897.scala:16:18 --------------------------------------------------------------------- 16 | assert(summon[A] == A(23)) // error | ^ | No given instance of type A was found for parameter x of method summon in object Predef --- Error: tests/neg/i11897.scala:17:18 --------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i11897.scala:17:18 --------------------------------------------------------------------- 17 | assert(summon[B] == B(false)) // error | ^ | No given instance of type B was found for parameter x of method summon in object Predef --- Error: tests/neg/i11897.scala:18:18 --------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i11897.scala:18:18 --------------------------------------------------------------------- 18 | assert(summon[C] == C("c")) // error | ^ | No given instance of type C was found for parameter x of method summon in object Predef --- Error: tests/neg/i11897.scala:19:18 --------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i11897.scala:19:18 --------------------------------------------------------------------- 19 | assert(summon[E] == E(93)) // error | ^ | No given instance of type E was found for parameter x of method summon in object Predef --- Error: tests/neg/i11897.scala:20:18 --------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i11897.scala:20:18 --------------------------------------------------------------------- 20 | assert(summon[G] == G(101)) // error | ^ | No given instance of type G was found for parameter x of method summon in object Predef diff --git a/tests/neg/i11982.check b/tests/neg/i11982.check index 48ec252a4410..304accbf0269 
100644 --- a/tests/neg/i11982.check +++ b/tests/neg/i11982.check @@ -1,4 +1,4 @@ --- Error: tests/neg/i11982.scala:22:38 --------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i11982.scala:22:38 --------------------------------------------------------------------- 22 | val p1: ("msg", 42) = unpair[Tshape] // error: no singleton value for Any | ^ |No singleton value available for Any; eligible singleton types for `ValueOf` synthesis include literals and stable paths. diff --git a/tests/neg/i11982a.check b/tests/neg/i11982a.check index bc07c82059cc..1977aa30e8b5 100644 --- a/tests/neg/i11982a.check +++ b/tests/neg/i11982a.check @@ -6,7 +6,7 @@ | Note: a match type could not be fully reduced: | | trying to reduce Tuple.Tail[X] - | failed since selector X + | failed since selector X | does not uniquely determine parameter xs in | case _ *: xs => xs | The computed bounds for the parameter are: @@ -21,7 +21,7 @@ | Note: a match type could not be fully reduced: | | trying to reduce Tuple.Tail[X] - | failed since selector X + | failed since selector X | does not uniquely determine parameter xs in | case _ *: xs => xs | The computed bounds for the parameter are: @@ -36,7 +36,7 @@ | Note: a match type could not be fully reduced: | | trying to reduce Tuple.Tail[X] - | failed since selector X + | failed since selector X | does not uniquely determine parameter xs in | case _ *: xs => xs | The computed bounds for the parameter are: diff --git a/tests/neg/i12049.check b/tests/neg/i12049.check index edf76a0823b9..11c648e35a57 100644 --- a/tests/neg/i12049.check +++ b/tests/neg/i12049.check @@ -7,7 +7,7 @@ | Note: a match type could not be fully reduced: | | trying to reduce M[B] - | failed since selector B + | failed since selector B | does not match case A => Int | and cannot be shown to be disjoint from it either. 
| Therefore, reduction cannot advance to the remaining case @@ -18,7 +18,7 @@ -- Error: tests/neg/i12049.scala:14:23 --------------------------------------------------------------------------------- 14 |val y3: String = ??? : Last[Int *: Int *: Boolean *: String *: EmptyTuple] // error | ^ - | Match type reduction failed since selector EmptyTuple.type + | Match type reduction failed since selector EmptyTuple.type | matches none of the cases | | case _ *: _ *: t => Last[t] @@ -26,12 +26,12 @@ -- Error: tests/neg/i12049.scala:22:26 --------------------------------------------------------------------------------- 22 |val z3: (A, B, A) = ??? : Reverse[(A, B, A)] // error | ^ - | Match type reduction failed since selector A *: EmptyTuple.type + | Match type reduction failed since selector A *: EmptyTuple.type | matches none of the cases | | case t1 *: t2 *: ts => Tuple.Concat[Reverse[ts], (t2, t1)] | case EmptyTuple => EmptyTuple --- Error: tests/neg/i12049.scala:24:20 --------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i12049.scala:24:20 --------------------------------------------------------------------- 24 |val _ = summon[M[B]] // error | ^ | No given instance of type M[B] was found for parameter x of method summon in object Predef @@ -39,7 +39,7 @@ | Note: a match type could not be fully reduced: | | trying to reduce M[B] - | failed since selector B + | failed since selector B | does not match case A => Int | and cannot be shown to be disjoint from it either. 
| Therefore, reduction cannot advance to the remaining case @@ -48,7 +48,7 @@ -- Error: tests/neg/i12049.scala:25:26 --------------------------------------------------------------------------------- 25 |val _ = summon[String =:= Last[Int *: Int *: Boolean *: String *: EmptyTuple]] // error | ^ - | Match type reduction failed since selector EmptyTuple.type + | Match type reduction failed since selector EmptyTuple.type | matches none of the cases | | case _ *: _ *: t => Last[t] @@ -56,7 +56,7 @@ -- Error: tests/neg/i12049.scala:26:29 --------------------------------------------------------------------------------- 26 |val _ = summon[(A, B, A) =:= Reverse[(A, B, A)]] // error | ^ - | Match type reduction failed since selector A *: EmptyTuple.type + | Match type reduction failed since selector A *: EmptyTuple.type | matches none of the cases | | case t1 *: t2 *: ts => Tuple.Concat[Reverse[ts], (t2, t1)] @@ -69,7 +69,7 @@ | Note: a match type could not be fully reduced: | | trying to reduce M[B] - | failed since selector B + | failed since selector B | does not match case A => Int | and cannot be shown to be disjoint from it either. 
| Therefore, reduction cannot advance to the remaining case diff --git a/tests/neg/i12232.check b/tests/neg/i12232.check index 8de9b4317a31..eb4875ab77c3 100644 --- a/tests/neg/i12232.check +++ b/tests/neg/i12232.check @@ -1,10 +1,10 @@ --- Error: tests/neg/i12232.scala:17:15 --------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i12232.scala:17:15 --------------------------------------------------------------------- 17 | foo(min(3, 4)) // error: works in Scala 2, not in 3 | ^ | No given instance of type Op[Int, Int, V] was found for parameter op of method min in object Foo | | where: V is a type variable with constraint <: Double --- Error: tests/neg/i12232.scala:19:16 --------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i12232.scala:19:16 --------------------------------------------------------------------- 19 | foo(minR(3, 4)) // error: works in Scala 2, not in 3 | ^ | No given instance of type Op[Int, Int, R] was found for parameter op of method minR in object Foo diff --git a/tests/neg/i12448.scala b/tests/neg/i12448.scala new file mode 100644 index 000000000000..e495cfd19f1d --- /dev/null +++ b/tests/neg/i12448.scala @@ -0,0 +1,5 @@ +object Main { + def mkArray[T <: A]: T#AType // error // error + mkArray[Array] // was: "assertion failed: invalid prefix HKTypeLambda..." 
+ val x = mkArray[Array] +} diff --git a/tests/neg/i12591.check b/tests/neg/i12591.check index e050038659b5..17d418713aa2 100644 --- a/tests/neg/i12591.check +++ b/tests/neg/i12591.check @@ -1,4 +1,4 @@ --- Error: tests/neg/i12591/Inner.scala:12:31 --------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i12591/Inner.scala:12:31 --------------------------------------------------------------- 12 |val badSummon = summon[TC[Bar]] // error here | ^ |Ambiguous given instances: both outer.inner.Foo.ofFoo and outer.Foo.ofFoo match type outer.inner.Foo.TC[outer.Bar] of parameter x of method summon in object Predef diff --git a/tests/neg/i12682.check b/tests/neg/i12682.check new file mode 100644 index 000000000000..605414938529 --- /dev/null +++ b/tests/neg/i12682.check @@ -0,0 +1,51 @@ +-- [E049] Reference Error: tests/neg/i12682.scala:6:12 ----------------------------------------------------------------- +6 | val x = m(1) // error + | ^ + | Reference to m is ambiguous. + | It is both defined in object C + | and inherited subsequently in object T + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | The identifier m is ambiguous because a name binding of lower precedence + | in an inner scope cannot shadow a binding with higher precedence in + | an outer scope. + | + | The precedence of the different kinds of name bindings, from highest to lowest, is: + | - Definitions in an enclosing scope + | - Inherited definitions and top-level definitions in packages + | - Names introduced by import of a specific name + | - Names introduced by wildcard import + | - Definitions from packages in other files + | Note: + | - As a rule, definitions take precedence over imports. 
+ | - Definitions in an enclosing scope take precedence over inherited definitions, + | which can result in ambiguities in nested classes. + | - When importing, you can avoid naming conflicts by renaming: + | import scala.{m => mTick} + --------------------------------------------------------------------------------------------------------------------- +-- [E049] Reference Error: tests/neg/i12682.scala:13:10 ---------------------------------------------------------------- +13 | def d = m(42) // error + | ^ + | Reference to m is ambiguous. + | It is both imported by import X._ + | and imported subsequently by import Y._ + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | The identifier m is ambiguous because two name bindings of equal precedence + | were introduced in the same scope. + | + | The precedence of the different kinds of name bindings, from highest to lowest, is: + | - Definitions in an enclosing scope + | - Inherited definitions and top-level definitions in packages + | - Names introduced by import of a specific name + | - Names introduced by wildcard import + | - Definitions from packages in other files + | Note: + | - As a rule, definitions take precedence over imports. + | - Definitions in an enclosing scope take precedence over inherited definitions, + | which can result in ambiguities in nested classes. 
+ | - When importing, you can avoid naming conflicts by renaming: + | import scala.{m => mTick} + -------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg/i12682.scala b/tests/neg/i12682.scala new file mode 100644 index 000000000000..0b37816ef0df --- /dev/null +++ b/tests/neg/i12682.scala @@ -0,0 +1,13 @@ +// scalac: -explain + +object C: + def m(x: Int) = 1 + object T extends K: + val x = m(1) // error +class K: + def m(i: Int) = 2 +object X extends K +object Y extends K +object D: + import X.*, Y.* + def d = m(42) // error diff --git a/tests/neg/i12991.scala b/tests/neg/i12991.scala deleted file mode 100644 index 90e037424c49..000000000000 --- a/tests/neg/i12991.scala +++ /dev/null @@ -1,7 +0,0 @@ -object Foo: - inline def unapply(using String)(i: Int): Some[Int] = Some(i) - -given String = "" - -val i = 10 match - case Foo(x) => x // error diff --git a/tests/neg/i13558.check b/tests/neg/i13558.check deleted file mode 100644 index 4c468a854781..000000000000 --- a/tests/neg/i13558.check +++ /dev/null @@ -1,22 +0,0 @@ --- [E008] Not Found Error: tests/neg/i13558.scala:23:14 ---------------------------------------------------------------- -23 | println(a.id) // error - | ^^^^ - | value id is not a member of testcode.A. - | An extension method was tried, but could not be fully constructed: - | - | testcode.ExtensionA.id(a) failed with - | - | Reference to id is ambiguous, - | it is both imported by import testcode.ExtensionB._ - | and imported subsequently by import testcode.ExtensionA._ --- [E008] Not Found Error: tests/neg/i13558.scala:29:14 ---------------------------------------------------------------- -29 | println(a.id) // error - | ^^^^ - | value id is not a member of testcode.A. 
- | An extension method was tried, but could not be fully constructed: - | - | testcode.ExtensionB.id(a) failed with - | - | Reference to id is ambiguous, - | it is both imported by import testcode.ExtensionA._ - | and imported subsequently by import testcode.ExtensionB._ diff --git a/tests/neg/i13757-match-type-anykind.scala b/tests/neg/i13757-match-type-anykind.scala new file mode 100644 index 000000000000..d8273e546dab --- /dev/null +++ b/tests/neg/i13757-match-type-anykind.scala @@ -0,0 +1,16 @@ +object Test: + type AnyKindMatchType1[X <: AnyKind] = X match // error: the scrutinee of a match type cannot be higher-kinded // error + case Option[a] => Int + + type AnyKindMatchType2[X <: AnyKind] = X match // error: the scrutinee of a match type cannot be higher-kinded + case Option => Int // error: Missing type parameter for Option + + type AnyKindMatchType3[X <: AnyKind] = X match // error: the scrutinee of a match type cannot be higher-kinded // error + case _ => Int + + type AnyKindMatchType4[X <: Option] = X match // error // error: the scrutinee of a match type cannot be higher-kinded // error + case _ => Int + + type AnyKindMatchType5[X[_]] = X match // error: the scrutinee of a match type cannot be higher-kinded // error + case _ => Int +end Test diff --git a/tests/neg/i13780.check b/tests/neg/i13780.check index 56b6a67ac8e7..aa0a47db5737 100644 --- a/tests/neg/i13780.check +++ b/tests/neg/i13780.check @@ -10,7 +10,7 @@ | Note: a match type could not be fully reduced: | | trying to reduce Head[X] - | failed since selector X + | failed since selector X | does not uniquely determine parameters a, b in | case (a, b) => a | The computed bounds for the parameters are: @@ -30,7 +30,7 @@ | Note: a match type could not be fully reduced: | | trying to reduce Head[X] - | failed since selector X + | failed since selector X | does not uniquely determine parameters a, b in | case (a, b) => a | The computed bounds for the parameters are: diff --git 
a/tests/neg/i13846.check b/tests/neg/i13846.check index 69ea0f0e51ac..a57db35ef6dd 100644 --- a/tests/neg/i13846.check +++ b/tests/neg/i13846.check @@ -2,7 +2,7 @@ 3 |def foo(): Int throws ArithmeticException = 1 / 0 // error | ^^^^^^^^^^^^^^^^^^^ | throws clause cannot be defined for RuntimeException --- Error: tests/neg/i13846.scala:7:9 ----------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i13846.scala:7:9 ----------------------------------------------------------------------- 7 | foo() // error | ^ | The capability to throw exception ArithmeticException is missing. diff --git a/tests/neg/i13864.check b/tests/neg/i13864.check index 54e81ea82774..6020ff8c6086 100644 --- a/tests/neg/i13864.check +++ b/tests/neg/i13864.check @@ -3,7 +3,7 @@ | ^^^^^^^^^^ | Implementation restriction: cannot generate CanThrow capability for this kind of catch. | CanThrow capabilities can only be generated for cases of the form `ex: T` where `T` is fully defined. --- Error: tests/neg/i13864.scala:9:10 ---------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i13864.scala:9:10 ---------------------------------------------------------------------- 9 | foo(1) // error | ^ | The capability to throw exception Ex[Int] is missing. 
diff --git a/tests/neg/i13991.check b/tests/neg/i13991.check index bd1cda58c046..4c24e14a85c6 100644 --- a/tests/neg/i13991.check +++ b/tests/neg/i13991.check @@ -1,4 +1,4 @@ --- Error: tests/neg/i13991.scala:5:7 ----------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i13991.scala:5:7 ----------------------------------------------------------------------- 5 | first[String] // error // before line 10 to test alignment of the error message `|` | ^^^^^^^^^^^^^ | No given instance of type Foo[String] was found diff --git a/tests/neg/i14025.check b/tests/neg/i14025.check index 3c67b954297b..a44cdc67c1f8 100644 --- a/tests/neg/i14025.check +++ b/tests/neg/i14025.check @@ -1,8 +1,8 @@ --- Error: tests/neg/i14025.scala:1:88 ---------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i14025.scala:1:88 ---------------------------------------------------------------------- 1 |val foo = summon[deriving.Mirror.Product { type MirroredType = [X] =>> [Y] =>> (X, Y) }] // error | ^ - |No given instance of type deriving.Mirror.Product{MirroredType[X] = [Y] =>> (X, Y)} was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Product{MirroredType[X] = [Y] =>> (X, Y)}: type `[X] =>> [Y] =>> (X, Y)` is not a generic product because its subpart `[X] =>> [Y] =>> (X, Y)` is not a supported kind (either `*` or `* -> *`) --- Error: tests/neg/i14025.scala:2:90 ---------------------------------------------------------------------------------- + |No given instance of type deriving.Mirror.Product{type MirroredType[X] = [Y] =>> (X, Y)} was found for parameter x of method summon in object Predef. 
Failed to synthesize an instance of type deriving.Mirror.Product{type MirroredType[X] = [Y] =>> (X, Y)}: type `[X] =>> [Y] =>> (X, Y)` is not a generic product because its subpart `[X] =>> [Y] =>> (X, Y)` is not a supported kind (either `*` or `* -> *`) +-- [E172] Type Error: tests/neg/i14025.scala:2:90 ---------------------------------------------------------------------- 2 |val bar = summon[deriving.Mirror.Sum { type MirroredType = [X] =>> [Y] =>> List[(X, Y)] }] // error | ^ - |No given instance of type deriving.Mirror.Sum{MirroredType[X] = [Y] =>> List[(X, Y)]} was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Sum{MirroredType[X] = [Y] =>> List[(X, Y)]}: type `[X] =>> [Y] =>> List[(X, Y)]` is not a generic sum because its subpart `[X] =>> [Y] =>> List[(X, Y)]` is not a supported kind (either `*` or `* -> *`) + |No given instance of type deriving.Mirror.Sum{type MirroredType[X] = [Y] =>> List[(X, Y)]} was found for parameter x of method summon in object Predef. 
Failed to synthesize an instance of type deriving.Mirror.Sum{type MirroredType[X] = [Y] =>> List[(X, Y)]}: type `[X] =>> [Y] =>> List[(X, Y)]` is not a generic sum because its subpart `[X] =>> [Y] =>> List[(X, Y)]` is not a supported kind (either `*` or `* -> *`) diff --git a/tests/neg/i14127.check b/tests/neg/i14127.check index 969092401012..15babe8b2775 100644 --- a/tests/neg/i14127.check +++ b/tests/neg/i14127.check @@ -1,10 +1,8 @@ --- Error: tests/neg/i14127.scala:6:55 ---------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i14127.scala:6:55 ---------------------------------------------------------------------- 6 | *: Int *: Int *: Int *: Int *: Int *: EmptyTuple)]] // error | ^ - |No given instance of type deriving.Mirror.Of[(Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, - | Int - |, Int, Int)] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[(Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, - | Int - |, Int, Int)]: + |No given instance of type deriving.Mirror.Of[(Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, + | Int, Int, Int)] was found for parameter x of method summon in object Predef. 
Failed to synthesize an instance of type deriving.Mirror.Of[(Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, + | Int, Int, Int)]: | * class *: is not a generic product because it reduces to a tuple with arity 23, expected arity <= 22 | * class *: is not a generic sum because it does not have subclasses diff --git a/tests/neg/i14432.check b/tests/neg/i14432.check index 793ade82212b..d19d952b0153 100644 --- a/tests/neg/i14432.check +++ b/tests/neg/i14432.check @@ -1,6 +1,6 @@ --- Error: tests/neg/i14432.scala:13:33 --------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i14432.scala:13:33 --------------------------------------------------------------------- 13 |val mFoo = summon[Mirror.Of[Foo]] // error: no mirror found | ^ - |No given instance of type deriving.Mirror.Of[example.Foo] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[example.Foo]: + |No given instance of type deriving.Mirror.Of[example.Foo] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[example.Foo]: | * class Foo is not a generic product because the constructor of class Foo is innaccessible from the calling scope. 
| * class Foo is not a generic sum because it is not a sealed class diff --git a/tests/neg/i14432a.check b/tests/neg/i14432a.check index 5f847ce30a38..705a7ed0e88b 100644 --- a/tests/neg/i14432a.check +++ b/tests/neg/i14432a.check @@ -1,6 +1,6 @@ --- Error: tests/neg/i14432a.scala:14:43 -------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i14432a.scala:14:43 -------------------------------------------------------------------- 14 | val mFoo = summon[Mirror.Of[example.Foo]] // error: no mirror found | ^ - |No given instance of type deriving.Mirror.Of[example.Foo] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[example.Foo]: + |No given instance of type deriving.Mirror.Of[example.Foo] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[example.Foo]: | * class Foo is not a generic product because the constructor of class Foo is innaccessible from the calling scope. | * class Foo is not a generic sum because it is not a sealed class diff --git a/tests/neg/i14432b.check b/tests/neg/i14432b.check index 24cb04b731ca..5b0dac3e6ad0 100644 --- a/tests/neg/i14432b.check +++ b/tests/neg/i14432b.check @@ -1,6 +1,6 @@ --- Error: tests/neg/i14432b.scala:15:43 -------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i14432b.scala:15:43 -------------------------------------------------------------------- 15 | val mFoo = summon[Mirror.Of[example.Foo]] // error: no mirror found | ^ - |No given instance of type deriving.Mirror.Of[example.Foo] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[example.Foo]: + |No given instance of type deriving.Mirror.Of[example.Foo] was found for parameter x of method summon in object Predef. 
Failed to synthesize an instance of type deriving.Mirror.Of[example.Foo]: | * class Foo is not a generic product because the constructor of class Foo is innaccessible from the calling scope. | * class Foo is not a generic sum because it is not a sealed class diff --git a/tests/neg/i14432c.check b/tests/neg/i14432c.check index 384235e5d379..a61e100ceb98 100644 --- a/tests/neg/i14432c.check +++ b/tests/neg/i14432c.check @@ -1,10 +1,10 @@ --- Error: tests/neg/i14432c.scala:12:18 -------------------------------------------------------------------------------- +-- [E173] Reference Error: tests/neg/i14432c.scala:12:18 --------------------------------------------------------------- 12 |class Bar extends example.Foo(23) { // error: cant access private[example] ctor | ^^^^^^^^^^^ | constructor Foo cannot be accessed as a member of example.Foo from class Bar. --- Error: tests/neg/i14432c.scala:16:43 -------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i14432c.scala:16:43 -------------------------------------------------------------------- 16 | val mFoo = summon[Mirror.Of[example.Foo]] // error: no mirror | ^ - |No given instance of type deriving.Mirror.Of[example.Foo] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[example.Foo]: + |No given instance of type deriving.Mirror.Of[example.Foo] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[example.Foo]: | * class Foo is not a generic product because the constructor of class Foo is innaccessible from the calling scope. 
| * class Foo is not a generic sum because it is not a sealed class diff --git a/tests/neg/i14432d.check b/tests/neg/i14432d.check index 0701fb02ea19..aff070d90192 100644 --- a/tests/neg/i14432d.check +++ b/tests/neg/i14432d.check @@ -1,6 +1,6 @@ --- Error: tests/neg/i14432d.scala:17:45 -------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i14432d.scala:17:45 -------------------------------------------------------------------- 17 | val mFoo = summon[Mirror.Of[example.Foo]] // error | ^ - |No given instance of type deriving.Mirror.Of[example.Foo] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[example.Foo]: + |No given instance of type deriving.Mirror.Of[example.Foo] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[example.Foo]: | * class Foo is not a generic product because the constructor of class Foo is innaccessible from the calling scope. | * class Foo is not a generic sum because it is not a sealed class diff --git a/tests/neg/i14823.check b/tests/neg/i14823.check index 4d5a64680882..47b15f04e2da 100644 --- a/tests/neg/i14823.check +++ b/tests/neg/i14823.check @@ -1,6 +1,6 @@ --- Error: tests/neg/i14823.scala:8:50 ---------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i14823.scala:8:50 ---------------------------------------------------------------------- 8 |val baz = summon[Mirror.Of[SubA[Int] | SubB[Int]]] // error | ^ - |No given instance of type deriving.Mirror.Of[SubA[Int] | SubB[Int]] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[SubA[Int] | SubB[Int]]: + |No given instance of type deriving.Mirror.Of[SubA[Int] | SubB[Int]] was found for parameter x of method summon in object Predef. 
Failed to synthesize an instance of type deriving.Mirror.Of[SubA[Int] | SubB[Int]]: | * type `SubA[Int] | SubB[Int]` is not a generic product because its subpart `SubA[Int] | SubB[Int]` is a top-level union type. | * type `SubA[Int] | SubB[Int]` is not a generic sum because its subpart `SubA[Int] | SubB[Int]` is a top-level union type. diff --git a/tests/neg/i14823a.check b/tests/neg/i14823a.check index 9c917548d9bf..3c9b749780e0 100644 --- a/tests/neg/i14823a.check +++ b/tests/neg/i14823a.check @@ -1,24 +1,24 @@ --- Error: tests/neg/i14823a.scala:16:48 -------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i14823a.scala:16:48 -------------------------------------------------------------------- 16 |val foo = summon[Mirror.Of[Box[Int] | Box[Int]]] // error | ^ - |No given instance of type deriving.Mirror.Of[Box[Int] | Box[Int]] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[Box[Int] | Box[Int]]: + |No given instance of type deriving.Mirror.Of[Box[Int] | Box[Int]] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[Box[Int] | Box[Int]]: | * type `Box[Int] | Box[Int]` is not a generic product because its subpart `Box[Int] | Box[Int]` is a top-level union type. | * type `Box[Int] | Box[Int]` is not a generic sum because its subpart `Box[Int] | Box[Int]` is a top-level union type. --- Error: tests/neg/i14823a.scala:17:58 -------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i14823a.scala:17:58 -------------------------------------------------------------------- 17 |val bar = summon[MirrorK1.Of[[X] =>> Box[Int] | Box[Int]]] // error | ^ - |No given instance of type MirrorK1.Of[[X] =>> Box[Int] | Box[Int]] was found for parameter x of method summon in object Predef. 
Failed to synthesize an instance of type MirrorK1.Of[[X] =>> Box[Int] | Box[Int]]: + |No given instance of type MirrorK1.Of[[X] =>> Box[Int] | Box[Int]] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type MirrorK1.Of[[X] =>> Box[Int] | Box[Int]]: | * type `[A] =>> Box[Int] | Box[Int]` is not a generic product because its subpart `Box[Int] | Box[Int]` is a top-level union type. | * type `[A] =>> Box[Int] | Box[Int]` is not a generic sum because its subpart `Box[Int] | Box[Int]` is a top-level union type. --- Error: tests/neg/i14823a.scala:18:63 -------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i14823a.scala:18:63 -------------------------------------------------------------------- 18 |def baz = summon[deriving.Mirror.Of[Foo[String] | Foo[String]]] // error | ^ - |No given instance of type deriving.Mirror.Of[Foo[String] | Foo[String]] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[Foo[String] | Foo[String]]: + |No given instance of type deriving.Mirror.Of[Foo[String] | Foo[String]] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[Foo[String] | Foo[String]]: | * type `Foo[String] | Foo[String]` is not a generic product because its subpart `Foo[String] | Foo[String]` is a top-level union type. | * type `Foo[String] | Foo[String]` is not a generic sum because its subpart `Foo[String] | Foo[String]` is a top-level union type. 
--- Error: tests/neg/i14823a.scala:20:66 -------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i14823a.scala:20:66 -------------------------------------------------------------------- 20 |def qux = summon[deriving.Mirror.Of[Option[Int] | Option[String]]] // error | ^ - |No given instance of type deriving.Mirror.Of[Option[Int] | Option[String]] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[Option[Int] | Option[String]]: + |No given instance of type deriving.Mirror.Of[Option[Int] | Option[String]] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[Option[Int] | Option[String]]: | * type `Option[Int] | Option[String]` is not a generic product because its subpart `Option[Int] | Option[String]` is a top-level union type. | * type `Option[Int] | Option[String]` is not a generic sum because its subpart `Option[Int] | Option[String]` is a top-level union type. diff --git a/tests/neg/i15000.check b/tests/neg/i15000.check index 1a1e8e1b973b..64c222b2a52e 100644 --- a/tests/neg/i15000.check +++ b/tests/neg/i15000.check @@ -16,7 +16,9 @@ |value apply is not a member of object ExtensionMethodReproduction.c. |An extension method was tried, but could not be fully constructed: | - | apply(ExtensionMethodReproduction.c) failed with + | apply(ExtensionMethodReproduction.c) + | + | failed with: | | Ambiguous overload. 
The overloaded alternatives of method apply in object ExtensionMethodReproduction with types | (c: ExtensionMethodReproduction.C)(x: Int, y: Int): String diff --git a/tests/neg/i15507.check b/tests/neg/i15507.check new file mode 100644 index 000000000000..3786d559c306 --- /dev/null +++ b/tests/neg/i15507.check @@ -0,0 +1,40 @@ +-- Error: tests/neg/i15507.scala:2:40 ---------------------------------------------------------------------------------- +2 | type _NestedSet1[X] = Set[_NestedSet1[?]] // error + | ^ + | no wildcard type allowed here +-- Error: tests/neg/i15507.scala:3:41 ---------------------------------------------------------------------------------- +3 | type _NestedSet2[X] <: Set[_NestedSet2[?]] // error + | ^ + | no wildcard type allowed here +-- [E140] Cyclic Error: tests/neg/i15507.scala:5:7 --------------------------------------------------------------------- +5 | type _NestedSet4[X] >: Set[_NestedSet4[X]] // error + | ^ + | illegal cyclic type reference: lower bound ... of type _NestedSet4 refers back to the type itself +-- [E140] Cyclic Error: tests/neg/i15507.scala:6:7 --------------------------------------------------------------------- +6 | type _NestedSet5[X] = Set[_NestedSet5[X]] // error + | ^ + | illegal cyclic type reference: alias ... of type _NestedSet5 refers back to the type itself +-- [E140] Cyclic Error: tests/neg/i15507.scala:7:7 --------------------------------------------------------------------- +7 | type _NestedSet6[X] = Set[_NestedSet6[Int]] // error + | ^ + | illegal cyclic type reference: alias ... 
of type _NestedSet6 refers back to the type itself +-- Error: tests/neg/i15507.scala:9:43 ---------------------------------------------------------------------------------- +9 | type _NestedList1[X] = List[_NestedList1[?]] // error + | ^ + | no wildcard type allowed here +-- Error: tests/neg/i15507.scala:10:44 --------------------------------------------------------------------------------- +10 | type _NestedList2[X] <: List[_NestedList2[?]] // error + | ^ + | no wildcard type allowed here +-- [E140] Cyclic Error: tests/neg/i15507.scala:12:7 -------------------------------------------------------------------- +12 | type _NestedList4[X] >: List[_NestedList4[X]] // error + | ^ + | illegal cyclic type reference: lower bound ... of type _NestedList4 refers back to the type itself +-- [E140] Cyclic Error: tests/neg/i15507.scala:13:7 -------------------------------------------------------------------- +13 | type _NestedList5[X] = List[_NestedList5[X]] // error + | ^ + | illegal cyclic type reference: alias ... of type _NestedList5 refers back to the type itself +-- [E140] Cyclic Error: tests/neg/i15507.scala:14:7 -------------------------------------------------------------------- +14 | type _NestedList6[X] = List[_NestedList6[Int]] // error + | ^ + | illegal cyclic type reference: alias ... 
of type _NestedList6 refers back to the type itself diff --git a/tests/neg/i15507.scala b/tests/neg/i15507.scala index 3c45f2a8d9f6..f65d216ba6a2 100644 --- a/tests/neg/i15507.scala +++ b/tests/neg/i15507.scala @@ -1,12 +1,12 @@ object TestNested: - type _NestedSet1[X] = Set[_NestedSet1[?]] // error // error + type _NestedSet1[X] = Set[_NestedSet1[?]] // error type _NestedSet2[X] <: Set[_NestedSet2[?]] // error type _NestedSet3[X] <: Set[_NestedSet3[X]] // ok type _NestedSet4[X] >: Set[_NestedSet4[X]] // error type _NestedSet5[X] = Set[_NestedSet5[X]] // error type _NestedSet6[X] = Set[_NestedSet6[Int]] // error - type _NestedList1[X] = List[_NestedList1[?]] // error // error + type _NestedList1[X] = List[_NestedList1[?]] // error type _NestedList2[X] <: List[_NestedList2[?]] // error type _NestedList3[X] <: List[_NestedList3[X]] // ok type _NestedList4[X] >: List[_NestedList4[X]] // error diff --git a/tests/neg/i15618.check b/tests/neg/i15618.check index 0853da26c27a..099e3fe0a0b7 100644 --- a/tests/neg/i15618.check +++ b/tests/neg/i15618.check @@ -1,4 +1,4 @@ --- Error: tests/neg/i15618.scala:17:44 --------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i15618.scala:17:44 --------------------------------------------------------------------- 17 | def toArray: Array[ScalaType[T]] = Array() // error | ^ | No ClassTag available for ScalaType[T] @@ -9,10 +9,16 @@ | Note: a match type could not be fully reduced: | | trying to reduce ScalaType[T] - | failed since selector T + | failed since selector T | does not match case Float16 => Float | and cannot be shown to be disjoint from it either. 
| Therefore, reduction cannot advance to the remaining cases | | case Float32 => Float | case Int32 => Int +-- [E172] Type Error: tests/neg/i15618.scala:21:33 --------------------------------------------------------------------- +21 | def toArray: Array[T] = Array() // error + | ^ + | No ClassTag available for T + | + | where: T is a type in class Tensor2 with bounds <: Int | Float diff --git a/tests/neg/i15618.scala b/tests/neg/i15618.scala index fd38c8c48f6b..087bc462b211 100644 --- a/tests/neg/i15618.scala +++ b/tests/neg/i15618.scala @@ -16,8 +16,16 @@ class Tensor[T <: DType](dtype: T): def toSeq: Seq[ScalaType[T]] = Seq() def toArray: Array[ScalaType[T]] = Array() // error +class Tensor2[T <: Int | Float](dtype: T): + def toSeq: Seq[T] = Seq() + def toArray: Array[T] = Array() // error + @main def Test = val t = Tensor(Float32) // Tensor[Float32] println(t.toSeq.headOption) // works, Seq[Float] println(t.toArray.headOption) // ClassCastException + + val t2 = Tensor2(0.0f) // Tensor2[Float] + println(t.toSeq.headOption) + println(t.toArray.headOption) diff --git a/tests/neg/i15998.check b/tests/neg/i15998.check index c745c7a84309..1f25946624cf 100644 --- a/tests/neg/i15998.check +++ b/tests/neg/i15998.check @@ -11,7 +11,7 @@ | must be more specific than CC[A] | | longer explanation available when compiling with `-explain` --- Error: tests/neg/i15998.scala:11:11 --------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i15998.scala:11:11 --------------------------------------------------------------------- 11 |val _ = bar // error | ^ | No implicit search was attempted for parameter x of method bar diff --git a/tests/neg/i16270a.scala b/tests/neg/i16270a.scala new file mode 100644 index 000000000000..b4a5016aaa08 --- /dev/null +++ b/tests/neg/i16270a.scala @@ -0,0 +1,25 @@ +class Outer { + type Smuggler + var smuggler: Option[Smuggler] = None +} +class Foo[T](var unpack: T) +class Evil(val outer: Outer, 
extract: outer.type => Unit) extends Foo[outer.type](outer) { // error + def doExtract(): Unit = extract(unpack) +} + +object Test { + def main(args: Array[String]): Unit = { + val outer1 = new Outer { type Smuggler = Int } + outer1.smuggler = Some(5) + val evil1 = new Evil(outer1, _ => ()) + + val outer2 = new Outer { type Smuggler = String } + var extractedOuter2: Option[outer2.type] = None + val evil2 = new Evil(outer2, x => extractedOuter2 = Some(x)) + + evil2.unpack = evil1.unpack + evil2.doExtract() + val smuggled: String = extractedOuter2.get.smuggler.get + println(smuggled) + } +} diff --git a/tests/neg/i16270b.scala b/tests/neg/i16270b.scala new file mode 100644 index 000000000000..d520bf7516e2 --- /dev/null +++ b/tests/neg/i16270b.scala @@ -0,0 +1,9 @@ +class Outer { + class Foo(var unpack: Outer.this.type) + + type Smuggler + var smuggler: Option[Smuggler] = None +} +class Evil(val outer: Outer, extract: outer.type => Unit) extends outer.Foo(outer) { // error + def doExtract(): Unit = extract(unpack) +} diff --git a/tests/neg/i16270c.scala b/tests/neg/i16270c.scala new file mode 100644 index 000000000000..e1d51913c1ce --- /dev/null +++ b/tests/neg/i16270c.scala @@ -0,0 +1,3 @@ +class Foo[T <: Singleton](x: T) +class Outer +class Evil(val outer: Outer) extends Foo(outer) // error (because outer.type appears in the inferred type) diff --git a/tests/neg/i16343.scala b/tests/neg/i16343.scala new file mode 100644 index 000000000000..d09ffcbe32c7 --- /dev/null +++ b/tests/neg/i16343.scala @@ -0,0 +1,2 @@ +class Issue16343: + class MyWorker extends javax.swing.SwingWorker[Unit, Unit] // error diff --git a/tests/neg/i16407.check b/tests/neg/i16407.check new file mode 100644 index 000000000000..5c6bd19ca8c1 --- /dev/null +++ b/tests/neg/i16407.check @@ -0,0 +1,12 @@ +-- Error: tests/neg/i16407.scala:2:2 ----------------------------------------------------------------------------------- +2 | f(g()) // error // error + | ^ + | cannot resolve reference to type 
(X.this : Y & X).A + | the classfile defining the type might be missing from the classpath + | or the self type of (X.this : Y & X) might not contain all transitive dependencies +-- Error: tests/neg/i16407.scala:2:4 ----------------------------------------------------------------------------------- +2 | f(g()) // error // error + | ^ + | cannot resolve reference to type (X.this : Y & X).A + | the classfile defining the type might be missing from the classpath + | or the self type of (X.this : Y & X) might not contain all transitive dependencies diff --git a/tests/neg/i16407.scala b/tests/neg/i16407.scala new file mode 100644 index 000000000000..ff7192390eef --- /dev/null +++ b/tests/neg/i16407.scala @@ -0,0 +1,11 @@ +trait X { self: Y => + f(g()) // error // error +} +trait Y { self: Z => + type B = A + def f(a: B): Unit = () + def g(): A = ??? +} +trait Z { + type A +} diff --git a/tests/neg/i1643.scala b/tests/neg/i1643.scala index a10422de6eab..1745539d73f5 100644 --- a/tests/neg/i1643.scala +++ b/tests/neg/i1643.scala @@ -1,4 +1,4 @@ -trait T extends Array { // error // error +trait T extends Array { // error def t1(as: String*): Array[String] = { varargs1(as*) } // error def t2(as: String*): Array[String] = { super.varargs1(as*) } // error } @@ -7,7 +7,7 @@ class C extends Base_1 { // error def c2(as: String*): Array[String] = { super.varargs1(as*) } // error } object Test extends App { - val t = new T {} // error + val t = new T {} println(t.t1("a", "b").mkString(",")) println(t.t2("a", "b").mkString(",")) val c = new C {} diff --git a/tests/neg/i16438.scala b/tests/neg/i16438.scala new file mode 100644 index 000000000000..33873b13384b --- /dev/null +++ b/tests/neg/i16438.scala @@ -0,0 +1,4 @@ +// scalac: -Ysafe-init +trait ATrait(val string: String, val int: Int) +trait AnotherTrait( override val string: String, override val int: Int) extends ATrait +case class ACaseClass(override val string: String) extends AnotherTrait(string, 3) // error diff --git 
a/tests/neg/i16452.check b/tests/neg/i16452.check new file mode 100644 index 000000000000..df4247aabc12 --- /dev/null +++ b/tests/neg/i16452.check @@ -0,0 +1,4 @@ +-- Error: tests/neg/i16452.scala:2:8 ----------------------------------------------------------------------------------- +2 |// error + | ^ + | indented definitions expected, eof found diff --git a/tests/neg/i16452.scala b/tests/neg/i16452.scala new file mode 100644 index 000000000000..d2b6c565a684 --- /dev/null +++ b/tests/neg/i16452.scala @@ -0,0 +1,2 @@ +val x = Seq(1, 2, 3).map: +// error \ No newline at end of file diff --git a/tests/neg/i16453.check b/tests/neg/i16453.check new file mode 100644 index 000000000000..e01ddf5cab7a --- /dev/null +++ b/tests/neg/i16453.check @@ -0,0 +1,45 @@ +-- [E172] Type Error: tests/neg/i16453.scala:21:19 --------------------------------------------------------------------- +21 | summon[List[Int]] // error + | ^ + | No given instance of type List[Int] was found for parameter x of method summon in object Predef +-- [E172] Type Error: tests/neg/i16453.scala:23:21 --------------------------------------------------------------------- +23 | summon[Option[Int]] // error + | ^ + |No given instance of type Option[Int] was found for parameter x of method summon in object Predef + | + |Note: implicit conversions are not automatically applied to arguments of using clauses. You will have to pass the argument explicitly. + |The following implicits in scope can be implicitly converted to Option[Int]: + |- final lazy given val baz3: Char + |- final lazy given val bar3: Int +-- [E172] Type Error: tests/neg/i16453.scala:24:26 --------------------------------------------------------------------- +24 | implicitly[Option[Char]] // error + | ^ + |No given instance of type Option[Char] was found for parameter e of method implicitly in object Predef + | + |Note: implicit conversions are not automatically applied to arguments of using clauses. You will have to pass the argument explicitly. 
+ |The following implicits in scope can be implicitly converted to Option[Char]: + |- final lazy given val baz3: Char +-- [E172] Type Error: tests/neg/i16453.scala:25:20 --------------------------------------------------------------------- +25 | implicitly[String] // error + | ^ + |No given instance of type String was found for parameter e of method implicitly in object Predef + | + |Note: implicit conversions are not automatically applied to arguments of using clauses. You will have to pass the argument explicitly. + |The following implicits in scope can be implicitly converted to String: + |- final lazy given val baz3: Char +-- [E172] Type Error: tests/neg/i16453.scala:35:16 --------------------------------------------------------------------- +35 | summon[String] // error + | ^ + |No given instance of type String was found for parameter x of method summon in object Predef + | + |Note: implicit conversions are not automatically applied to arguments of using clauses. You will have to pass the argument explicitly. + |The following implicits in scope can be implicitly converted to String: + |- implicit val baz2: Char +-- [E172] Type Error: tests/neg/i16453.scala:36:25 --------------------------------------------------------------------- +36 | implicitly[Option[Int]] // error + | ^ + |No given instance of type Option[Int] was found for parameter e of method implicitly in object Predef + | + |Note: implicit conversions are not automatically applied to arguments of using clauses. You will have to pass the argument explicitly. + |The following implicits in scope can be implicitly converted to Option[Int]: + |- implicit val bar2: Int diff --git a/tests/neg/i16453.scala b/tests/neg/i16453.scala new file mode 100644 index 000000000000..00495c39e21a --- /dev/null +++ b/tests/neg/i16453.scala @@ -0,0 +1,37 @@ +import scala.language.implicitConversions + +trait Foo { type T } + +// This one is irrelevant, shouldn't be included in error message +given irrelevant: Long = ??? 
+ +/** Use Scala 3 givens/conversions */ +def testScala3() = { + given c1[T]: Conversion[T, Option[T]] = ??? + given c2[F <: Foo](using f: F): Conversion[f.T, Option[f.T]] = ??? + given Conversion[Char, String] = ??? + given Conversion[Char, Option[Int]] = ??? + + given foo: Foo with + type T = Int + given bar3: Int = 0 + given baz3: Char = 'a' + + // This should get the usual error + summon[List[Int]] // error + + summon[Option[Int]] // error + implicitly[Option[Char]] // error + implicitly[String] // error +} + +/** Use Scala 2 implicits */ +def testScala2() = { + implicit def toOpt[T](t: T): Option[T] = ??? + implicit def char2Str(c: Char): String = ??? + implicit val bar2: Int = 1 + implicit val baz2: Char = 'b' + + summon[String] // error + implicitly[Option[Int]] // error +} diff --git a/tests/neg/i16464.scala b/tests/neg/i16464.scala new file mode 100644 index 000000000000..dfc4cd3da3c3 --- /dev/null +++ b/tests/neg/i16464.scala @@ -0,0 +1,6 @@ + +implicit final class SomeOps(e: Int) extends AnyVal: + def -(other: Seq[Int]) = List(1) + def -(other: Seq[Long]) = List(2) // error: double definition + +def main(): Unit = 1 - Seq.empty[Int] diff --git a/tests/neg/i16601.check b/tests/neg/i16601.check new file mode 100644 index 000000000000..25baef04e479 --- /dev/null +++ b/tests/neg/i16601.check @@ -0,0 +1,6 @@ +-- [E042] Type Error: tests/neg/i16601.scala:1:27 ---------------------------------------------------------------------- +1 |@main def Test: Unit = new concurrent.ExecutionContext // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | ExecutionContext is a trait; it cannot be instantiated + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i16601.scala b/tests/neg/i16601.scala new file mode 100644 index 000000000000..2e058db0093c --- /dev/null +++ b/tests/neg/i16601.scala @@ -0,0 +1 @@ +@main def Test: Unit = new concurrent.ExecutionContext // error \ No newline at end of file diff --git a/tests/neg/i16653.check 
b/tests/neg/i16653.check new file mode 100644 index 000000000000..dd5c756f6f79 --- /dev/null +++ b/tests/neg/i16653.check @@ -0,0 +1,6 @@ +-- [E006] Not Found Error: tests/neg/i16653.scala:1:7 ------------------------------------------------------------------ +1 |import demo.implicits._ // error + | ^^^^ + | Not found: demo + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i16653.scala b/tests/neg/i16653.scala new file mode 100644 index 000000000000..3be14d1bc6bf --- /dev/null +++ b/tests/neg/i16653.scala @@ -0,0 +1,3 @@ +import demo.implicits._ // error +import demo._ +object Demo {} \ No newline at end of file diff --git a/tests/neg/i16655.check b/tests/neg/i16655.check new file mode 100644 index 000000000000..e1335b624244 --- /dev/null +++ b/tests/neg/i16655.check @@ -0,0 +1,6 @@ +-- [E052] Type Error: tests/neg/i16655.scala:3:4 ----------------------------------------------------------------------- +3 | x = 5 // error + | ^^^^^ + | Reassignment to val x + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i16655.scala b/tests/neg/i16655.scala new file mode 100644 index 000000000000..c758678d9896 --- /dev/null +++ b/tests/neg/i16655.scala @@ -0,0 +1,3 @@ +object Test: + val x = "MyString" + x = 5 // error diff --git a/tests/neg/i16696.check b/tests/neg/i16696.check new file mode 100644 index 000000000000..2cac6a9c595a --- /dev/null +++ b/tests/neg/i16696.check @@ -0,0 +1,12 @@ +-- Error: tests/neg/i16696.scala:7:29 ---------------------------------------------------------------------------------- +7 | val boom1 = BoxMaker[Some].make1 // error + | ^ + | Some is not a value type, cannot be used in intersection Some & Int +-- Error: tests/neg/i16696.scala:8:29 ---------------------------------------------------------------------------------- +8 | val boom2 = BoxMaker[Some].make2 // error + | ^ + | Some is not a value type, cannot be used in union Some | Int +-- Error: 
tests/neg/i16696.scala:20:27 --------------------------------------------------------------------------------- +20 | val boom = BoxMaker[Foo].make(_.foo) // error + | ^ + | test2.Foo is not a value type, cannot be used in intersection R & test2.Foo diff --git a/tests/neg/i16696.scala b/tests/neg/i16696.scala new file mode 100644 index 000000000000..f54b884960fa --- /dev/null +++ b/tests/neg/i16696.scala @@ -0,0 +1,20 @@ +object test1: + class BoxMaker[T] { + def make1: T & Int = ??? + def make2: T | Int = ??? + } + + val boom1 = BoxMaker[Some].make1 // error + val boom2 = BoxMaker[Some].make2 // error + +object test2: + class Box[R] + + class BoxMaker[T] { + def make[R <: T](f: T => Box[R]): Box[R & T] = ??? + } + + trait Foo[A]{ + def foo: Box[Foo[Unit]] + } + val boom = BoxMaker[Foo].make(_.foo) // error diff --git a/tests/neg/i16820.check b/tests/neg/i16820.check new file mode 100644 index 000000000000..48824d683244 --- /dev/null +++ b/tests/neg/i16820.check @@ -0,0 +1,30 @@ +-- [E178] Type Error: tests/neg/i16820.scala:5:11 ---------------------------------------------------------------------- +5 | val x1 = f // error + | ^ + | missing argument list for method f in object Test + | + | def f(xs: Int*): Int + | + | longer explanation available when compiling with `-explain` +-- [E100] Syntax Error: tests/neg/i16820.scala:6:11 -------------------------------------------------------------------- +6 | val x2 = g // error + | ^ + | method g in object Test must be called with () argument + | + | longer explanation available when compiling with `-explain` +-- [E178] Type Error: tests/neg/i16820.scala:7:40 ---------------------------------------------------------------------- +7 | val x3 = java.nio.file.Paths.get(".").toRealPath // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | missing argument list for method toRealPath in trait Path + | + | def toRealPath(x$0: java.nio.file.LinkOption*): java.nio.file.Path + | + | longer explanation available when compiling 
with `-explain` +-- [E178] Type Error: tests/neg/i16820.scala:11:14 --------------------------------------------------------------------- +11 |def test = Foo(3) // error + | ^^^^^^ + | missing argument list for method apply in object Foo + | + | def apply(x: Int)(xs: String*): Foo + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i16820.scala b/tests/neg/i16820.scala new file mode 100644 index 000000000000..abdc741b9f0e --- /dev/null +++ b/tests/neg/i16820.scala @@ -0,0 +1,11 @@ +object Test: + def f(xs: Int*) = xs.sum + def g() = 1 + + val x1 = f // error + val x2 = g // error + val x3 = java.nio.file.Paths.get(".").toRealPath // error + +// #14567 +case class Foo(x: Int)(xs: String*) +def test = Foo(3) // error diff --git a/tests/neg/i16842.scala b/tests/neg/i16842.scala new file mode 100644 index 000000000000..1e7e5cc14339 --- /dev/null +++ b/tests/neg/i16842.scala @@ -0,0 +1,25 @@ +sealed trait Expr1 +sealed trait Literal extends Expr1 + +case class ArrayLiter(elems: List[Expr1]) extends Literal + +sealed trait SemanticType { + type T // the type with which a literal of this semanticType is represented +} +case object SemanticInt extends SemanticType { + type T = Int +} + +case class SemanticArray[U <: SemanticType](dim: Int) extends SemanticType { + type T = List[U] +} + +sealed trait Expr2[+T] +class Liter[T <: SemanticType](val ty: T, val value: ty.T) extends Expr2[T] + +def typecheckArrayLiter( + a: ArrayLiter +): Liter[SemanticArray[SemanticType]] = { + val x: List[Expr2[SemanticInt.type]] = List() + Liter(SemanticArray[SemanticInt.type], x) // error // error +} diff --git a/tests/neg/i16850.check b/tests/neg/i16850.check new file mode 100644 index 000000000000..6c9c7f7e0eac --- /dev/null +++ b/tests/neg/i16850.check @@ -0,0 +1,10 @@ +-- [E007] Type Mismatch Error: tests/neg/i16850.scala:7:33 ------------------------------------------------------------- +7 | def add(elm: Y): Unit = list = elm :: list // error + | 
^^^ + | Found: (elm : Y) + | Required: Class.this.Y² + | + | where: Y is a type in class Class + | Y² is a type in trait Trait + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i16850.scala b/tests/neg/i16850.scala new file mode 100644 index 000000000000..e7904fcd44e7 --- /dev/null +++ b/tests/neg/i16850.scala @@ -0,0 +1,10 @@ + +trait Trait : + type Y + var list: List[Y] = Nil + +class Class[Y] extends Trait : + def add(elm: Y): Unit = list = elm :: list // error + +object Object extends Class[Int] : + add(42) diff --git a/tests/neg/i16861.scala b/tests/neg/i16861.scala new file mode 100644 index 000000000000..50c56974d027 --- /dev/null +++ b/tests/neg/i16861.scala @@ -0,0 +1,2 @@ +given foo[T]: Any = summon[bar] // error +def bar: Nothing = ??? \ No newline at end of file diff --git a/tests/neg/i16861a.scala b/tests/neg/i16861a.scala new file mode 100644 index 000000000000..b93f884f5e56 --- /dev/null +++ b/tests/neg/i16861a.scala @@ -0,0 +1,4 @@ +import scala.quoted.* +trait Foo +object Foo: + inline given foo[T <: Foo]: T = summon[Type.of[T]] // error diff --git a/tests/neg/i16920.check b/tests/neg/i16920.check new file mode 100644 index 000000000000..131ba4c6265e --- /dev/null +++ b/tests/neg/i16920.check @@ -0,0 +1,88 @@ +-- [E008] Not Found Error: tests/neg/i16920.scala:20:11 ---------------------------------------------------------------- +20 | "five".wow // error + | ^^^^^^^^^^ + | value wow is not a member of String. + | An extension method was tried, but could not be fully constructed: + | + | Two.wow("five") + | + | failed with: + | + | Found: ("five" : String) + | Required: Int +-- [E008] Not Found Error: tests/neg/i16920.scala:28:6 ----------------------------------------------------------------- +28 | 5.wow // error + | ^^^^^ + | value wow is not a member of Int. 
+ | An extension method was tried, but could not be fully constructed: + | + | AlsoFails.wow(5) + | + | failed with: + | + | Found: (5 : Int) + | Required: Boolean +-- [E008] Not Found Error: tests/neg/i16920.scala:29:11 ---------------------------------------------------------------- +29 | "five".wow // error + | ^^^^^^^^^^ + | value wow is not a member of String. + | An extension method was tried, but could not be fully constructed: + | + | AlsoFails.wow("five") + | + | failed with: + | + | Found: ("five" : String) + | Required: Boolean +-- [E008] Not Found Error: tests/neg/i16920.scala:36:6 ----------------------------------------------------------------- +36 | 5.wow // error + | ^^^^^ + | value wow is not a member of Int. + | An extension method was tried, but could not be fully constructed: + | + | Three.wow(5) + | + | failed with: + | + | Ambiguous extension methods: + | both Three.wow(5) + | and Two.wow(5) + | are possible expansions of 5.wow +-- [E008] Not Found Error: tests/neg/i16920.scala:44:11 ---------------------------------------------------------------- +44 | "five".wow // error + | ^^^^^^^^^^ + | value wow is not a member of String. + | An extension method was tried, but could not be fully constructed: + | + | Two.wow("five") + | + | failed with: + | + | Found: ("five" : String) + | Required: Int +-- [E008] Not Found Error: tests/neg/i16920.scala:51:11 ---------------------------------------------------------------- +51 | "five".wow // error + | ^^^^^^^^^^ + | value wow is not a member of String. + | An extension method was tried, but could not be fully constructed: + | + | Two.wow("five") + | + | failed with: + | + | Found: ("five" : String) + | Required: Int +-- [E008] Not Found Error: tests/neg/i16920.scala:58:6 ----------------------------------------------------------------- +58 | 5.wow // error + | ^^^^^ + | value wow is not a member of Int. 
+ | An extension method was tried, but could not be fully constructed: + | + | Three.wow(5) + | + | failed with: + | + | Ambiguous extension methods: + | both Three.wow(5) + | and Two.wow(5) + | are possible expansions of 5.wow diff --git a/tests/neg/i16920.scala b/tests/neg/i16920.scala new file mode 100644 index 000000000000..38345e811c1f --- /dev/null +++ b/tests/neg/i16920.scala @@ -0,0 +1,59 @@ +import language.experimental.relaxedExtensionImports + +object One: + extension (s: String) + def wow: Unit = println(s) + +object Two: + extension (i: Int) + def wow: Unit = println(i) + +object Three: + extension (i: Int) + def wow: Unit = println(i) + +object Fails: + import One._ + def test: Unit = + import Two._ + 5.wow + "five".wow // error + +object AlsoFails: + extension (s: Boolean) + def wow = println(s) + import One._ + import Two._ + def test: Unit = + 5.wow // error + "five".wow // error + +object Fails2: + import One._ + import Two._ + import Three._ + def test: Unit = + 5.wow // error + "five".wow // ok + +object Fails3: + import One._ + import Two.wow + def test: Unit = + 5.wow // ok + "five".wow // error + +object Fails4: + import Two.wow + import One._ + def test: Unit = + 5.wow // ok + "five".wow // error + +object Fails5: + import One.wow + import Two.wow + import Three.wow + def test: Unit = + 5.wow // error + "five".wow // ok \ No newline at end of file diff --git a/tests/neg/i17002.scala b/tests/neg/i17002.scala new file mode 100644 index 000000000000..c2a21dd3d415 --- /dev/null +++ b/tests/neg/i17002.scala @@ -0,0 +1,20 @@ +import scala.annotation.compileTimeOnly + +sealed trait Test[T] + +object Test: + @compileTimeOnly("Error") + given test0[T]: Test[T] = ??? 
+ + @compileTimeOnly("Error") + given test1[T]: Test[T]() + + @compileTimeOnly("Error") + implicit class ic(x: Int): + def foo = 2 + + test0 // error + + test1 // error + + 2.foo // error \ No newline at end of file diff --git a/tests/neg/i17021.ext-java/A.java b/tests/neg/i17021.ext-java/A.java new file mode 100644 index 000000000000..536e9caa4a38 --- /dev/null +++ b/tests/neg/i17021.ext-java/A.java @@ -0,0 +1,6 @@ +// Derives from run/i17021.defs, but with a Java protected member +package p1; + +public class A { + protected int foo() { return 1; } +} diff --git a/tests/neg/i17021.ext-java/Test.scala b/tests/neg/i17021.ext-java/Test.scala new file mode 100644 index 000000000000..c700ed8138d7 --- /dev/null +++ b/tests/neg/i17021.ext-java/Test.scala @@ -0,0 +1,14 @@ +// Derives from run/i17021.defs +// but with a Java protected member +// which leads to a compile error +package p2: + trait B extends p1.A: + def bar: Int = foo // error: method bar accesses protected method foo inside a concrete trait method: use super.foo instead + + class C extends B: + override def foo: Int = 2 + +object Test: + def main(args: Array[String]): Unit = + val n = new p2.C().bar + assert(n == 2, n) diff --git a/tests/neg/i17089.scala b/tests/neg/i17089.scala new file mode 100644 index 000000000000..46968aa6f093 --- /dev/null +++ b/tests/neg/i17089.scala @@ -0,0 +1,4 @@ +object o: + trait T private[o]() + +def test = new o.T { } // error diff --git a/tests/neg/i17122.check b/tests/neg/i17122.check new file mode 100644 index 000000000000..683908c5af0f --- /dev/null +++ b/tests/neg/i17122.check @@ -0,0 +1,5 @@ +-- [E172] Type Error: tests/neg/i17122.scala:7:14 ---------------------------------------------------------------------- +7 |def test = m() // error + | ^ + | No given instance of type A was found for parameter of C + | Where C is an alias of: (A) ?=> B diff --git a/tests/neg/i17122.scala b/tests/neg/i17122.scala new file mode 100644 index 000000000000..fcf9af106488 --- /dev/null 
+++ b/tests/neg/i17122.scala @@ -0,0 +1,7 @@ +case class A() +case class B() + +type C = A ?=> B +def m(): C = ??? + +def test = m() // error diff --git a/tests/neg/i17123.check b/tests/neg/i17123.check new file mode 100644 index 000000000000..e858de67b73a --- /dev/null +++ b/tests/neg/i17123.check @@ -0,0 +1,86 @@ +-- [E100] Syntax Error: tests/neg/i17123.scala:7:2 --------------------------------------------------------------------- +7 | m1 // error + | ^^ + | method m1 in object ConfusingErrorMessage must be called with () argument + | + | longer explanation available when compiling with `-explain` +-- [E178] Type Error: tests/neg/i17123.scala:9:2 ----------------------------------------------------------------------- +9 | m2 // error + | ^^ + | missing argument list for method m2 in object ConfusingErrorMessage + | + | def m2()(): Unit + | + | longer explanation available when compiling with `-explain` +-- [E178] Type Error: tests/neg/i17123.scala:10:4 ---------------------------------------------------------------------- +10 | m2() // error + | ^^^^ + | missing argument list for method m2 in object ConfusingErrorMessage + | + | def m2()(): Unit + | + | longer explanation available when compiling with `-explain` +-- [E178] Type Error: tests/neg/i17123.scala:11:2 ---------------------------------------------------------------------- +11 | m3 // error + | ^^ + | missing argument list for method m3 in object ConfusingErrorMessage + | + | def m3()()(): Unit + | + | longer explanation available when compiling with `-explain` +-- [E178] Type Error: tests/neg/i17123.scala:12:4 ---------------------------------------------------------------------- +12 | m3() // error + | ^^^^ + | missing argument list for method m3 in object ConfusingErrorMessage + | + | def m3()()(): Unit + | + | longer explanation available when compiling with `-explain` +-- [E178] Type Error: tests/neg/i17123.scala:13:6 ---------------------------------------------------------------------- +13 | 
m3()() // error + | ^^^^^^ + | missing argument list for method m3 in object ConfusingErrorMessage + | + | def m3()()(): Unit + | + | longer explanation available when compiling with `-explain` +-- [E178] Type Error: tests/neg/i17123.scala:15:2 ---------------------------------------------------------------------- +15 | f3 // error + | ^^ + | missing argument list for method f3 in object ConfusingErrorMessage + | + | def f3()(i: Int)(): Unit + | + | longer explanation available when compiling with `-explain` +-- [E178] Type Error: tests/neg/i17123.scala:16:2 ---------------------------------------------------------------------- +16 | f3() // error + | ^^^^ + | missing argument list for method f3 in object ConfusingErrorMessage + | + | def f3()(i: Int)(): Unit + | + | longer explanation available when compiling with `-explain` +-- [E178] Type Error: tests/neg/i17123.scala:17:6 ---------------------------------------------------------------------- +17 | f3()(2) // error + | ^^^^^^^ + | missing argument list for method f3 in object ConfusingErrorMessage + | + | def f3()(i: Int)(): Unit + | + | longer explanation available when compiling with `-explain` +-- [E178] Type Error: tests/neg/i17123.scala:19:2 ---------------------------------------------------------------------- +19 | i3 // error + | ^^ + | missing argument list for method i3 in object ConfusingErrorMessage + | + | def i3()(using d: DummyImplicit)(): Unit + | + | longer explanation available when compiling with `-explain` +-- [E178] Type Error: tests/neg/i17123.scala:20:2 ---------------------------------------------------------------------- +20 | i3() // error + | ^^^^ + | missing argument list for method i3 in object ConfusingErrorMessage + | + | def i3()(using d: DummyImplicit)(): Unit + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i17123.scala b/tests/neg/i17123.scala new file mode 100644 index 000000000000..6547a375fec3 --- /dev/null +++ 
b/tests/neg/i17123.scala @@ -0,0 +1,22 @@ +object ConfusingErrorMessage { + def m1() = () + def m2()() = () + def m3()()() = () + def f3()(i: Int)() = () + def i3()(using d: DummyImplicit)() = () + m1 // error + m1() + m2 // error + m2() // error + m3 // error + m3() // error + m3()() // error + m3()()() + f3 // error + f3() // error + f3()(2) // error + f3()(2)() + i3 // error + i3() // error + i3()() +} diff --git a/tests/neg/i17168.scala b/tests/neg/i17168.scala new file mode 100644 index 000000000000..c31889c979b7 --- /dev/null +++ b/tests/neg/i17168.scala @@ -0,0 +1,3 @@ +type F[X <: String] = X + +val a = summon[F[Int] =:= Int] // error diff --git a/tests/neg/i17266.check b/tests/neg/i17266.check new file mode 100644 index 000000000000..7e07e3d43de4 --- /dev/null +++ b/tests/neg/i17266.check @@ -0,0 +1,88 @@ +-- [E181] Potential Issue Error: tests/neg/i17266.scala:4:2 ------------------------------------------------------------ +4 | synchronized { // error + | ^^^^^^^^^^^^ + | Suspicious top-level unqualified call to synchronized + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | Top-level unqualified calls to AnyRef or Any methods such as synchronized are + | resolved to calls on Predef or on imported methods. This might not be what + | you intended. 
+ --------------------------------------------------------------------------------------------------------------------- +-- [E181] Potential Issue Error: tests/neg/i17266.scala:17:2 ----------------------------------------------------------- +17 | synchronized { // error + | ^^^^^^^^^^^^ + | Suspicious top-level unqualified call to synchronized + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | Top-level unqualified calls to AnyRef or Any methods such as synchronized are + | resolved to calls on Predef or on imported methods. This might not be what + | you intended. + -------------------------------------------------------------------------------------------------------------------- +-- [E181] Potential Issue Error: tests/neg/i17266.scala:108:2 ---------------------------------------------------------- +108 | wait() // error + | ^^^^ + | Suspicious top-level unqualified call to wait + |------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | Top-level unqualified calls to AnyRef or Any methods such as wait are + | resolved to calls on Predef or on imported methods. This might not be what + | you intended. 
+ ------------------------------------------------------------------------------------------------------------------- +-- [E181] Potential Issue Error: tests/neg/i17266.scala:115:2 ---------------------------------------------------------- +115 | wait() // error + | ^^^^ + | Suspicious top-level unqualified call to wait + |------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | Top-level unqualified calls to AnyRef or Any methods such as wait are + | resolved to calls on Predef or on imported methods. This might not be what + | you intended. + ------------------------------------------------------------------------------------------------------------------- +-- [E181] Potential Issue Error: tests/neg/i17266.scala:121:2 ---------------------------------------------------------- +121 | wait(10) // error + | ^^^^ + | Suspicious top-level unqualified call to wait + |------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | Top-level unqualified calls to AnyRef or Any methods such as wait are + | resolved to calls on Predef or on imported methods. This might not be what + | you intended. 
+ ------------------------------------------------------------------------------------------------------------------- +-- [E181] Potential Issue Error: tests/neg/i17266.scala:128:2 ---------------------------------------------------------- +128 | wait(10) // error + | ^^^^ + | Suspicious top-level unqualified call to wait + |------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | Top-level unqualified calls to AnyRef or Any methods such as wait are + | resolved to calls on Predef or on imported methods. This might not be what + | you intended. + ------------------------------------------------------------------------------------------------------------------- +-- [E181] Potential Issue Error: tests/neg/i17266.scala:134:2 ---------------------------------------------------------- +134 | hashCode() // error + | ^^^^^^^^ + | Suspicious top-level unqualified call to hashCode + |------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | Top-level unqualified calls to AnyRef or Any methods such as hashCode are + | resolved to calls on Predef or on imported methods. This might not be what + | you intended. 
+ ------------------------------------------------------------------------------------------------------------------- +-- [E181] Potential Issue Error: tests/neg/i17266.scala:141:2 ---------------------------------------------------------- +141 | hashCode() // error + | ^^^^^^^^ + | Suspicious top-level unqualified call to hashCode + |------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | Top-level unqualified calls to AnyRef or Any methods such as hashCode are + | resolved to calls on Predef or on imported methods. This might not be what + | you intended. + ------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg/i17266.scala b/tests/neg/i17266.scala new file mode 100644 index 000000000000..5b74ea76810b --- /dev/null +++ b/tests/neg/i17266.scala @@ -0,0 +1,144 @@ +// scalac: -Werror -explain + +def test1 = + synchronized { // error + println("hello") + } + +def test2 = + this.synchronized { // not an error (should be?) + println("hello") + } + +object MyLib + +def test3 = + import MyLib.* + synchronized { // error + println("hello") + } + +def test4 = + 1.synchronized { // not an error (should be?) + println("hello") + } + +object Test4: + synchronized { // not an error + println("hello") + } + +object Test5: + def test5 = + synchronized { // not an error + println("hello") + } + +object Test6: + import MyLib.* + synchronized { // not an error + println("hello") + } + +object Test7: + import MyLib.* + def test7 = + synchronized { // not an error + println("hello") + } + +/* +object Test7b: + def test8 = + import MyLib.* + synchronized { // already an error: Reference to synchronized is ambiguous. 
+ println("hello") + } +*/ + +class Test8: + synchronized { // not an error + println("hello") + } + +class Test9: + def test5 = + synchronized { // not an error + println("hello") + } + +class Test10: + import MyLib.* + synchronized { // not an error + println("hello") + } + +class Test11: + import MyLib.* + def test7 = + synchronized { // not an error + println("hello") + } + +trait Test12: + synchronized { // not an error + println("hello") + } + +trait Test13: + def test5 = + synchronized { // not an error + println("hello") + } + +trait Test14: + import MyLib.* + synchronized { // not an error + println("hello") + } + +trait Test15: + import MyLib.* + def test7 = + synchronized { // not an error + println("hello") + } + +def test16 = + wait() // error + +def test17 = + this.wait() // not an error (should be?) + +def test18 = + import MyLib.* + wait() // error + +def test19 = + 1.wait() // not an error (should be?) + +def test20 = + wait(10) // error + +def test21 = + this.wait(10) // not an error (should be?) + +def test22 = + import MyLib.* + wait(10) // error + +def test23 = + 1.wait(10) // not an error (should be?) + +def test24 = + hashCode() // error + +def test25 = + this.hashCode() // not an error (should be?) + +def test26 = + import MyLib.* + hashCode() // error + +def test27 = + 1.hashCode()// not an error (should be? probably not) diff --git a/tests/neg/i1730.scala b/tests/neg/i1730.scala new file mode 100644 index 000000000000..d88d3c007002 --- /dev/null +++ b/tests/neg/i1730.scala @@ -0,0 +1,7 @@ +import scala.reflect.ClassTag + +@main def Test = + val x: Array[? <: String] = Array[Int & Nothing]() // error: No ClassTag available for Int & Nothing + // (was: ClassCastException: [I cannot be cast to [Ljava.lang.String) + val y: Array[? 
<: Int] = Array[String & Nothing]() // error: No ClassTag available for String & Nothing + // (was: ClassCastException: [Lscala.runtime.Nothing$; cannot be cast to [I) diff --git a/tests/neg/i18109.scala b/tests/neg/i18109.scala new file mode 100644 index 000000000000..7df13b0c36ff --- /dev/null +++ b/tests/neg/i18109.scala @@ -0,0 +1,11 @@ +package foo {} + +package bar { + object Test { + def qux[A] = 123 + def main(args: Array[String]): Unit = { + val y = qux[foo.type] // error + val x = valueOf[foo.type] // error + } + } +} \ No newline at end of file diff --git a/tests/neg/i3935.scala b/tests/neg/i3935.scala new file mode 100644 index 000000000000..07515a4c9ff9 --- /dev/null +++ b/tests/neg/i3935.scala @@ -0,0 +1,10 @@ +enum Foo3[T](x: T) { + case Bar[S, T](y: T) extends Foo3[y.type](y) // error +} + +// val foo: Foo3.Bar[Nothing, 3] = Foo3.Bar(3) +// val bar = foo + +// def baz[T](f: Foo3[T]): f.type = f + +// val qux = baz(bar) // existentials are back in Dotty? diff --git a/tests/neg/i4373b.scala b/tests/neg/i4373b.scala index 45b60a46c721..93d967ef7778 100644 --- a/tests/neg/i4373b.scala +++ b/tests/neg/i4373b.scala @@ -1,5 +1,5 @@ // ==> 05bef7805687ba94da37177f7568e3ba7da1f91c.scala <== class x0 { - x1: - x0 | _ // error -// error \ No newline at end of file + x1: // error + x0 | _ + // error \ No newline at end of file diff --git a/tests/neg/i4820.scala b/tests/neg/i4820.scala deleted file mode 100644 index e19183b17b14..000000000000 --- a/tests/neg/i4820.scala +++ /dev/null @@ -1,2 +0,0 @@ -class Foo[A] -class Bar[A] extends Foo // error diff --git a/tests/neg/i4820b.scala b/tests/neg/i4820b.scala deleted file mode 100644 index 4a7b3da3fb1b..000000000000 --- a/tests/neg/i4820b.scala +++ /dev/null @@ -1,5 +0,0 @@ -trait SetOps[A, +C <: SetOps[A, C]] { - def concat(that: Iterable[A]): C = ??? 
-} - -class Set1[A] extends SetOps // error: should be SetOps[A, Set1[A]] diff --git a/tests/neg/i4820c.scala b/tests/neg/i4820c.scala deleted file mode 100644 index 6956b23363b5..000000000000 --- a/tests/neg/i4820c.scala +++ /dev/null @@ -1,2 +0,0 @@ -trait Foo[A] -class Bar[A] extends Foo // error \ No newline at end of file diff --git a/tests/neg/i4986a.check b/tests/neg/i4986a.check index 3aac0a7b2cf3..141f3fa8aacb 100644 --- a/tests/neg/i4986a.check +++ b/tests/neg/i4986a.check @@ -1,4 +1,4 @@ --- Error: tests/neg/i4986a.scala:6:57 ---------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i4986a.scala:6:57 ---------------------------------------------------------------------- 6 | def foo(l: Lst[Int]) = l.map[Int, List[String]](x => 1) // error | ^ |Cannot construct a collection of type List[String] with elements of type Int based on a collection of type List[Int].. diff --git a/tests/neg/i4986c.check b/tests/neg/i4986c.check index a5fe0cee26bf..8befc30f5a60 100644 --- a/tests/neg/i4986c.check +++ b/tests/neg/i4986c.check @@ -1,64 +1,64 @@ --- Error: tests/neg/i4986c.scala:38:8 ---------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i4986c.scala:38:8 ---------------------------------------------------------------------- 38 | test.f // error | ^ | Missing X$Y for Test[Char] --- Error: tests/neg/i4986c.scala:39:13 --------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i4986c.scala:39:13 --------------------------------------------------------------------- 39 | test.g[Int] // error | ^ | Missing Outer[Int] with OuterMember = pkg.Outer[Int]#OuterMember --- Error: tests/neg/i4986c.scala:40:13 --------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i4986c.scala:40:13 
--------------------------------------------------------------------- 40 | test.h[X$Y] // error | ^ | Missing Outer[pkg.X$Y] with OuterMember = pkg.Outer[pkg.X$Y]#OuterMember --- Error: tests/neg/i4986c.scala:41:24 --------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i4986c.scala:41:24 --------------------------------------------------------------------- 41 | test.i[Option[String]] // error | ^ | Missing implicit outer param of type Outer[Option[String]] for Test[Char] --- Error: tests/neg/i4986c.scala:42:43 --------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i4986c.scala:42:43 --------------------------------------------------------------------- 42 | test.j[(Long, Long), Int | String, Array] // error | ^ |Missing Inner[Int | String, Array] with InnerMember = pkg.Outer[(Long, Long)]#Inner[Int | String, Array]#InnerMember from Outer[(Long, Long)] with OuterMember = pkg.Outer[(Long, Long)]#OuterMember --- Error: tests/neg/i4986c.scala:43:53 --------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i4986c.scala:43:53 --------------------------------------------------------------------- 43 | test.k[Either[String, Any], Seq[Seq[Char]], Vector] // error | ^ | Missing implicit inner param of type Outer[Either[String, Any]]#Inner[Seq[Seq[Char]], Vector] for Test[Char] --- Error: tests/neg/i4986c.scala:45:87 --------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i4986c.scala:45:87 --------------------------------------------------------------------- 45 | implicitly[Outer[Option[String] | List[Iterable[Char]]] { type MyType = BigDecimal }] // error | ^ - |Missing Outer[Option[String] | List[Iterable[Char]]] with OuterMember = pkg.Outer[Option[String] | List[Iterable[Char]]]{MyType = BigDecimal}#OuterMember --- Error: 
tests/neg/i4986c.scala:46:106 -------------------------------------------------------------------------------- + |Missing Outer[Option[String] | List[Iterable[Char]]] with OuterMember = pkg.Outer[Option[String] | List[Iterable[Char]]]{type MyType = BigDecimal}#OuterMember +-- [E172] Type Error: tests/neg/i4986c.scala:46:106 -------------------------------------------------------------------- 46 | implicitly[(Outer[Option[String] | List[Iterable[Char]]] { type MyType = BigDecimal })#Inner[Byte, Seq]] // error | ^ - |Missing Inner[Byte, Seq] with InnerMember = pkg.Outer[Option[String] | List[Iterable[Char]]]{MyType = BigDecimal}#Inner[Byte, Seq]#InnerMember from Outer[Option[String] | List[Iterable[Char]]] with OuterMember = pkg.Outer[Option[String] | List[Iterable[Char]]]{MyType = BigDecimal}#OuterMember --- Error: tests/neg/i4986c.scala:47:33 --------------------------------------------------------------------------------- + |Missing Inner[Byte, Seq] with InnerMember = pkg.Outer[Option[String] | List[Iterable[Char]]]{type MyType = BigDecimal}#Inner[Byte, Seq]#InnerMember from Outer[Option[String] | List[Iterable[Char]]] with OuterMember = pkg.Outer[Option[String] | List[Iterable[Char]]]{type MyType = BigDecimal}#OuterMember +-- [E172] Type Error: tests/neg/i4986c.scala:47:33 --------------------------------------------------------------------- 47 | implicitly[Outer[Int] @myAnnot] // error | ^ | Missing Outer[Int] with OuterMember = pkg.Outer[Int] @myAnnot#OuterMember --- Error: tests/neg/i4986c.scala:52:52 --------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i4986c.scala:52:52 --------------------------------------------------------------------- 52 | implicitly[Outer[Int] { type OuterMember = Long }] // error | ^ | Missing Outer[Int] with OuterMember = Long --- Error: tests/neg/i4986c.scala:53:24 --------------------------------------------------------------------------------- +-- [E172] Type Error: 
tests/neg/i4986c.scala:53:24 --------------------------------------------------------------------- 53 | implicitly[outer.type] // error | ^ | Missing Outer[Int] with OuterMember = pkg.Test.outer.OuterMember --- Error: tests/neg/i4986c.scala:54:104 -------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i4986c.scala:54:104 -------------------------------------------------------------------- 54 | implicitly[(Outer[Int] { type OuterMember = Long })#Inner[Long, Iterator] { type InnerMember = Byte }] // error | ^ | Missing Inner[Long, Iterator] with InnerMember = Byte from Outer[Int] with OuterMember = Long --- Error: tests/neg/i4986c.scala:55:69 --------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i4986c.scala:55:69 --------------------------------------------------------------------- 55 | implicitly[outer.Inner[Long, Iterator] { type InnerMember = Byte }] // error | ^ |Missing Inner[Long, Iterator] with InnerMember = Byte from Outer[Int] with OuterMember = pkg.Test.outer.OuterMember --- Error: tests/neg/i4986c.scala:56:24 --------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i4986c.scala:56:24 --------------------------------------------------------------------- 56 | implicitly[inner.type] // error | ^ |Missing Inner[Long, Iterator] with InnerMember = pkg.Test.inner.InnerMember from Outer[Int] with OuterMember = pkg.Test.outer.OuterMember --- Error: tests/neg/i4986c.scala:58:33 --------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i4986c.scala:58:33 --------------------------------------------------------------------- 58 | implicitly[U[Int, Option, Map]] // error | ^ | There's no U[Int, Option, Map] --- Error: tests/neg/i4986c.scala:62:19 --------------------------------------------------------------------------------- +-- [E172] 
Type Error: tests/neg/i4986c.scala:62:19 --------------------------------------------------------------------- 62 | i.m[Option[Long]] // error | ^ | String; List; [A, _] =>> List[Option[?]]; Int; Option[Long]; diff --git a/tests/neg/i5498-postfixOps.check b/tests/neg/i5498-postfixOps.check index 59568a7fd9f3..d41862364270 100644 --- a/tests/neg/i5498-postfixOps.check +++ b/tests/neg/i5498-postfixOps.check @@ -10,7 +10,7 @@ | expression expected but ')' found | | longer explanation available when compiling with `-explain` --- Error: tests/neg/i5498-postfixOps.scala:6:0 ------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i5498-postfixOps.scala:6:0 ------------------------------------------------------------- 6 | Seq(1, 2).filter(List(1,2) contains) // error: usage of postfix operator // error |^ |No given instance of type scala.concurrent.duration.DurationConversions.Classifier[Null] was found for parameter ev of method second in trait DurationConversions diff --git a/tests/pos/i5636.scala b/tests/neg/i5636.scala similarity index 72% rename from tests/pos/i5636.scala rename to tests/neg/i5636.scala index 0a38439d718e..9c3b30af801a 100644 --- a/tests/pos/i5636.scala +++ b/tests/neg/i5636.scala @@ -4,6 +4,6 @@ trait Bar[X] { def foo: X = ??? 
} // same for `class Foo(...)...` -trait Foo(val a: A) extends Bar[a.type] { +trait Foo(val a: A) extends Bar[a.type] { // error val same: a.type = foo } diff --git a/tests/pending/neg/i5690.scala b/tests/neg/i5690.scala similarity index 100% rename from tests/pending/neg/i5690.scala rename to tests/neg/i5690.scala diff --git a/tests/neg/i6056.scala b/tests/neg/i6056.scala index ad68616eecc2..8e39b0e4631c 100644 --- a/tests/neg/i6056.scala +++ b/tests/neg/i6056.scala @@ -2,6 +2,6 @@ object i0{ import i0.i0 // error // error def i0={ import _ // error - import // error + import } // error } \ No newline at end of file diff --git a/tests/neg/i6183.check b/tests/neg/i6183.check index 70c1afaae621..6c7e96f1088a 100644 --- a/tests/neg/i6183.check +++ b/tests/neg/i6183.check @@ -4,7 +4,9 @@ | value render is not a member of Int. | An extension method was tried, but could not be fully constructed: | - | render(42) failed with + | render(42) + | + | failed with: | | Ambiguous overload. The overloaded alternatives of method render in object Test with types | [B](b: B)(using x$2: DummyImplicit): Char diff --git a/tests/neg/i6779.check b/tests/neg/i6779.check index d895203221ec..f1e1b9d5557b 100644 --- a/tests/neg/i6779.check +++ b/tests/neg/i6779.check @@ -11,7 +11,9 @@ | value f is not a member of T. 
| An extension method was tried, but could not be fully constructed: | - | Test.f[G[T]](x)(given_Stuff) failed with + | Test.f[G[T]](x) + | + | failed with: | | Found: (x : T) | Required: G[T] diff --git a/tests/neg/i7613.check b/tests/neg/i7613.check index 85d73b5c88f3..8ce12426c90c 100644 --- a/tests/neg/i7613.check +++ b/tests/neg/i7613.check @@ -1,4 +1,4 @@ --- Error: tests/neg/i7613.scala:10:16 ---------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i7613.scala:10:16 ---------------------------------------------------------------------- 10 | new BazLaws[A] {} // error | ^ | No given instance of type Baz[A] was found for parameter x$1 of constructor BazLaws in trait BazLaws diff --git a/tests/neg/i7709.check b/tests/neg/i7709.check index 20ecc4adce5f..180cf1939d16 100644 --- a/tests/neg/i7709.check +++ b/tests/neg/i7709.check @@ -1,46 +1,46 @@ --- Error: tests/neg/i7709.scala:5:20 ----------------------------------------------------------------------------------- +-- [E173] Reference Error: tests/neg/i7709.scala:5:20 ------------------------------------------------------------------ 5 | class B extends X.Y // error | ^^^ | class Y cannot be accessed as a member of X.type from class B. | Access to protected class Y not permitted because enclosing object A | is not a subclass of object X where target is defined --- Error: tests/neg/i7709.scala:6:21 ----------------------------------------------------------------------------------- +-- [E173] Reference Error: tests/neg/i7709.scala:6:21 ------------------------------------------------------------------ 6 | class B2 extends X.Y: // error | ^^^ | class Y cannot be accessed as a member of X.type from class B2. 
| Access to protected class Y not permitted because enclosing object A | is not a subclass of object X where target is defined --- Error: tests/neg/i7709.scala:9:28 ----------------------------------------------------------------------------------- +-- [E173] Reference Error: tests/neg/i7709.scala:9:28 ------------------------------------------------------------------ 9 | class B4 extends B3(new X.Y) // error | ^^^ | class Y cannot be accessed as a member of X.type from class B4. | Access to protected class Y not permitted because enclosing object A | is not a subclass of object X where target is defined --- Error: tests/neg/i7709.scala:11:34 ---------------------------------------------------------------------------------- +-- [E173] Reference Error: tests/neg/i7709.scala:11:34 ----------------------------------------------------------------- 11 | def this(n: Int) = this(new X.Y().toString) // error | ^^^ | class Y cannot be accessed as a member of X.type from class B5. | Access to protected class Y not permitted because enclosing object A | is not a subclass of object X where target is defined --- Error: tests/neg/i7709.scala:13:20 ---------------------------------------------------------------------------------- +-- [E173] Reference Error: tests/neg/i7709.scala:13:20 ----------------------------------------------------------------- 13 | class B extends X.Y // error | ^^^ | class Y cannot be accessed as a member of X.type from class B. | Access to protected class Y not permitted because enclosing trait T | is not a subclass of object X where target is defined --- Error: tests/neg/i7709.scala:18:18 ---------------------------------------------------------------------------------- +-- [E173] Reference Error: tests/neg/i7709.scala:18:18 ----------------------------------------------------------------- 18 | def y = new xx.Y // error | ^^^^ | class Y cannot be accessed as a member of XX from class C. 
| Access to protected class Y not permitted because enclosing class C | is not a subclass of class XX where target is defined --- Error: tests/neg/i7709.scala:23:20 ---------------------------------------------------------------------------------- +-- [E173] Reference Error: tests/neg/i7709.scala:23:20 ----------------------------------------------------------------- 23 | def y = new xx.Y // error | ^^^^ | class Y cannot be accessed as a member of XX from class D. | Access to protected class Y not permitted because enclosing class D | is not a subclass of class XX where target is defined --- Error: tests/neg/i7709.scala:31:20 ---------------------------------------------------------------------------------- +-- [E173] Reference Error: tests/neg/i7709.scala:31:20 ----------------------------------------------------------------- 31 | class Q extends X.Y // error | ^^^ | class Y cannot be accessed as a member of p.X.type from class Q. diff --git a/tests/neg/i7751.scala b/tests/neg/i7751.scala index 4c835a533704..978ed860574f 100644 --- a/tests/neg/i7751.scala +++ b/tests/neg/i7751.scala @@ -1,3 +1,3 @@ -import language.experimental.fewerBraces +import language.`3.3` val a = Some(a=a,)=> // error // error val a = Some(x=y,)=> diff --git a/tests/neg/i7816.scala b/tests/neg/i7816.scala index f1eed694a085..41dd6c2ea98e 100644 --- a/tests/neg/i7816.scala +++ b/tests/neg/i7816.scala @@ -1,4 +1,4 @@ object A { def f()(>) = ??? 
// error - import f.NonExistent // error + import f.NonExistent } \ No newline at end of file diff --git a/tests/neg/i827.check b/tests/neg/i827.check new file mode 100644 index 000000000000..825aefbb480b --- /dev/null +++ b/tests/neg/i827.check @@ -0,0 +1,11 @@ +-- [E069] Naming Error: tests/neg/i827.scala:3:8 ----------------------------------------------------------------------- +3 | trait Inner extends self.Inner // error: cannot merge trait Inner in trait A with trait Inner in trait B as members of type (A & B)(B.this) + | ^ + |trait Inner cannot have the same name as trait Inner in trait A -- cannot define trait member with the same name as a trait member in self reference self. + |(Note: this can be resolved by using another name) +-- [E110] Syntax Error: tests/neg/i827.scala:7:16 ---------------------------------------------------------------------- +7 |class C extends C // error: cyclic inheritance: class C extends itself + | ^ + | Cyclic inheritance: class C extends itself + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i8337.scala b/tests/neg/i8337.scala index 7955c471fb70..6e42b96c2855 100644 --- a/tests/neg/i8337.scala +++ b/tests/neg/i8337.scala @@ -1,6 +1,6 @@ trait Foo[F <: Foo[F]] class Bar extends Foo[Bar] -object Q { // error: recursion limit exceeded - opaque type X <: Foo[X] = Bar // error: out of bounds // error +object Q { // error: cyclic reference + opaque type X <: Foo[X] = Bar // error: cyclic reference } \ No newline at end of file diff --git a/tests/neg/i8623.check b/tests/neg/i8623.check index b9d6e244e70e..39337a7839d8 100644 --- a/tests/neg/i8623.check +++ b/tests/neg/i8623.check @@ -8,4 +8,6 @@ | This might be because resolution yielded as given instance a function that is not | known to be total and side-effect free. 
| + | where: ?1 is an unknown value of type QC + | | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i9185.check b/tests/neg/i9185.check index ffeed7e2fb2d..1616e739c473 100644 --- a/tests/neg/i9185.check +++ b/tests/neg/i9185.check @@ -5,11 +5,12 @@ |An extension method was tried, but could not be fully constructed: | | M.pure[A, F]("ola")( - | /* ambiguous: both object listMonad in object M and object optionMonad in object M match type M[F] */summon[M[F]] - | ) failed with + | /* ambiguous: both object listMonad in object M and object optionMonad in object M match type M[F] */summon[M[F]]) + | + | failed with: | | Ambiguous given instances: both object listMonad in object M and object optionMonad in object M match type M[F] of parameter m of method pure in object M --- Error: tests/neg/i9185.scala:8:28 ----------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i9185.scala:8:28 ----------------------------------------------------------------------- 8 | val value3 = M.pure("ola") // error | ^ |Ambiguous given instances: both object listMonad in object M and object optionMonad in object M match type M[F] of parameter m of method pure in object M @@ -19,7 +20,9 @@ | value len is not a member of String. 
| An extension method was tried, but could not be fully constructed: | - | M.len("abc") failed with + | M.len("abc") + | + | failed with: | | Found: ("abc" : String) | Required: Int diff --git a/tests/neg/i9568.check b/tests/neg/i9568.check index b6e20bdaf1be..3f318d0b0111 100644 --- a/tests/neg/i9568.check +++ b/tests/neg/i9568.check @@ -1,12 +1,16 @@ --- Error: tests/neg/i9568.scala:13:10 ---------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i9568.scala:13:10 ---------------------------------------------------------------------- 13 | blaMonad.foo(bla) // error: diverges | ^ - | No given instance of type => Monad[F] was found for parameter ev of method blaMonad in object Test - | - | where: F is a type variable with constraint <: [_] =>> Any - | . + | No given instance of type => Monad[F] was found for parameter ev of method blaMonad in object Test. | I found: | - | Test.blaMonad[F, S](Test.blaMonad[F, S]) + | Test.blaMonad[F², S](Test.blaMonad[F³, S²]) | - | But method blaMonad in object Test does not match type => Monad[F]. + | But method blaMonad in object Test does not match type => Monad[F²] + | + | where: F is a type variable with constraint <: [_] =>> Any + | F² is a type variable with constraint <: [_] =>> Any + | F³ is a type variable with constraint <: [_] =>> Any + | S is a type variable + | S² is a type variable + | . diff --git a/tests/neg/i9803.check b/tests/neg/i9803.check index cc7d56d585b0..20225f1f5bc5 100644 --- a/tests/neg/i9803.check +++ b/tests/neg/i9803.check @@ -1,8 +1,8 @@ -- [E049] Reference Error: tests/neg/i9803.scala:15:10 ----------------------------------------------------------------- 15 | println(f421()) // error | ^^^^ - | Reference to f421 is ambiguous, - | it is both imported by name by import bugs.shadowing.x.f421 + | Reference to f421 is ambiguous. 
+ | It is both imported by name by import bugs.shadowing.x.f421 | and imported by name subsequently by import bugs.shadowing.y.f421 | | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i9958.check b/tests/neg/i9958.check index d8b37b996ec1..3f65286eebc2 100644 --- a/tests/neg/i9958.check +++ b/tests/neg/i9958.check @@ -1,4 +1,4 @@ --- Error: tests/neg/i9958.scala:1:30 ----------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i9958.scala:1:30 ----------------------------------------------------------------------- 1 |val x = summon[[X] =>> (X, X)] // error | ^ | No given instance of type [X] =>> (X, X) was found for parameter x of method summon in object Predef diff --git a/tests/neg/implicitSearch.check b/tests/neg/implicitSearch.check index d7ea6c01801c..e8efc744ac0a 100644 --- a/tests/neg/implicitSearch.check +++ b/tests/neg/implicitSearch.check @@ -1,4 +1,4 @@ --- Error: tests/neg/implicitSearch.scala:13:12 ------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/implicitSearch.scala:13:12 ------------------------------------------------------------- 13 | sort(xs) // error (with a partially constructed implicit argument shown) | ^ | No given instance of type Test.Ord[List[List[T]]] was found for parameter o of method sort in object Test. @@ -7,7 +7,7 @@ | Test.listOrd[List[T]](Test.listOrd[T](/* missing */summon[Test.Ord[T]])) | | But no implicit values were found that match type Test.Ord[T]. 
--- Error: tests/neg/implicitSearch.scala:15:38 ------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/implicitSearch.scala:15:38 ------------------------------------------------------------- 15 | listOrd(listOrd(implicitly[Ord[T]] /*not found*/)) // error | ^ | No given instance of type Test.Ord[T] was found for parameter e of method implicitly in object Predef diff --git a/tests/neg/indent-colons.check b/tests/neg/indent-colons.check index 06bd7a31b079..f77d491f8b8f 100644 --- a/tests/neg/indent-colons.check +++ b/tests/neg/indent-colons.check @@ -1,29 +1,29 @@ --- Error: tests/neg/indent-colons.scala:6:4 ---------------------------------------------------------------------------- -6 | : // error +-- Error: tests/neg/indent-colons.scala:7:4 ---------------------------------------------------------------------------- +7 | : // error | ^ | end of statement expected but ':' found --- Error: tests/neg/indent-colons.scala:12:2 --------------------------------------------------------------------------- -12 | : // error +-- Error: tests/neg/indent-colons.scala:13:2 --------------------------------------------------------------------------- +13 | : // error | ^ | end of statement expected but ':' found --- Error: tests/neg/indent-colons.scala:19:2 --------------------------------------------------------------------------- -19 | : // error +-- Error: tests/neg/indent-colons.scala:20:2 --------------------------------------------------------------------------- +20 | : // error | ^ | end of statement expected but ':' found --- [E018] Syntax Error: tests/neg/indent-colons.scala:26:14 ------------------------------------------------------------ -26 | val y = 1 + : // error +-- [E018] Syntax Error: tests/neg/indent-colons.scala:27:14 ------------------------------------------------------------ +27 | val y = 1 + : // error | ^ | expression expected but : found | | longer explanation available when compiling with `-explain` --- 
[E018] Syntax Error: tests/neg/indent-colons.scala:30:27 ------------------------------------------------------------ -30 | val all = credentials ++ : // error +-- [E018] Syntax Error: tests/neg/indent-colons.scala:31:27 ------------------------------------------------------------ +31 | val all = credentials ++ : // error | ^ | expression expected but : found | | longer explanation available when compiling with `-explain` --- [E134] Type Error: tests/neg/indent-colons.scala:23:12 -------------------------------------------------------------- -23 | val x = 1.+ : // error +-- [E134] Type Error: tests/neg/indent-colons.scala:24:12 -------------------------------------------------------------- +24 | val x = 1.+ : // error | ^^^ | None of the overloaded alternatives of method + in class Int with types | (x: Double): Double @@ -35,27 +35,43 @@ | (x: Byte): Int | (x: String): String | match expected type (2 : Int) --- [E006] Not Found Error: tests/neg/indent-colons.scala:32:7 ---------------------------------------------------------- -32 | if file.isEmpty // error +-- [E006] Not Found Error: tests/neg/indent-colons.scala:33:7 ---------------------------------------------------------- +33 | if file.isEmpty // error | ^^^^ | Not found: file | | longer explanation available when compiling with `-explain` --- [E006] Not Found Error: tests/neg/indent-colons.scala:34:13 --------------------------------------------------------- -34 | else Seq(file) // error +-- [E006] Not Found Error: tests/neg/indent-colons.scala:35:13 --------------------------------------------------------- +35 | else Seq(file) // error | ^^^^ | Not found: file | | longer explanation available when compiling with `-explain` --- Error: tests/neg/indent-colons.scala:4:2 ---------------------------------------------------------------------------- -4 | tryEither: // error +-- [E178] Type Error: tests/neg/indent-colons.scala:5:2 ---------------------------------------------------------------- +5 | tryEither: // 
error | ^^^^^^^^^ - | missing arguments for method tryEither --- Error: tests/neg/indent-colons.scala:10:2 --------------------------------------------------------------------------- -10 | tryEither: // error + | missing argument list for method tryEither + | + | def tryEither[T](x: T)(y: Int => T): T + | + | where: T is a type variable + | + | longer explanation available when compiling with `-explain` +-- [E178] Type Error: tests/neg/indent-colons.scala:11:2 --------------------------------------------------------------- +11 | tryEither: // error | ^^^^^^^^^ - | missing arguments for method tryEither --- Error: tests/neg/indent-colons.scala:17:2 --------------------------------------------------------------------------- -17 | Some(3).fold: // error + | missing argument list for method tryEither + | + | def tryEither[T](x: T)(y: Int => T): T + | + | where: T is a type variable + | + | longer explanation available when compiling with `-explain` +-- [E178] Type Error: tests/neg/indent-colons.scala:18:2 --------------------------------------------------------------- +18 | Some(3).fold: // error | ^^^^^^^^^^^^ - | missing arguments for method fold in class Option + | missing argument list for method fold in class Option + | + | final def fold[B](ifEmpty: => B)(f: A => B): B + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/indent-colons.scala b/tests/neg/indent-colons.scala index 5364713dd4aa..240012f5489b 100644 --- a/tests/neg/indent-colons.scala +++ b/tests/neg/indent-colons.scala @@ -1,3 +1,4 @@ +import language.`3.2` def tryEither[T](x: T)(y: Int => T): T = ??? 
def test1 = diff --git a/tests/neg/indent-experimental.scala b/tests/neg/indent-experimental.scala index e945e172d1de..34ea5633010c 100644 --- a/tests/neg/indent-experimental.scala +++ b/tests/neg/indent-experimental.scala @@ -1,4 +1,4 @@ -import language.experimental.fewerBraces +import language.`3.3` val x = if true then: // error diff --git a/tests/neg/inline-param-unstable-path.scala b/tests/neg/inline-param-unstable-path.scala new file mode 100644 index 000000000000..be2d7142bc2f --- /dev/null +++ b/tests/neg/inline-param-unstable-path.scala @@ -0,0 +1,6 @@ +inline val a = 3 +inline def f(inline x: Int, y: Int, z: => Int): Unit = + val x2: x.type = x // error: (x : Int) is not a valid singleton type, since it is not an immutable path + val y2: y.type = y + val z2: z.type = z // error: (z : Int) is not a valid singleton type, since it is not an immutable path + val a2: a.type = a diff --git a/tests/neg/inline-val-in-inline-method.scala b/tests/neg/inline-val-in-inline-method.scala new file mode 100644 index 000000000000..fbd0f69ff2d5 --- /dev/null +++ b/tests/neg/inline-val-in-inline-method.scala @@ -0,0 +1,8 @@ +inline def f(inline x: Int): Unit = + inline val b = x + val c: b.type = b + +def test = + f(1) + def a = 1 + f(a) // error: inline value must have a literal constant type diff --git a/tests/neg/interleaving-ab.scala b/tests/neg/interleaving-ab.scala new file mode 100644 index 000000000000..e446626a2982 --- /dev/null +++ b/tests/neg/interleaving-ab.scala @@ -0,0 +1,11 @@ +import scala.language.experimental.clauseInterleaving + +object Ab: + given String = "" + given Double = 0 + + def illegal[A][B](x: A)(using B): B = summon[B] // error: Type parameter lists must be separated by a term or using parameter list + + def ab[A](x: A)[B](using B): B = summon[B] + def test = + ab[Int](0: Int) // error diff --git a/tests/neg/interleaving-params.scala b/tests/neg/interleaving-params.scala new file mode 100644 index 000000000000..dc6762cf0214 --- /dev/null +++ 
b/tests/neg/interleaving-params.scala @@ -0,0 +1,9 @@ +import scala.language.experimental.clauseInterleaving + +class Params{ + def bar[T](x: T)[T]: String = ??? // error + def zoo(x: Int)[T, U](x: U): T = ??? // error + def bbb[T <: U](x: U)[U]: U = ??? // error // error + def f0[T](implicit x: T)[U](y: U) = (x,y) // error + def f1[T](implicit x: T)[U] = (x,y) // error +} diff --git a/tests/neg/interleaving-signatureCollision.scala b/tests/neg/interleaving-signatureCollision.scala new file mode 100644 index 000000000000..a6a729ed3b62 --- /dev/null +++ b/tests/neg/interleaving-signatureCollision.scala @@ -0,0 +1,5 @@ +import scala.language.experimental.clauseInterleaving + +object signatureCollision: + def f[T](x: T)[U](y: U) = (x,y) + def f[T](x: T, y: T) = (x,y) // error diff --git a/tests/neg/interleaving-typeApply.check b/tests/neg/interleaving-typeApply.check new file mode 100644 index 000000000000..a50c1455bfbb --- /dev/null +++ b/tests/neg/interleaving-typeApply.check @@ -0,0 +1,30 @@ +-- [E057] Type Mismatch Error: tests/neg/interleaving-typeApply.scala:10:11 -------------------------------------------- +10 | f3[String]() // error + | ^ + | Type argument String does not conform to upper bound Int + | + | longer explanation available when compiling with `-explain` +-- [E057] Type Mismatch Error: tests/neg/interleaving-typeApply.scala:11:16 -------------------------------------------- +11 | f5[Int][Unit] // error + | ^ + | Type argument Unit does not conform to upper bound String + | + | longer explanation available when compiling with `-explain` +-- [E057] Type Mismatch Error: tests/neg/interleaving-typeApply.scala:12:19 -------------------------------------------- +12 | f5[String][Unit] // error // error + | ^ + | Type argument Unit does not conform to upper bound String + | + | longer explanation available when compiling with `-explain` +-- [E057] Type Mismatch Error: tests/neg/interleaving-typeApply.scala:12:11 -------------------------------------------- 
+12 | f5[String][Unit] // error // error + | ^ + | Type argument String does not conform to upper bound Int + | + | longer explanation available when compiling with `-explain` +-- [E057] Type Mismatch Error: tests/neg/interleaving-typeApply.scala:13:11 -------------------------------------------- +13 | f7[String]()[Unit] // error + | ^ + | Type argument String does not conform to upper bound Int + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/interleaving-typeApply.scala b/tests/neg/interleaving-typeApply.scala new file mode 100644 index 000000000000..ad21fe2f0329 --- /dev/null +++ b/tests/neg/interleaving-typeApply.scala @@ -0,0 +1,14 @@ +import scala.language.experimental.clauseInterleaving + +object typeApply: + + def f3[T <: Int](using DummyImplicit)[U <: String](): T => T = ??? + def f5[T <: Int](using DummyImplicit)[U <: String]: [X <: Unit] => X => X = ??? + def f7[T <: Int](using DummyImplicit)[U <: String]()[X <: Unit]: X => X = ??? + + @main def test = { + f3[String]() // error + f5[Int][Unit] // error + f5[String][Unit] // error // error + f7[String]()[Unit] // error + } diff --git a/tests/neg/interleaving-unmatched.scala b/tests/neg/interleaving-unmatched.scala new file mode 100644 index 000000000000..2ce3074d07fa --- /dev/null +++ b/tests/neg/interleaving-unmatched.scala @@ -0,0 +1,5 @@ +import scala.language.experimental.clauseInterleaving + +object unmatched: + def f1[T (x: T)] = ??? // error + def f2(x: Any[)T] = ??? 
// error // error diff --git a/tests/neg/java-ann-extends-separate/Ann_1.java b/tests/neg/java-ann-extends-separate/Ann_1.java new file mode 100644 index 000000000000..97184df24c83 --- /dev/null +++ b/tests/neg/java-ann-extends-separate/Ann_1.java @@ -0,0 +1,3 @@ +public @interface Ann_1 { + int value(); +} diff --git a/tests/neg/java-ann-extends-separate/Test_2.scala b/tests/neg/java-ann-extends-separate/Test_2.scala new file mode 100644 index 000000000000..4e73b71679f6 --- /dev/null +++ b/tests/neg/java-ann-extends-separate/Test_2.scala @@ -0,0 +1,2 @@ +def test(x: Ann_1) = + val y: scala.annotation.Annotation = x // error diff --git a/tests/neg/java-ann-extends/Ann.java b/tests/neg/java-ann-extends/Ann.java new file mode 100644 index 000000000000..9ae845a8af63 --- /dev/null +++ b/tests/neg/java-ann-extends/Ann.java @@ -0,0 +1,3 @@ +public @interface Ann { + int value(); +} diff --git a/tests/neg/java-ann-extends/Test.scala b/tests/neg/java-ann-extends/Test.scala new file mode 100644 index 000000000000..629f1daa9acc --- /dev/null +++ b/tests/neg/java-ann-extends/Test.scala @@ -0,0 +1,2 @@ +def test(x: Ann) = + val y: scala.annotation.Annotation = x // error diff --git a/tests/neg/java-ann-super-class/Ann.java b/tests/neg/java-ann-super-class/Ann.java new file mode 100644 index 000000000000..9ae845a8af63 --- /dev/null +++ b/tests/neg/java-ann-super-class/Ann.java @@ -0,0 +1,3 @@ +public @interface Ann { + int value(); +} diff --git a/tests/neg/java-ann-super-class/Test.scala b/tests/neg/java-ann-super-class/Test.scala new file mode 100644 index 000000000000..cf2f72d2f633 --- /dev/null +++ b/tests/neg/java-ann-super-class/Test.scala @@ -0,0 +1,9 @@ +class Bar extends Ann(1) { // error + def value = 1 + def annotationType = classOf[Ann] +} + +def test = + // Typer errors + new Ann // error + new Ann(1) {} // error diff --git a/tests/neg/java-ann-super-class2/Ann.java b/tests/neg/java-ann-super-class2/Ann.java new file mode 100644 index 000000000000..9ae845a8af63 --- 
/dev/null +++ b/tests/neg/java-ann-super-class2/Ann.java @@ -0,0 +1,3 @@ +public @interface Ann { + int value(); +} diff --git a/tests/neg/java-ann-super-class2/Test.scala b/tests/neg/java-ann-super-class2/Test.scala new file mode 100644 index 000000000000..d5c22860899c --- /dev/null +++ b/tests/neg/java-ann-super-class2/Test.scala @@ -0,0 +1,3 @@ +def test = + // Posttyper errors + new Ann(1) // error diff --git a/tests/neg/java-ann-super-class3/Ann.java b/tests/neg/java-ann-super-class3/Ann.java new file mode 100644 index 000000000000..9ae845a8af63 --- /dev/null +++ b/tests/neg/java-ann-super-class3/Ann.java @@ -0,0 +1,3 @@ +public @interface Ann { + int value(); +} diff --git a/tests/neg/java-ann-super-class3/Test.scala b/tests/neg/java-ann-super-class3/Test.scala new file mode 100644 index 000000000000..8fd9791e6fe3 --- /dev/null +++ b/tests/neg/java-ann-super-class3/Test.scala @@ -0,0 +1,3 @@ +def test = + // Refchecks error + new Ann {} // error diff --git a/tests/neg/java-fake-ann-separate/FakeAnn_1.java b/tests/neg/java-fake-ann-separate/FakeAnn_1.java new file mode 100644 index 000000000000..597ea980585d --- /dev/null +++ b/tests/neg/java-fake-ann-separate/FakeAnn_1.java @@ -0,0 +1 @@ +interface FakeAnn_1 extends java.lang.annotation.Annotation { } diff --git a/tests/neg/java-fake-ann-separate/Test_2.scala b/tests/neg/java-fake-ann-separate/Test_2.scala new file mode 100644 index 000000000000..becc8babdaa0 --- /dev/null +++ b/tests/neg/java-fake-ann-separate/Test_2.scala @@ -0,0 +1,3 @@ +@FakeAnn_1 def test = // error + (1: @FakeAnn_1) // error + diff --git a/tests/neg/java-fake-ann/FakeAnn.java b/tests/neg/java-fake-ann/FakeAnn.java new file mode 100644 index 000000000000..2b055f782d42 --- /dev/null +++ b/tests/neg/java-fake-ann/FakeAnn.java @@ -0,0 +1 @@ +interface FakeAnn extends java.lang.annotation.Annotation { } diff --git a/tests/neg/java-fake-ann/Test.scala b/tests/neg/java-fake-ann/Test.scala new file mode 100644 index 000000000000..827527cb80bf 
--- /dev/null +++ b/tests/neg/java-fake-ann/Test.scala @@ -0,0 +1,2 @@ +@FakeAnn def test = // error + (1: @FakeAnn) // error diff --git a/tests/neg/matchtype-seq.check b/tests/neg/matchtype-seq.check index aba1e312da01..9c37fc08c4df 100644 --- a/tests/neg/matchtype-seq.check +++ b/tests/neg/matchtype-seq.check @@ -1,7 +1,7 @@ -- Error: tests/neg/matchtype-seq.scala:9:11 --------------------------------------------------------------------------- 9 | identity[T1[3]]("") // error | ^ - | Match type reduction failed since selector (3 : Int) + | Match type reduction failed since selector (3 : Int) | matches none of the cases | | case (1 : Int) => Int @@ -9,7 +9,7 @@ -- Error: tests/neg/matchtype-seq.scala:10:11 -------------------------------------------------------------------------- 10 | identity[T1[3]](1) // error | ^ - | Match type reduction failed since selector (3 : Int) + | Match type reduction failed since selector (3 : Int) | matches none of the cases | | case (1 : Int) => Int @@ -23,7 +23,7 @@ | Note: a match type could not be fully reduced: | | trying to reduce Test.T1[Int] - | failed since selector Int + | failed since selector Int | does not match case (1 : Int) => Int | and cannot be shown to be disjoint from it either. | Therefore, reduction cannot advance to the remaining case @@ -40,7 +40,7 @@ | Note: a match type could not be fully reduced: | | trying to reduce Test.T1[Int] - | failed since selector Int + | failed since selector Int | does not match case (1 : Int) => Int | and cannot be shown to be disjoint from it either. | Therefore, reduction cannot advance to the remaining case @@ -57,7 +57,7 @@ | Note: a match type could not be fully reduced: | | trying to reduce Test.T2[Int] - | failed since selector Int + | failed since selector Int | does not match case (1 : Int) => Int | and cannot be shown to be disjoint from it either. 
| Therefore, reduction cannot advance to the remaining case @@ -81,7 +81,7 @@ | Note: a match type could not be fully reduced: | | trying to reduce Test.T2[Int] - | failed since selector Int + | failed since selector Int | does not match case (1 : Int) => Int | and cannot be shown to be disjoint from it either. | Therefore, reduction cannot advance to the remaining case @@ -98,7 +98,7 @@ | Note: a match type could not be fully reduced: | | trying to reduce Test.T3[Test.A] - | failed since selector Test.A + | failed since selector Test.A | does not match case Test.B => Int | and cannot be shown to be disjoint from it either. | Therefore, reduction cannot advance to the remaining case @@ -115,7 +115,7 @@ | Note: a match type could not be fully reduced: | | trying to reduce Test.T3[Test.A] - | failed since selector Test.A + | failed since selector Test.A | does not match case Test.B => Int | and cannot be shown to be disjoint from it either. | Therefore, reduction cannot advance to the remaining case @@ -132,7 +132,7 @@ | Note: a match type could not be fully reduced: | | trying to reduce Test.T5[Test.A] - | failed since selector Test.A + | failed since selector Test.A | does not match case Test.C => String | and cannot be shown to be disjoint from it either. | Therefore, reduction cannot advance to the remaining case @@ -149,7 +149,7 @@ | Note: a match type could not be fully reduced: | | trying to reduce Test.T5[Test.A] - | failed since selector Test.A + | failed since selector Test.A | does not match case Test.C => String | and cannot be shown to be disjoint from it either. | Therefore, reduction cannot advance to the remaining case @@ -166,7 +166,7 @@ | Note: a match type could not be fully reduced: | | trying to reduce Test.T7[Test.D] - | failed since selector Test.D + | failed since selector Test.D | does not match case Test.A2 => Int | and cannot be shown to be disjoint from it either. 
| Therefore, reduction cannot advance to the remaining case @@ -183,7 +183,7 @@ | Note: a match type could not be fully reduced: | | trying to reduce Test.T7[Test.D] - | failed since selector Test.D + | failed since selector Test.D | does not match case Test.A2 => Int | and cannot be shown to be disjoint from it either. | Therefore, reduction cannot advance to the remaining case @@ -200,7 +200,7 @@ | Note: a match type could not be fully reduced: | | trying to reduce Test.T8[Test.E2] - | failed since selector Test.E2 + | failed since selector Test.E2 | does not match case Test.E1 => Int | and cannot be shown to be disjoint from it either. | Therefore, reduction cannot advance to the remaining case @@ -224,7 +224,7 @@ | Note: a match type could not be fully reduced: | | trying to reduce Test.T8[Test.E2] - | failed since selector Test.E2 + | failed since selector Test.E2 | does not match case Test.E1 => Int | and cannot be shown to be disjoint from it either. | Therefore, reduction cannot advance to the remaining case @@ -241,7 +241,7 @@ | Note: a match type could not be fully reduced: | | trying to reduce Test.T9[(Nothing, String)] - | failed since selector (Nothing, String) + | failed since selector (Nothing, String) | is uninhabited (there are no values of that type). | | longer explanation available when compiling with `-explain` @@ -254,7 +254,7 @@ | Note: a match type could not be fully reduced: | | trying to reduce Test.T9[(String, Nothing)] - | failed since selector (String, Nothing) + | failed since selector (String, Nothing) | is uninhabited (there are no values of that type). | | longer explanation available when compiling with `-explain` @@ -267,7 +267,7 @@ | Note: a match type could not be fully reduced: | | trying to reduce Test.T9[(Int, Nothing)] - | failed since selector (Int, Nothing) + | failed since selector (Int, Nothing) | is uninhabited (there are no values of that type). 
| | longer explanation available when compiling with `-explain` @@ -280,7 +280,7 @@ | Note: a match type could not be fully reduced: | | trying to reduce Test.T9[(Nothing, Int)] - | failed since selector (Nothing, Int) + | failed since selector (Nothing, Int) | is uninhabited (there are no values of that type). | | longer explanation available when compiling with `-explain` @@ -293,7 +293,7 @@ | Note: a match type could not be fully reduced: | | trying to reduce Test.T9[(?, ?)] - | failed since selector (?, ?) + | failed since selector (?, ?) | does not match case (Int, String) => Int | and cannot be shown to be disjoint from it either. | Therefore, reduction cannot advance to the remaining case @@ -310,7 +310,7 @@ | Note: a match type could not be fully reduced: | | trying to reduce Test.T9[(?, ?)] - | failed since selector (?, ?) + | failed since selector (?, ?) | does not match case (Int, String) => Int | and cannot be shown to be disjoint from it either. | Therefore, reduction cannot advance to the remaining case @@ -327,7 +327,7 @@ | Note: a match type could not be fully reduced: | | trying to reduce Test.T9[(Any, Any)] - | failed since selector (Any, Any) + | failed since selector (Any, Any) | does not match case (Int, String) => Int | and cannot be shown to be disjoint from it either. | Therefore, reduction cannot advance to the remaining case @@ -344,7 +344,7 @@ | Note: a match type could not be fully reduced: | | trying to reduce Test.T9[(Any, Any)] - | failed since selector (Any, Any) + | failed since selector (Any, Any) | does not match case (Int, String) => Int | and cannot be shown to be disjoint from it either. 
| Therefore, reduction cannot advance to the remaining case @@ -361,7 +361,7 @@ | Note: a match type could not be fully reduced: | | trying to reduce Test.TA[Test.Box2[Int, Int, String]] - | failed since selector Test.Box2[Int, Int, String] + | failed since selector Test.Box2[Int, Int, String] | does not match case Test.Box2[Int, Int, Int] => Int | and cannot be shown to be disjoint from it either. | Therefore, reduction cannot advance to the remaining case @@ -378,7 +378,7 @@ | Note: a match type could not be fully reduced: | | trying to reduce Test.TA[Test.Box2[Int, Int, String]] - | failed since selector Test.Box2[Int, Int, String] + | failed since selector Test.Box2[Int, Int, String] | does not match case Test.Box2[Int, Int, Int] => Int | and cannot be shown to be disjoint from it either. | Therefore, reduction cannot advance to the remaining case @@ -395,7 +395,7 @@ | Note: a match type could not be fully reduced: | | trying to reduce Test.TD[Test.Box2_C[Int, Int, String]] - | failed since selector Test.Box2_C[Int, Int, String] + | failed since selector Test.Box2_C[Int, Int, String] | does not match case Test.Box2_C[Int, Int, Int] => Int | and cannot be shown to be disjoint from it either. | Therefore, reduction cannot advance to the remaining case @@ -412,7 +412,7 @@ | Note: a match type could not be fully reduced: | | trying to reduce Test2.M[Some[A]] - | failed since selector Some[A] + | failed since selector Some[A] | does not match case Option[Int] => String | and cannot be shown to be disjoint from it either. | Therefore, reduction cannot advance to the remaining case @@ -429,7 +429,7 @@ | Note: a match type could not be fully reduced: | | trying to reduce Test2.M[Some[A]] - | failed since selector Some[A] + | failed since selector Some[A] | does not match case Option[Int] => String | and cannot be shown to be disjoint from it either. 
| Therefore, reduction cannot advance to the remaining case @@ -446,7 +446,7 @@ | Note: a match type could not be fully reduced: | | trying to reduce Test3.M[Test3.Inv[A]] - | failed since selector Test3.Inv[A] + | failed since selector Test3.Inv[A] | does not match case Test3.Inv[Int] => String | and cannot be shown to be disjoint from it either. | Therefore, reduction cannot advance to the remaining case @@ -463,7 +463,7 @@ | Note: a match type could not be fully reduced: | | trying to reduce Test4.M[Test4.Inv[Foo.this.A]] - | failed since selector Test4.Inv[Foo.this.A] + | failed since selector Test4.Inv[Foo.this.A] | does not match case Test4.Inv[Int] => String | and cannot be shown to be disjoint from it either. | Therefore, reduction cannot advance to the remaining case diff --git a/tests/neg/mirror-synthesis-errors-b.check b/tests/neg/mirror-synthesis-errors-b.check index ea41d14da296..d9e394617c9d 100644 --- a/tests/neg/mirror-synthesis-errors-b.check +++ b/tests/neg/mirror-synthesis-errors-b.check @@ -1,40 +1,40 @@ --- Error: tests/neg/mirror-synthesis-errors-b.scala:21:56 -------------------------------------------------------------- +-- [E172] Type Error: tests/neg/mirror-synthesis-errors-b.scala:21:56 -------------------------------------------------- 21 |val testA = summon[Mirror.ProductOf[Cns[Int] & Sm[Int]]] // error: unreleated | ^ |No given instance of type deriving.Mirror.ProductOf[Cns[Int] & Sm[Int]] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.ProductOf[Cns[Int] & Sm[Int]]: type `Cns[Int] & Sm[Int]` is not a generic product because its subpart `Cns[Int] & Sm[Int]` is an intersection of unrelated definitions class Cns and class Sm. 
--- Error: tests/neg/mirror-synthesis-errors-b.scala:22:56 -------------------------------------------------------------- +-- [E172] Type Error: tests/neg/mirror-synthesis-errors-b.scala:22:56 -------------------------------------------------- 22 |val testB = summon[Mirror.ProductOf[Sm[Int] & Cns[Int]]] // error: unreleated | ^ |No given instance of type deriving.Mirror.ProductOf[Sm[Int] & Cns[Int]] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.ProductOf[Sm[Int] & Cns[Int]]: type `Sm[Int] & Cns[Int]` is not a generic product because its subpart `Sm[Int] & Cns[Int]` is an intersection of unrelated definitions class Sm and class Cns. --- Error: tests/neg/mirror-synthesis-errors-b.scala:23:49 -------------------------------------------------------------- +-- [E172] Type Error: tests/neg/mirror-synthesis-errors-b.scala:23:49 -------------------------------------------------- 23 |val testC = summon[Mirror.Of[Cns[Int] & Sm[Int]]] // error: unreleated | ^ - |No given instance of type deriving.Mirror.Of[Cns[Int] & Sm[Int]] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[Cns[Int] & Sm[Int]]: + |No given instance of type deriving.Mirror.Of[Cns[Int] & Sm[Int]] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[Cns[Int] & Sm[Int]]: | * type `Cns[Int] & Sm[Int]` is not a generic product because its subpart `Cns[Int] & Sm[Int]` is an intersection of unrelated definitions class Cns and class Sm. | * type `Cns[Int] & Sm[Int]` is not a generic sum because its subpart `Cns[Int] & Sm[Int]` is an intersection of unrelated definitions class Cns and class Sm. 
--- Error: tests/neg/mirror-synthesis-errors-b.scala:24:49 -------------------------------------------------------------- +-- [E172] Type Error: tests/neg/mirror-synthesis-errors-b.scala:24:49 -------------------------------------------------- 24 |val testD = summon[Mirror.Of[Sm[Int] & Cns[Int]]] // error: unreleated | ^ - |No given instance of type deriving.Mirror.Of[Sm[Int] & Cns[Int]] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[Sm[Int] & Cns[Int]]: + |No given instance of type deriving.Mirror.Of[Sm[Int] & Cns[Int]] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[Sm[Int] & Cns[Int]]: | * type `Sm[Int] & Cns[Int]` is not a generic product because its subpart `Sm[Int] & Cns[Int]` is an intersection of unrelated definitions class Sm and class Cns. | * type `Sm[Int] & Cns[Int]` is not a generic sum because its subpart `Sm[Int] & Cns[Int]` is an intersection of unrelated definitions class Sm and class Cns. --- Error: tests/neg/mirror-synthesis-errors-b.scala:25:55 -------------------------------------------------------------- +-- [E172] Type Error: tests/neg/mirror-synthesis-errors-b.scala:25:55 -------------------------------------------------- 25 |val testE = summon[Mirror.ProductOf[Sm[Int] & Nn.type]] // error: unreleated | ^ |No given instance of type deriving.Mirror.ProductOf[Sm[Int] & Nn.type] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.ProductOf[Sm[Int] & Nn.type]: type `Sm[Int] & Nn.type` is not a generic product because its subpart `Sm[Int] & Nn.type` is an intersection of unrelated definitions class Sm and object Nn. 
--- Error: tests/neg/mirror-synthesis-errors-b.scala:26:55 -------------------------------------------------------------- +-- [E172] Type Error: tests/neg/mirror-synthesis-errors-b.scala:26:55 -------------------------------------------------- 26 |val testF = summon[Mirror.ProductOf[Nn.type & Sm[Int]]] // error: unreleated | ^ |No given instance of type deriving.Mirror.ProductOf[Nn.type & Sm[Int]] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.ProductOf[Nn.type & Sm[Int]]: type `Nn.type & Sm[Int]` is not a generic product because its subpart `Nn.type & Sm[Int]` is an intersection of unrelated definitions object Nn and class Sm. --- Error: tests/neg/mirror-synthesis-errors-b.scala:27:54 -------------------------------------------------------------- +-- [E172] Type Error: tests/neg/mirror-synthesis-errors-b.scala:27:54 -------------------------------------------------- 27 |val testG = summon[Mirror.Of[Foo.A.type & Foo.B.type]] // error: unreleated | ^ - |No given instance of type deriving.Mirror.Of[(Foo.A : Foo) & (Foo.B : Foo)] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[(Foo.A : Foo) & (Foo.B : Foo)]: + |No given instance of type deriving.Mirror.Of[(Foo.A : Foo) & (Foo.B : Foo)] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[(Foo.A : Foo) & (Foo.B : Foo)]: | * type `(Foo.A : Foo) & (Foo.B : Foo)` is not a generic product because its subpart `(Foo.A : Foo) & (Foo.B : Foo)` is an intersection of unrelated definitions value A and value B. | * type `(Foo.A : Foo) & (Foo.B : Foo)` is not a generic sum because its subpart `(Foo.A : Foo) & (Foo.B : Foo)` is an intersection of unrelated definitions value A and value B. 
--- Error: tests/neg/mirror-synthesis-errors-b.scala:28:54 -------------------------------------------------------------- +-- [E172] Type Error: tests/neg/mirror-synthesis-errors-b.scala:28:54 -------------------------------------------------- 28 |val testH = summon[Mirror.Of[Foo.B.type & Foo.A.type]] // error: unreleated | ^ - |No given instance of type deriving.Mirror.Of[(Foo.B : Foo) & (Foo.A : Foo)] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[(Foo.B : Foo) & (Foo.A : Foo)]: + |No given instance of type deriving.Mirror.Of[(Foo.B : Foo) & (Foo.A : Foo)] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[(Foo.B : Foo) & (Foo.A : Foo)]: | * type `(Foo.B : Foo) & (Foo.A : Foo)` is not a generic product because its subpart `(Foo.B : Foo) & (Foo.A : Foo)` is an intersection of unrelated definitions value B and value A. | * type `(Foo.B : Foo) & (Foo.A : Foo)` is not a generic sum because its subpart `(Foo.B : Foo) & (Foo.A : Foo)` is an intersection of unrelated definitions value B and value A. diff --git a/tests/neg/mirror-synthesis-errors.check b/tests/neg/mirror-synthesis-errors.check index d108c99280ae..da795e80bf51 100644 --- a/tests/neg/mirror-synthesis-errors.check +++ b/tests/neg/mirror-synthesis-errors.check @@ -1,42 +1,42 @@ --- Error: tests/neg/mirror-synthesis-errors.scala:21:32 ---------------------------------------------------------------- +-- [E172] Type Error: tests/neg/mirror-synthesis-errors.scala:21:32 ---------------------------------------------------- 21 |val testA = summon[Mirror.Of[A]] // error: Not a sealed trait | ^ - |No given instance of type deriving.Mirror.Of[A] was found for parameter x of method summon in object Predef. 
Failed to synthesize an instance of type deriving.Mirror.Of[A]: + |No given instance of type deriving.Mirror.Of[A] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[A]: | * trait A is not a generic product because it is not a case class | * trait A is not a generic sum because it is not a sealed trait --- Error: tests/neg/mirror-synthesis-errors.scala:22:32 ---------------------------------------------------------------- +-- [E172] Type Error: tests/neg/mirror-synthesis-errors.scala:22:32 ---------------------------------------------------- 22 |val testC = summon[Mirror.Of[C]] // error: Does not have subclasses | ^ - |No given instance of type deriving.Mirror.Of[C] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[C]: + |No given instance of type deriving.Mirror.Of[C] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[C]: | * trait C is not a generic product because it is not a case class | * trait C is not a generic sum because it does not have subclasses --- Error: tests/neg/mirror-synthesis-errors.scala:23:32 ---------------------------------------------------------------- +-- [E172] Type Error: tests/neg/mirror-synthesis-errors.scala:23:32 ---------------------------------------------------- 23 |val testD = summon[Mirror.Of[D]] // error: child SubD takes more than one parameter list | ^ - |No given instance of type deriving.Mirror.Of[D] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[D]: + |No given instance of type deriving.Mirror.Of[D] was found for parameter x of method summon in object Predef. 
Failed to synthesize an instance of type deriving.Mirror.Of[D]: | * class D is not a generic product because it is not a case class | * class D is not a generic sum because its child class SubD is not a generic product because it takes more than one parameter list --- Error: tests/neg/mirror-synthesis-errors.scala:24:38 ---------------------------------------------------------------- +-- [E172] Type Error: tests/neg/mirror-synthesis-errors.scala:24:38 ---------------------------------------------------- 24 |val testSubD = summon[Mirror.Of[SubD]] // error: takes more than one parameter list | ^ - |No given instance of type deriving.Mirror.Of[SubD] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[SubD]: + |No given instance of type deriving.Mirror.Of[SubD] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[SubD]: | * class SubD is not a generic product because it takes more than one parameter list | * class SubD is not a generic sum because it is not a sealed class --- Error: tests/neg/mirror-synthesis-errors.scala:25:32 ---------------------------------------------------------------- +-- [E172] Type Error: tests/neg/mirror-synthesis-errors.scala:25:32 ---------------------------------------------------- 25 |val testE = summon[Mirror.Of[E]] // error: Not an abstract class | ^ - |No given instance of type deriving.Mirror.Of[E] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[E]: + |No given instance of type deriving.Mirror.Of[E] was found for parameter x of method summon in object Predef. 
Failed to synthesize an instance of type deriving.Mirror.Of[E]: | * class E is not a generic product because it is not a case class | * class E is not a generic sum because it is not an abstract class --- Error: tests/neg/mirror-synthesis-errors.scala:26:32 ---------------------------------------------------------------- +-- [E172] Type Error: tests/neg/mirror-synthesis-errors.scala:26:32 ---------------------------------------------------- 26 |val testF = summon[Mirror.Of[F]] // error: No children | ^ - |No given instance of type deriving.Mirror.Of[F] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[F]: + |No given instance of type deriving.Mirror.Of[F] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[F]: | * trait F is not a generic product because it is not a case class | * trait F is not a generic sum because it does not have subclasses --- Error: tests/neg/mirror-synthesis-errors.scala:27:36 ---------------------------------------------------------------- +-- [E172] Type Error: tests/neg/mirror-synthesis-errors.scala:27:36 ---------------------------------------------------- 27 |val testG = summon[Mirror.Of[Foo.G]] // error: Has anonymous subclasses | ^ - |No given instance of type deriving.Mirror.Of[Foo.G] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[Foo.G]: + |No given instance of type deriving.Mirror.Of[Foo.G] was found for parameter x of method summon in object Predef. 
Failed to synthesize an instance of type deriving.Mirror.Of[Foo.G]: | * trait G is not a generic product because it is not a case class | * trait G is not a generic sum because it has anonymous or inaccessible subclasses diff --git a/tests/neg/missing-implicit-2.check b/tests/neg/missing-implicit-2.check index e1994c4bf02d..10f0192d1459 100644 --- a/tests/neg/missing-implicit-2.check +++ b/tests/neg/missing-implicit-2.check @@ -1,4 +1,4 @@ --- Error: tests/neg/missing-implicit-2.scala:4:24 ---------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/missing-implicit-2.scala:4:24 ---------------------------------------------------------- 4 |val f = Future[Unit] { } // error | ^ | Cannot find an implicit ExecutionContext. You might add diff --git a/tests/neg/missing-implicit1.check b/tests/neg/missing-implicit1.check index ccba4b0fa018..c94225aaf0a6 100644 --- a/tests/neg/missing-implicit1.check +++ b/tests/neg/missing-implicit1.check @@ -1,4 +1,4 @@ --- Error: tests/neg/missing-implicit1.scala:17:4 ----------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/missing-implicit1.scala:17:4 ----------------------------------------------------------- 17 | ff // error | ^ |No given instance of type testObjectInstance.Zip[Option] was found for parameter xs of method ff in object testObjectInstance @@ -16,7 +16,7 @@ | | import testObjectInstance.instances.traverseList | --- Error: tests/neg/missing-implicit1.scala:23:42 ---------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/missing-implicit1.scala:23:42 ---------------------------------------------------------- 23 | List(1, 2, 3).traverse(x => Option(x)) // error | ^ |No given instance of type testObjectInstance.Zip[Option] was found for an implicit parameter of method traverse in trait Traverse diff --git a/tests/neg/missing-implicit2.check b/tests/neg/missing-implicit2.check index 
705e052c0a43..103c098f5798 100644 --- a/tests/neg/missing-implicit2.check +++ b/tests/neg/missing-implicit2.check @@ -1,4 +1,4 @@ --- Error: tests/neg/missing-implicit2.scala:10:18 ---------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/missing-implicit2.scala:10:18 ---------------------------------------------------------- 10 | f(using xFromY) // error | ^ | No given instance of type Y was found for parameter y of given instance xFromY @@ -7,7 +7,7 @@ | | import test.instances.y | --- Error: tests/neg/missing-implicit2.scala:16:5 ----------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/missing-implicit2.scala:16:5 ----------------------------------------------------------- 16 | f // error | ^ | No given instance of type X was found for parameter x of method f in object test diff --git a/tests/neg/missing-implicit3.check b/tests/neg/missing-implicit3.check index 3cf3b101f3ca..ab87bf99a32a 100644 --- a/tests/neg/missing-implicit3.check +++ b/tests/neg/missing-implicit3.check @@ -1,4 +1,4 @@ --- Error: tests/neg/missing-implicit3.scala:13:36 ---------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/missing-implicit3.scala:13:36 ---------------------------------------------------------- 13 |val sortedFoos = sort(List(new Foo)) // error | ^ | No given instance of type ord.Ord[ord.Foo] was found for an implicit parameter of method sort in package ord. 
diff --git a/tests/neg/missing-implicit4.check b/tests/neg/missing-implicit4.check index 4cc8a2182b8d..e243c208ecdf 100644 --- a/tests/neg/missing-implicit4.check +++ b/tests/neg/missing-implicit4.check @@ -1,4 +1,4 @@ --- Error: tests/neg/missing-implicit4.scala:14:4 ----------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/missing-implicit4.scala:14:4 ----------------------------------------------------------- 14 | ff // error | ^ | No given instance of type Zip[Option] was found for parameter xs of method ff @@ -16,7 +16,7 @@ | | import instances.traverseList | --- Error: tests/neg/missing-implicit4.scala:20:42 ---------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/missing-implicit4.scala:20:42 ---------------------------------------------------------- 20 | List(1, 2, 3).traverse(x => Option(x)) // error | ^ | No given instance of type Zip[Option] was found for an implicit parameter of method traverse in trait Traverse diff --git a/tests/neg/missing-import.scala b/tests/neg/missing-import.scala new file mode 100644 index 000000000000..8af26030435a --- /dev/null +++ b/tests/neg/missing-import.scala @@ -0,0 +1,3 @@ +class annotation extends Annotation // error +val s: String = "str" +val regex: Regex = s.r // error diff --git a/tests/neg/namedTypeParams.check b/tests/neg/namedTypeParams.check new file mode 100644 index 000000000000..3f6f9f7913e8 --- /dev/null +++ b/tests/neg/namedTypeParams.check @@ -0,0 +1,102 @@ +-- [E040] Syntax Error: tests/neg/namedTypeParams.scala:2:8 ------------------------------------------------------------ +2 |class D[type T] // error: identifier expected, but `type` found + | ^^^^ + | an identifier expected, but 'type' found + | + | longer explanation available when compiling with `-explain` +-- [E040] Syntax Error: tests/neg/namedTypeParams.scala:11:13 ---------------------------------------------------------- +11 | val x: C[T = Int] = // 
error: ']' expected, but `=` found // error + | ^ + | ']' expected, but '=' found +-- [E040] Syntax Error: tests/neg/namedTypeParams.scala:12:12 ---------------------------------------------------------- +12 | new C[T = Int] // error: ']' expected, but `=` found // error + | ^ + | ']' expected, but '=' found +-- [E040] Syntax Error: tests/neg/namedTypeParams.scala:14:22 ---------------------------------------------------------- +14 | class E extends C[T = Int] // error: ']' expected, but `=` found // error + | ^ + | ']' expected, but '=' found +-- [E040] Syntax Error: tests/neg/namedTypeParams.scala:15:22 ---------------------------------------------------------- +15 | class F extends C[T = Int]() // error: ']' expected, but `=` found // error + | ^ + | ']' expected, but '=' found +-- [E040] Syntax Error: tests/neg/namedTypeParams.scala:19:19 ---------------------------------------------------------- +19 | f[X = Int, String](1, "") // error // error + | ^ + | '=' expected, but ']' found +-- Error: tests/neg/namedTypeParams.scala:6:8 -------------------------------------------------------------------------- +6 | f[X = Int, Y = Int](1, 2) // error: experimental // error: experimental + | ^^^ + | Named type arguments are experimental, + | they must be enabled with a `experimental.namedTypeArguments` language import or setting +-- Error: tests/neg/namedTypeParams.scala:6:17 ------------------------------------------------------------------------- +6 | f[X = Int, Y = Int](1, 2) // error: experimental // error: experimental + | ^^^ + | Named type arguments are experimental, + | they must be enabled with a `experimental.namedTypeArguments` language import or setting +-- [E006] Not Found Error: tests/neg/namedTypeParams.scala:11:11 ------------------------------------------------------- +11 | val x: C[T = Int] = // error: ']' expected, but `=` found // error + | ^ + | Not found: type T + | + | longer explanation available when compiling with `-explain` +-- [E006] Not Found 
Error: tests/neg/namedTypeParams.scala:12:10 ------------------------------------------------------- +12 | new C[T = Int] // error: ']' expected, but `=` found // error + | ^ + | Not found: type T + | + | longer explanation available when compiling with `-explain` +-- [E006] Not Found Error: tests/neg/namedTypeParams.scala:14:20 ------------------------------------------------------- +14 | class E extends C[T = Int] // error: ']' expected, but `=` found // error + | ^ + | Not found: type T + | + | longer explanation available when compiling with `-explain` +-- [E006] Not Found Error: tests/neg/namedTypeParams.scala:15:20 ------------------------------------------------------- +15 | class F extends C[T = Int]() // error: ']' expected, but `=` found // error + | ^ + | Not found: type T + | + | longer explanation available when compiling with `-explain` +-- [E102] Syntax Error: tests/neg/namedTypeParams.scala:19:18 ---------------------------------------------------------- +19 | f[X = Int, String](1, "") // error // error + | ^ + | Type parameter String is undefined. Expected one of X, Y. 
+-- Error: tests/neg/namedTypeParams.scala:20:12 ------------------------------------------------------------------------ +20 | f[X = Int][X = Int][Y = String](1, "") // error: illegal repeated type application + | ^^^^^^^^^^^^^^^^^^^ + | illegal repeated type application + | You might have meant something like: + | Test.f[X = Int, X = Int] +-- Error: tests/neg/namedTypeParams.scala:22:12 ------------------------------------------------------------------------ +22 | f[X = Int][Y = String](1, "") // error: illegal repeated type application + | ^^^^^^^^^^^^^^^^^^^^^^ + | illegal repeated type application + | You might have meant something like: + | Test.f[X = Int, Y = String] +-- Error: tests/neg/namedTypeParams.scala:23:12 ------------------------------------------------------------------------ +23 | f[X = Int][String](1, "") // error: illegal repeated type application + | ^^^^^^^^^^^^^^^^^^ + | illegal repeated type application + | You might have meant something like: + | Test.f[X = Int, String] +-- Error: tests/neg/namedTypeParams.scala:25:15 ------------------------------------------------------------------------ +25 | f[Y = String][X = Int](1, "") // error: illegal repeated type application + | ^^^^^^^^^^^^^^^^^^^^^^ + | illegal repeated type application + | You might have meant something like: + | Test.f[Y = String, X = Int] +-- Error: tests/neg/namedTypeParams.scala:26:15 ------------------------------------------------------------------------ +26 | f[Y = String][Int](1, "") // error: illegal repeated type application + | ^^^^^^^^^^^^^^^^^^ + | illegal repeated type application + | You might have meant something like: + | Test.f[Y = String, Int] +-- [E102] Syntax Error: tests/neg/namedTypeParams.scala:33:9 ----------------------------------------------------------- +33 | f2[Y = String][X = Int](1, "") // error: Y is undefined + | ^^^^^^ + | Type parameter Y is undefined. Expected one of X. 
+-- [E102] Syntax Error: tests/neg/namedTypeParams.scala:34:9 ----------------------------------------------------------- +34 | f2[Y = String](1, "") // error: Y is undefined + | ^^^^^^ + | Type parameter Y is undefined. Expected one of X. diff --git a/tests/neg/namedTypeParams.scala b/tests/neg/namedTypeParams.scala index 8ed7c92241ea..53ef14188e12 100644 --- a/tests/neg/namedTypeParams.scala +++ b/tests/neg/namedTypeParams.scala @@ -5,7 +5,7 @@ object Test0: def f[X, Y](x: X, y: Y): Int = ??? f[X = Int, Y = Int](1, 2) // error: experimental // error: experimental -object Test { +object Test: import language.experimental.namedTypeArguments val x: C[T = Int] = // error: ']' expected, but `=` found // error @@ -24,4 +24,11 @@ object Test { f[Y = String][X = Int](1, "") // error: illegal repeated type application f[Y = String][Int](1, "") // error: illegal repeated type application -} + +object TestInterleaving: + import language.experimental.namedTypeArguments + import language.experimental.clauseInterleaving + def f2[X](using DummyImplicit)[Y](x: X, y: Y): Int = ??? 
+ + f2[Y = String][X = Int](1, "") // error: Y is undefined + f2[Y = String](1, "") // error: Y is undefined diff --git a/tests/neg/noimports-additional.scala b/tests/neg/noimports-additional.scala new file mode 100644 index 000000000000..e726db5b9b0a --- /dev/null +++ b/tests/neg/noimports-additional.scala @@ -0,0 +1,4 @@ +// scalac: -Yno-imports -Yimports:scala.annotation,scala.util.matching +class annotation extends Annotation +val s: String = "str" // error +val regex: Regex = new Regex("str") diff --git a/tests/neg-custom-args/noimports.scala b/tests/neg/noimports.scala similarity index 70% rename from tests/neg-custom-args/noimports.scala rename to tests/neg/noimports.scala index 6cef8dee8843..720d111757cd 100644 --- a/tests/neg-custom-args/noimports.scala +++ b/tests/neg/noimports.scala @@ -1,3 +1,4 @@ +// scalac: -Yno-imports object Test { val t: Int = 1 // error: not found Int } diff --git a/tests/neg-custom-args/noimports2.scala b/tests/neg/noimports2.scala similarity index 74% rename from tests/neg-custom-args/noimports2.scala rename to tests/neg/noimports2.scala index b75f1361ddb9..deee773c35c6 100644 --- a/tests/neg-custom-args/noimports2.scala +++ b/tests/neg/noimports2.scala @@ -1,3 +1,4 @@ +// scalac: -Yno-imports object Test { assert("asdf" == "asdf") // error: not found assert } diff --git a/tests/neg/nopredef-additional.scala b/tests/neg/nopredef-additional.scala new file mode 100644 index 000000000000..0b6a71ca7c53 --- /dev/null +++ b/tests/neg/nopredef-additional.scala @@ -0,0 +1,4 @@ +// scalac: -Yno-predef -Yimports:java.lang,scala.annotation,scala.util.matching +class annotation extends Annotation +val s: String = "str" +val regex: Regex = s.r // error diff --git a/tests/neg/nopredef.scala b/tests/neg/nopredef.scala index 0a22e200805a..fa9a344772a6 100644 --- a/tests/neg/nopredef.scala +++ b/tests/neg/nopredef.scala @@ -1,5 +1,4 @@ -import Predef.{assert as _} - +// scalac: -Yno-predef object Test { assert("asdf" == "asdf") // error: not 
found assert } diff --git a/tests/neg/opaque-bounds-1.scala b/tests/neg/opaque-bounds-1.scala new file mode 100644 index 000000000000..e05cd56ae71c --- /dev/null +++ b/tests/neg/opaque-bounds-1.scala @@ -0,0 +1,13 @@ +abstract class Test { + opaque type FlagSet = Int + + opaque type Flag <: FlagSet = String // error: type String outside bounds <: Test.this.FlagSet + + object Flag { + def make(s: String): Flag = s + } + + val f: Flag = Flag.make("hello") + val g: FlagSet = f + +} \ No newline at end of file diff --git a/tests/neg/opaque-bounds.scala b/tests/neg/opaque-bounds.scala index 3eb03117e469..c39f184e2008 100644 --- a/tests/neg/opaque-bounds.scala +++ b/tests/neg/opaque-bounds.scala @@ -2,7 +2,7 @@ class Test { // error: class Test cannot be instantiated opaque type FlagSet = Int - opaque type Flag <: FlagSet = String // error: type String outside bounds <: Test.this.FlagSet + opaque type Flag <: FlagSet = String object Flag { def make(s: String): Flag = s diff --git a/tests/neg/outdent-dot.check b/tests/neg/outdent-dot.check new file mode 100644 index 000000000000..c93c3bcfba73 --- /dev/null +++ b/tests/neg/outdent-dot.check @@ -0,0 +1,18 @@ +-- Error: tests/neg/outdent-dot.scala:6:5 ------------------------------------------------------------------------------ +6 | .toString // error + | ^ + | The start of this line does not match any of the previous indentation widths. + | Indentation width of current line : 5 spaces + | This falls between previous widths: 2 spaces and 6 spaces +-- Error: tests/neg/outdent-dot.scala:11:3 ----------------------------------------------------------------------------- +11 | .filter: x => // error + | ^ + | The start of this line does not match any of the previous indentation widths. 
+ | Indentation width of current line : 3 spaces + | This falls between previous widths: 2 spaces and 6 spaces +-- Error: tests/neg/outdent-dot.scala:13:4 ----------------------------------------------------------------------------- +13 | println("foo") // error + | ^ + | The start of this line does not match any of the previous indentation widths. + | Indentation width of current line : 4 spaces + | This falls between previous widths: 2 spaces and 6 spaces diff --git a/tests/neg/outdent-dot.scala b/tests/neg/outdent-dot.scala new file mode 100644 index 000000000000..d0e882a3c073 --- /dev/null +++ b/tests/neg/outdent-dot.scala @@ -0,0 +1,13 @@ +def Block(f: => Int): Int = f + +def bar(): String = + Block: + 2 + 2 + .toString // error + +def foo(xs: List[Int]) = + xs.map: x => + x + 1 + .filter: x => // error + x > 0 + println("foo") // error diff --git a/tests/neg/overrides.scala b/tests/neg/overrides.scala index 48f3260721e9..c8fc8de97f7c 100644 --- a/tests/neg/overrides.scala +++ b/tests/neg/overrides.scala @@ -42,6 +42,9 @@ class A[T] { def next: T = ??? + import scala.language.experimental.clauseInterleaving + + def b[U <: T](x: Int)[V >: T](y: String) = false } class B extends A[Int] { @@ -52,6 +55,20 @@ class B extends A[Int] { override def next(): Int = ??? // error: incompatible type + import scala.language.experimental.clauseInterleaving + + override def b[T <: Int](x: Int)(y: String) = true // error +} + +class C extends A[String] { + + override def f(x: String) = x // error + + override def next: Int = ??? 
// error: incompatible type + + import scala.language.experimental.clauseInterleaving + + override def b[T <: String](x: Int)[U >: Int](y: String) = true // error: incompatible type } class X { @@ -103,4 +120,3 @@ class C extends A { override def m: Int = 42 // error: has incompatible type } } - diff --git a/tests/neg/parser-stability-9.scala b/tests/neg/parser-stability-9.scala index aaa77f216f37..932f6a15ad52 100644 --- a/tests/neg/parser-stability-9.scala +++ b/tests/neg/parser-stability-9.scala @@ -1,2 +1,2 @@ -import // error +import // error \ No newline at end of file diff --git a/tests/neg/recursive-lower-constraint.scala b/tests/neg/recursive-lower-constraint.scala index 8009ab5fce6e..cf45d8b95171 100644 --- a/tests/neg/recursive-lower-constraint.scala +++ b/tests/neg/recursive-lower-constraint.scala @@ -3,5 +3,5 @@ class Bar extends Foo[Bar] class A { def foo[T <: Foo[T], U >: Foo[T] <: T](x: T): T = x - foo(new Bar) // error + foo(new Bar) // error // error } diff --git a/tests/neg/repeatable/Test_1.scala b/tests/neg/repeatable/Test_1.scala index 3779b6ffa4a8..6466da95dfa8 100644 --- a/tests/neg/repeatable/Test_1.scala +++ b/tests/neg/repeatable/Test_1.scala @@ -6,11 +6,11 @@ import repeatable._ @FirstLevel_0(Array()) // error trait U -@FirstLevel_0(Array(Plain_0(4), Plain_0(5))) -@FirstLevel_0(Array(Plain_0(6), Plain_0(7))) +@FirstLevel_0(Array(new Plain_0(4), new Plain_0(5))) +@FirstLevel_0(Array(new Plain_0(6), new Plain_0(7))) @SecondLevel_0(Array()) // error trait T @SecondLevel_0(Array()) @SecondLevel_0(Array()) // error -trait S \ No newline at end of file +trait S diff --git a/tests/neg/safeThrowsStrawman.check b/tests/neg/safeThrowsStrawman.check index 6bf1ecdae513..0885404bbb76 100644 --- a/tests/neg/safeThrowsStrawman.check +++ b/tests/neg/safeThrowsStrawman.check @@ -1,4 +1,4 @@ --- Error: tests/neg/safeThrowsStrawman.scala:17:32 --------------------------------------------------------------------- +-- [E172] Type Error: 
tests/neg/safeThrowsStrawman.scala:17:32 --------------------------------------------------------- 17 | if x then 1 else raise(Fail()) // error | ^ | The capability to throw exception scalax.Fail is missing. @@ -6,7 +6,7 @@ | - A using clause `(using CanThrow[scalax.Fail])` | - A raises clause in a result type such as `X raises scalax.Fail` | - an enclosing `try` that catches scalax.Fail --- Error: tests/neg/safeThrowsStrawman.scala:27:15 --------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/safeThrowsStrawman.scala:27:15 --------------------------------------------------------- 27 | println(bar) // error | ^ | The capability to throw exception Exception is missing. diff --git a/tests/neg/safeThrowsStrawman2.scala b/tests/neg/safeThrowsStrawman2.scala index 7d87baad6fa4..8d95494e30e0 100644 --- a/tests/neg/safeThrowsStrawman2.scala +++ b/tests/neg/safeThrowsStrawman2.scala @@ -24,7 +24,7 @@ def bar(x: Boolean)(using CanThrow[Fail]): Int = val x = new CanThrow[Fail]() // OK, x is erased val y: Any = new CanThrow[Fail]() // error: illegal reference to erased class CanThrow val y2: Any = new CTF() // error: illegal reference to erased class CanThrow - println(foo(true, ctf)) // error: ctf is declared as erased, but is in fact used + println(foo(true, ctf)) // not error: ctf will be erased at erasure val a = (1, new CanThrow[Fail]()) // error: illegal reference to erased class CanThrow def b: (Int, CanThrow[Fail]) = ??? 
def c = b._2 // ok; we only check creation sites diff --git a/tests/neg/saferExceptions.check b/tests/neg/saferExceptions.check index 5f51ce08d6db..77859e940b2d 100644 --- a/tests/neg/saferExceptions.check +++ b/tests/neg/saferExceptions.check @@ -1,4 +1,4 @@ --- Error: tests/neg/saferExceptions.scala:12:16 ------------------------------------------------------------------------ +-- [E172] Type Error: tests/neg/saferExceptions.scala:12:16 ------------------------------------------------------------ 12 | case 4 => throw Exception() // error | ^^^^^^^^^^^^^^^^^ | The capability to throw exception Exception is missing. @@ -11,7 +11,7 @@ | | import unsafeExceptions.canThrowAny | --- Error: tests/neg/saferExceptions.scala:17:46 ------------------------------------------------------------------------ +-- [E172] Type Error: tests/neg/saferExceptions.scala:17:46 ------------------------------------------------------------ 17 | def baz(x: Int): Int throws Failure = bar(x) // error | ^ | The capability to throw exception java.io.IOException is missing. 
diff --git a/tests/neg/selfInheritance.scala b/tests/neg/selfInheritance.scala index 073316de008c..e8eb2bab5624 100644 --- a/tests/neg/selfInheritance.scala +++ b/tests/neg/selfInheritance.scala @@ -26,7 +26,3 @@ object Test { object M extends C // error: illegal inheritance: self type Test.M.type of object M$ does not conform to self type B of parent class C } - -trait X { self: Y => } // error: missing requirement: self type Y & X of trait X does not conform to self type Z of required trait Y -trait Y { self: Z => } -trait Z diff --git a/tests/neg/subtyping.check b/tests/neg/subtyping.check index 832ff6296c52..c0ae1c71e007 100644 --- a/tests/neg/subtyping.check +++ b/tests/neg/subtyping.check @@ -1,8 +1,8 @@ --- Error: tests/neg/subtyping.scala:8:27 ------------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/subtyping.scala:8:27 ------------------------------------------------------------------- 8 | implicitly[B#X <:< A#X] // error: no implicit argument | ^ | Cannot prove that B#X <:< A#X. --- Error: tests/neg/subtyping.scala:12:27 ------------------------------------------------------------------------------ +-- [E172] Type Error: tests/neg/subtyping.scala:12:27 ------------------------------------------------------------------ 12 | implicitly[a.T <:< a.U] // error: no implicit argument | ^ | Cannot prove that a.T <:< a.U. 
diff --git a/tests/neg/summon-function.check b/tests/neg/summon-function.check index 863d1429d33f..b6ff4feea047 100644 --- a/tests/neg/summon-function.check +++ b/tests/neg/summon-function.check @@ -1,4 +1,4 @@ --- Error: tests/neg/summon-function.scala:2:23 ------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/summon-function.scala:2:23 ------------------------------------------------------------- 2 | summon[Int => String] // error | ^ | No given instance of type Int => String was found for parameter x of method summon in object Predef diff --git a/tests/neg/summonInline.check b/tests/neg/summonInline.check index 6c3839266ce4..e317ed53f8e2 100644 --- a/tests/neg/summonInline.check +++ b/tests/neg/summonInline.check @@ -1,4 +1,4 @@ --- Error: tests/neg/summonInline.scala:19:32 --------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/summonInline.scala:19:32 --------------------------------------------------------------- 19 |val missing1 = summonInlineCheck(1) // error | ^^^^^^^^^^^^^^^^^^^^ | Missing One @@ -9,7 +9,7 @@ 15 | case 1 => summonInline[Missing1] | ^^^^^^^^^^^^^^^^^^^^^^ -------------------------------------------------------------------------------------------------------------------- --- Error: tests/neg/summonInline.scala:20:32 --------------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/summonInline.scala:20:32 --------------------------------------------------------------- 20 |val missing2 = summonInlineCheck(2) // error | ^^^^^^^^^^^^^^^^^^^^ | Missing Two diff --git a/tests/neg/supertraits.scala b/tests/neg/supertraits.scala index 2fc79ca30f1d..6952c7640529 100644 --- a/tests/neg/supertraits.scala +++ b/tests/neg/supertraits.scala @@ -6,19 +6,20 @@ class C extends A, S val x = if ??? 
then B() else C() val x1: S = x // error -case object a -case object b +class Top +case object a extends Top +case object b extends Top val y = if ??? then a else b val y1: Product = y // error val y2: Serializable = y // error -enum Color { +enum Color extends Top { case Red, Green, Blue } -enum Nucleobase { +enum Nucleobase extends Top { case A, C, G, T } val z = if ??? then Color.Red else Nucleobase.G -val z1: reflect.Enum = z // error: Found: (z : Object) Required: reflect.Enum +val z1: reflect.Enum = z // error: Found: (z : Top) Required: reflect.Enum diff --git a/tests/neg/syntax-error-recovery.check b/tests/neg/syntax-error-recovery.check index 0bf626210fed..18d877833d79 100644 --- a/tests/neg/syntax-error-recovery.check +++ b/tests/neg/syntax-error-recovery.check @@ -94,12 +94,6 @@ | Not found: bam | | longer explanation available when compiling with `-explain` --- [E006] Not Found Error: tests/neg/syntax-error-recovery.scala:61:10 ------------------------------------------------- -61 | println(bam) // error - | ^^^ - | Not found: bam - | - | longer explanation available when compiling with `-explain` -- [E129] Potential Issue Warning: tests/neg/syntax-error-recovery.scala:7:2 ------------------------------------------- 6 | 2 7 | } diff --git a/tests/neg/syntax-error-recovery.scala b/tests/neg/syntax-error-recovery.scala index 775abeb97bdb..b6663cc9c70a 100644 --- a/tests/neg/syntax-error-recovery.scala +++ b/tests/neg/syntax-error-recovery.scala @@ -58,5 +58,5 @@ object Test2: def foo5(x: Int) = foo2(foo2(,) // error // error - println(bam) // error + println(bam) // error \ No newline at end of file diff --git a/tests/neg/t12715.scala b/tests/neg/t12715.scala new file mode 100644 index 000000000000..b24d51a6e9fa --- /dev/null +++ b/tests/neg/t12715.scala @@ -0,0 +1,17 @@ +trait A { def f: String } +trait B extends A { def f = "B" } +trait C extends A { override val f = "C" } +trait D extends C { override val f = "D" } +trait E extends A, B { def d = 
super.f } +final class O1 extends B, C, D, E // error: parent trait E has a super call which binds to the value D.f. Super calls can only target methods. +final class O2 extends B, C, E, D // error: parent trait E has a super call which binds to the value C.f. Super calls can only target methods. +final class O3 extends B, E, C, D + +object Main: + def main(args: Array[String]): Unit = + println(O1().f) // D + println(O2().f) // D + println(O3().f) // D + println(O3().d) // B + O1().d // was: NoSuchMethodError: 'java.lang.String D.f$(D)' + O2().d // was: NoSuchMethodError: 'java.lang.String C.f$(C)' diff --git a/tests/neg/t12715b.scala b/tests/neg/t12715b.scala new file mode 100644 index 000000000000..da024116d4b3 --- /dev/null +++ b/tests/neg/t12715b.scala @@ -0,0 +1,16 @@ +trait B: + def f: Float = 1.0f + +class A(override val f: Float) extends B + +trait C extends B: + abstract override val f = super.f + 100.0f + +trait D extends B: + abstract override val f = super.f + 1000.0f + +class ACD10 extends A(10.0f) with C with D // error: parent trait D has a super call to method B.f, which binds to the value C.f. Super calls can only target methods. + +object Test: + def main(args: Array[String]): Unit = + new ACD10 // was: NoSuchMethodError: 'float C.f$(C)' diff --git a/tests/neg/t5702-neg-bad-and-wild.check b/tests/neg/t5702-neg-bad-and-wild.check index f6d761a6726f..c461b76ea70b 100644 --- a/tests/neg/t5702-neg-bad-and-wild.check +++ b/tests/neg/t5702-neg-bad-and-wild.check @@ -56,6 +56,13 @@ | Recursive value $1$ needs type | | longer explanation available when compiling with `-explain` +-- Warning: tests/neg/t5702-neg-bad-and-wild.scala:13:22 --------------------------------------------------------------- +13 | case List(1, _*3:) => // error // error + | ^ + | Type ascriptions after patterns other than: + | * variable pattern, e.g. `case x: String =>` + | * number literal pattern, e.g. `case 10.5: Double =>` + | are no longer supported. 
Remove the type ascription or move it to a separate variable pattern. -- Warning: tests/neg/t5702-neg-bad-and-wild.scala:22:20 --------------------------------------------------------------- 22 | val K(x @ _*) = k | ^ diff --git a/tests/neg/t9419.scala b/tests/neg/t9419.scala new file mode 100644 index 000000000000..e9358c0ba641 --- /dev/null +++ b/tests/neg/t9419.scala @@ -0,0 +1,24 @@ +trait Magic[S]: + def init: S + def step(s: S): String + +object IntMagic extends Magic[Int]: + def init = 0 + def step(s: Int): String = (s - 1).toString + +object StrMagic extends Magic[String]: + def init = "hi" + def step(s: String): String = s.reverse + +object Main: + def onestep[T](m: () => Magic[T]): String = m().step(m().init) + def unostep[T](m: => Magic[T]): String = m.step(m.init) + + val iter: Iterator[Magic[?]] = Iterator.tabulate(Int.MaxValue)(i => if i % 2 == 0 then IntMagic else StrMagic) + + // was: class java.lang.String cannot be cast to class java.lang.Integer + def main(args: Array[String]): Unit = + onestep(() => iter.next()) // error + unostep(iter.next()) // error + val m = iter.next() + unostep(m) // ok, because m is a value diff --git a/tests/neg/t9419.zio-http.scala b/tests/neg/t9419.zio-http.scala new file mode 100644 index 000000000000..cff9ec51e6f9 --- /dev/null +++ b/tests/neg/t9419.zio-http.scala @@ -0,0 +1,18 @@ +// Minimisation of how the fix for t9419 affected zio-http +import java.util.concurrent.Future as JFuture + +trait Test: + def shutdownGracefully(): JFuture[_] + + def executedWildcard(jFuture: => JFuture[_]): Unit + def executedGeneric[A](jFuture: => JFuture[A]): Unit + def executedWildGen[A](jFuture: => JFuture[? <: A]): Unit + + // Even though JFuture is morally covariant, at least currently, + // there's no definition-side variance, so it's treated as invariant. + // So we have to be concerned that two different values of `JFuture[A]` + // with different types, blowing up together. So error in `fails`. 
+ def works = executedWildcard(shutdownGracefully()) + def fails = executedGeneric(shutdownGracefully()) // error + def fixed = executedGeneric(shutdownGracefully().asInstanceOf[JFuture[Any]]) // fix + def best2 = executedWildGen(shutdownGracefully()) // even better, use use-site variance in the method diff --git a/tests/neg/transparent.scala b/tests/neg/transparent.scala index b4d89478b0ac..95899bfa0b33 100644 --- a/tests/neg/transparent.scala +++ b/tests/neg/transparent.scala @@ -1,7 +1,8 @@ transparent def foo = 1 // error transparent inline def bar = 2 // ok transparent inline val x = 2 // error -transparent class c // error +transparent class c // ok +transparent final class d // error transparent object y // error transparent trait t // ok transparent type T = c // error diff --git a/tests/neg-custom-args/nopredef.scala b/tests/neg/unimport-Predef-assert.scala similarity index 70% rename from tests/neg-custom-args/nopredef.scala rename to tests/neg/unimport-Predef-assert.scala index b75f1361ddb9..0a22e200805a 100644 --- a/tests/neg-custom-args/nopredef.scala +++ b/tests/neg/unimport-Predef-assert.scala @@ -1,3 +1,5 @@ +import Predef.{assert as _} + object Test { assert("asdf" == "asdf") // error: not found assert } diff --git a/tests/neg/union.scala b/tests/neg/union.scala index 0a702ab70058..c6fd42e6629e 100644 --- a/tests/neg/union.scala +++ b/tests/neg/union.scala @@ -11,8 +11,9 @@ object Test { } object O { - class A - class B + class Top + class A extends Top + class B extends Top def f[T](x: T, y: T): T = x val x: A = f(new A { }, new A) diff --git a/tests/neg/warn-value-discard.check b/tests/neg/warn-value-discard.check new file mode 100644 index 000000000000..ab6539dd5cd8 --- /dev/null +++ b/tests/neg/warn-value-discard.check @@ -0,0 +1,20 @@ +-- [E175] Potential Issue Error: tests/neg/warn-value-discard.scala:15:35 ---------------------------------------------- +15 | firstThing().map(_ => secondThing()) // error + | ^^^^^^^^^^^^^ + | discarded 
non-Unit value of type Either[Failed, Unit] +-- [E175] Potential Issue Error: tests/neg/warn-value-discard.scala:18:35 ---------------------------------------------- +18 | firstThing().map(_ => secondThing()) // error + | ^^^^^^^^^^^^^ + | discarded non-Unit value of type Either[Failed, Unit] +-- [E175] Potential Issue Error: tests/neg/warn-value-discard.scala:27:36 ---------------------------------------------- +27 | mutable.Set.empty[String].remove("") // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | discarded non-Unit value of type Boolean +-- [E175] Potential Issue Error: tests/neg/warn-value-discard.scala:39:41 ---------------------------------------------- +39 | mutable.Set.empty[String].subtractOne("") // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | discarded non-Unit value of type scala.collection.mutable.Set[String] +-- [E175] Potential Issue Error: tests/neg/warn-value-discard.scala:59:4 ----------------------------------------------- +59 | mutable.Set.empty[String] += "" // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | discarded non-Unit value of type scala.collection.mutable.Set[String] diff --git a/tests/neg/warn-value-discard.scala b/tests/neg/warn-value-discard.scala new file mode 100644 index 000000000000..149433395cc5 --- /dev/null +++ b/tests/neg/warn-value-discard.scala @@ -0,0 +1,66 @@ +// scalac: -Wvalue-discard -Werror + +import scala.util.{Either, Right, Left} +import scala.collection.mutable + +case class Failed(msg: String) + +def firstThing(): Either[Failed, Unit] = + Right(()) + +def secondThing(): Either[Failed, Unit] = + Left(Failed("whoops you should have flatMapped me")) + +def singleExpr(): Either[Failed, Unit] = + firstThing().map(_ => secondThing()) // error + +def block(): Either[Failed, Unit] = { + firstThing().map(_ => secondThing()) // error +} + +class ValueDiscardTest: + val field = mutable.Set.empty[String] + + def remove(): Unit = + // Set#remove returns a Boolean, not this.type + // --> Warning + 
mutable.Set.empty[String].remove("") // error + + // TODO IMHO we don't need to support this, + // as it's just as easy to add a @nowarn annotation as a Unit ascription + //def removeAscribed(): Unit = { + // mutable.Set.empty[String].remove(""): Unit // nowarn + //} + + def subtract(): Unit = + // - Set#subtractOne returns this.type + // - receiver is not a field or a local variable (not quite sure what you'd call it) + // --> Warning + mutable.Set.empty[String].subtractOne("") // error + + def mutateLocalVariable(): Unit = { + // - Set#subtractOne returns this.type + // - receiver is a local variable + // --> No warning + val s: mutable.Set[String] = mutable.Set.empty[String] + s.subtractOne("") + } + + def mutateField(): Unit = + // - Set#subtractOne returns this.type + // - receiver is a local variable + // --> No warning + field.subtractOne("") + + def assignmentOperator(): Unit = + // - += returns this.type + // - receiver is not a field or a local variable + // --> Warning + mutable.Set.empty[String] += "" // error + + def assignmentOperatorLocalVariable(): Unit = + // - += returns this.type + // - receiver is a local variable + // --> No warning + val s: mutable.Set[String] = mutable.Set.empty[String] + s += "" diff --git a/tests/neg/yimports-custom.check b/tests/neg/yimports-custom.check new file mode 100644 index 000000000000..6ed2eb8b1df3 --- /dev/null +++ b/tests/neg/yimports-custom.check @@ -0,0 +1,7 @@ + +-- [E006] Not Found Error: tests/neg/yimports-custom/C_2.scala:5:16 ---------------------------------------------------- +5 | def greet() = println("hello, world!") // error + | ^^^^^^^ + | Not found: println + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/yimports-custom/C_2.scala b/tests/neg/yimports-custom/C_2.scala new file mode 100644 index 000000000000..6ba25ad2963c --- /dev/null +++ b/tests/neg/yimports-custom/C_2.scala @@ -0,0 +1,6 @@ +// scalac: -Yimports:hello.world.minidef + +class C { + val v: 
Numb = Magic + def greet() = println("hello, world!") // error +} diff --git a/tests/neg/yimports-custom/minidef_1.scala b/tests/neg/yimports-custom/minidef_1.scala new file mode 100644 index 000000000000..5d18d0a39584 --- /dev/null +++ b/tests/neg/yimports-custom/minidef_1.scala @@ -0,0 +1,7 @@ + +package hello.world + +object minidef { + type Numb = Int + final val Magic = 42 +} diff --git a/tests/neg/yimports-nojava.check b/tests/neg/yimports-nojava.check new file mode 100644 index 000000000000..8aef6786ca21 --- /dev/null +++ b/tests/neg/yimports-nojava.check @@ -0,0 +1,12 @@ +-- [E006] Not Found Error: tests/neg/yimports-nojava.scala:5:16 -------------------------------------------------------- +5 | def g() = new Integer(42) // error + | ^^^^^^^ + | Not found: type Integer + | + | longer explanation available when compiling with `-explain` +-- [E006] Not Found Error: tests/neg/yimports-nojava.scala:6:16 -------------------------------------------------------- +6 | def sleep() = Thread.sleep(42000L) // error + | ^^^^^^ + | Not found: Thread + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/yimports-nojava.scala b/tests/neg/yimports-nojava.scala new file mode 100644 index 000000000000..35233e37a775 --- /dev/null +++ b/tests/neg/yimports-nojava.scala @@ -0,0 +1,7 @@ +// scalac: -Yimports:scala,scala.Predef + +trait T { + def f() = println("hello, world!") + def g() = new Integer(42) // error + def sleep() = Thread.sleep(42000L) // error +} diff --git a/tests/neg/yimports-nosuch.check b/tests/neg/yimports-nosuch.check new file mode 100644 index 000000000000..5a77d7f8d016 --- /dev/null +++ b/tests/neg/yimports-nosuch.check @@ -0,0 +1,2 @@ +error: bad preamble import skala +error: bad preamble import scala.Predeff diff --git a/tests/neg/yimports-nosuch.scala b/tests/neg/yimports-nosuch.scala new file mode 100644 index 000000000000..431daf39a180 --- /dev/null +++ b/tests/neg/yimports-nosuch.scala @@ -0,0 +1,5 @@ +// scalac: 
-Yimports:skala,scala.Predeff +// +class C +// nopos-error +// nopos-error diff --git a/tests/neg/yimports-order.check b/tests/neg/yimports-order.check new file mode 100644 index 000000000000..b49503f75e01 --- /dev/null +++ b/tests/neg/yimports-order.check @@ -0,0 +1,16 @@ +-- [E006] Not Found Error: tests/neg/yimports-order.scala:9:16 --------------------------------------------------------- +9 | def f() = Map("hello" -> "world") // error // error + | ^^^ + | Not found: Map + | + | longer explanation available when compiling with `-explain` +-- [E008] Not Found Error: tests/neg/yimports-order.scala:9:28 --------------------------------------------------------- +9 | def f() = Map("hello" -> "world") // error // error + | ^^^^^^^^^^ + | value -> is not a member of String +-- [E006] Not Found Error: tests/neg/yimports-order.scala:10:16 -------------------------------------------------------- +10 | def g() = println(f()) // error + | ^^^^^^^ + | Not found: println + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/yimports-order.scala b/tests/neg/yimports-order.scala new file mode 100644 index 000000000000..9cba91385b8a --- /dev/null +++ b/tests/neg/yimports-order.scala @@ -0,0 +1,13 @@ + +package top { + package middle { + class C { + def c() = println("hello, world") + } + import Predef.{Map => _} + object Test { + def f() = Map("hello" -> "world") // error // error + def g() = println(f()) // error + } + } +} diff --git a/tests/neg/yimports-predef.check b/tests/neg/yimports-predef.check new file mode 100644 index 000000000000..eb8881e04223 --- /dev/null +++ b/tests/neg/yimports-predef.check @@ -0,0 +1,4 @@ +-- [E008] Not Found Error: tests/neg/yimports-predef.scala:6:21 -------------------------------------------------------- +6 | def f[A](x: A) = x + 42 // error + | ^^^ + | value + is not a member of A diff --git a/tests/neg/yimports-predef.scala b/tests/neg/yimports-predef.scala new file mode 100644 index 
000000000000..8bfe89b08cd8 --- /dev/null +++ b/tests/neg/yimports-predef.scala @@ -0,0 +1,7 @@ +// scalac: -Yimports:scala,scala.Predef +// +import Predef.{any2stringadd => _, _} + +class classic { + def f[A](x: A) = x + 42 // error +} diff --git a/tests/neg/yimports-stable.check b/tests/neg/yimports-stable.check new file mode 100644 index 000000000000..c5bfd914ae07 --- /dev/null +++ b/tests/neg/yimports-stable.check @@ -0,0 +1,14 @@ + +error: bad preamble import hello.world.potions +-- [E006] Not Found Error: tests/neg/yimports-stable/C_2.scala:4:9 ----------------------------------------------------- +4 | val v: Numb = magic // error // error + | ^^^^ + | Not found: type Numb + | + | longer explanation available when compiling with `-explain` +-- [E006] Not Found Error: tests/neg/yimports-stable/C_2.scala:4:16 ---------------------------------------------------- +4 | val v: Numb = magic // error // error + | ^^^^^ + | Not found: magic + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/yimports-stable/C_2.scala b/tests/neg/yimports-stable/C_2.scala new file mode 100644 index 000000000000..0b97775f1a01 --- /dev/null +++ b/tests/neg/yimports-stable/C_2.scala @@ -0,0 +1,7 @@ +// scalac: -Yimports:scala,scala.Predef,hello.world.potions +// +class C { + val v: Numb = magic // error // error + def greet() = println("hello, world!") +} +// nopos-error diff --git a/tests/neg/yimports-stable/minidef_1.scala b/tests/neg/yimports-stable/minidef_1.scala new file mode 100644 index 000000000000..b3ea7445df24 --- /dev/null +++ b/tests/neg/yimports-stable/minidef_1.scala @@ -0,0 +1,11 @@ + +package hello + +trait stuff { + type Numb = Int + val magic = 42 +} + +object world { + val potions = new stuff {} +} diff --git a/tests/patmat/aliasing.check b/tests/patmat/aliasing.check index d7c21e8d0605..c367626d6f1e 100644 --- a/tests/patmat/aliasing.check +++ b/tests/patmat/aliasing.check @@ -1,3 +1,3 @@ 14: Pattern Match Exhaustivity: _: Trait & 
Test.Alias1, _: Clazz & Test.Alias1 19: Pattern Match Exhaustivity: _: Trait & Test.Alias2 -23: Pattern Match Exhaustivity: _: Trait & (Test.Alias2 & OpenTrait2){x: Int} +23: Pattern Match Exhaustivity: _: Trait & (Test.Alias2 & OpenTrait2){val x: Int} diff --git a/tests/patmat/andtype-refinedtype-interaction.check b/tests/patmat/andtype-refinedtype-interaction.check index 9f57c5ba4867..d9512b5cb3e4 100644 --- a/tests/patmat/andtype-refinedtype-interaction.check +++ b/tests/patmat/andtype-refinedtype-interaction.check @@ -1,9 +1,9 @@ -32: Pattern Match Exhaustivity: _: Trait & C1{x: Int} -48: Pattern Match Exhaustivity: _: Trait & (C1 | (C2 | T1)){x: Int} & (C3 | (C4 | T2)){x: Int}, _: Clazz & (C1 | (C2 | T1)){x: Int} & (C3 | (C4 | T2)){x: Int} -54: Pattern Match Exhaustivity: _: Trait & (C1 | (C2 | T1)){x: Int} & C3{x: Int} -59: Pattern Match Exhaustivity: _: Trait & (C1 & C2){x: Int} -65: Pattern Match Exhaustivity: _: Trait & (C1 | C2){x: Int} & (C3 | SubC1){x: Int} -72: Pattern Match Exhaustivity: _: Trait & (T1 & (C1 | SubC2)){x: Int} & (T2 & (C2 | C3 | SubC1)){x: Int} & - SubSubC1{x: Int} -79: Pattern Match Exhaustivity: _: Trait & (T1 & (C1 | SubC2)){x: Int} & (T2 & (C2 | C3 | SubC1)){x: Int} & - SubSubC2{x: Int} +32: Pattern Match Exhaustivity: _: Trait & C1{val x: Int} +48: Pattern Match Exhaustivity: _: Trait & (C1 | (C2 | T1)){val x: Int} & (C3 | (C4 | T2)){val x: Int}, _: Clazz & (C1 | (C2 | T1)){val x: Int} & (C3 | (C4 | T2)){val x: Int} +54: Pattern Match Exhaustivity: _: Trait & (C1 | (C2 | T1)){val x: Int} & C3{val x: Int} +59: Pattern Match Exhaustivity: _: Trait & (C1 & C2){val x: Int} +65: Pattern Match Exhaustivity: _: Trait & (C1 | C2){val x: Int} & (C3 | SubC1){val x: Int} +72: Pattern Match Exhaustivity: _: Trait & (T1 & (C1 | SubC2)){val x: Int} & (T2 & (C2 | C3 | SubC1)){val x: Int} & + SubSubC1{val x: Int} +79: Pattern Match Exhaustivity: _: Trait & (T1 & (C1 | SubC2)){val x: Int} & (T2 & (C2 | C3 | SubC1)){val x: Int} & + SubSubC2{val x: 
Int} diff --git a/tests/patmat/i11541.scala b/tests/patmat/i11541.scala new file mode 100644 index 000000000000..4ac1af08c80b --- /dev/null +++ b/tests/patmat/i11541.scala @@ -0,0 +1,13 @@ +import scala.reflect.ClassTag + +class Test: + type A + + given ClassTag[A] = ??? + + var a: A | Null = null + + a match { //WARNING: match may not be exhaustive. It would fail on pattern case: _: A + case null => + case a: A => + } diff --git a/tests/patmat/isSubspace-Typ-Prod.scala b/tests/patmat/isSubspace-Typ-Prod.scala new file mode 100644 index 000000000000..df17c99d67be --- /dev/null +++ b/tests/patmat/isSubspace-Typ-Prod.scala @@ -0,0 +1,7 @@ +case class Foo[T](x: T) +class Bar extends Foo[String]("") + +def test(x: Any) = x match + case Foo(1) => + case _: Bar => // used to warn about unreachable case + // case _: Foo[_] => // still warns, something else is wrong diff --git a/tests/patmat/java-enum1/ParameterModifier.java b/tests/patmat/java-enum1/ParameterModifier.java new file mode 100644 index 000000000000..c9ddc157ba7e --- /dev/null +++ b/tests/patmat/java-enum1/ParameterModifier.java @@ -0,0 +1,8 @@ +public enum ParameterModifier { + Repeated, + Plain, + ByName; + + private ParameterModifier() { + } +} diff --git a/tests/patmat/java-enum1/Test.scala b/tests/patmat/java-enum1/Test.scala new file mode 100644 index 000000000000..b6ea483d8fb4 --- /dev/null +++ b/tests/patmat/java-enum1/Test.scala @@ -0,0 +1,6 @@ +class Test: + private def showParameterModifier(base: String, pm: ParameterModifier): String = pm match { + case ParameterModifier.Plain => base + case ParameterModifier.Repeated => base + "*" + case ParameterModifier.ByName => "=> " + base + } diff --git a/tests/pending/neg/cc-depfun.scala b/tests/pending/neg/cc-depfun.scala new file mode 100644 index 000000000000..4d600872d208 --- /dev/null +++ b/tests/pending/neg/cc-depfun.scala @@ -0,0 +1,14 @@ +import language.experimental.captureChecking + +// compare with neg-custom-args/captures/depfun.scala, which 
produces errors +// but the errors go away if ->{} gets replaced by ->. + +trait Cap { def use(): Unit } + +def main() = { + val f: (io: Cap^) -> () -> Unit = + io => () => io.use() // error + + val g: (Cap^) -> () -> Unit = + io => () => io.use() // error +} diff --git a/tests/pending/neg/i16451.check b/tests/pending/neg/i16451.check new file mode 100644 index 000000000000..e53085e8eafa --- /dev/null +++ b/tests/pending/neg/i16451.check @@ -0,0 +1,24 @@ +-- Error: tests/neg/i16451.scala:13:9 ---------------------------------------------------------------------------------- +13 | case x: Wrapper[Color.Red.type] => Some(x) // error + | ^ + |the type test for Wrapper[(Color.Red : Color)] cannot be checked at runtime because its type arguments can't be determined from Wrapper[Color] +-- Error: tests/neg/i16451.scala:21:9 ---------------------------------------------------------------------------------- +21 | case x: Wrapper[Color.Red.type] => Some(x) // error + | ^ + |the type test for Wrapper[(Color.Red : Color)] cannot be checked at runtime because its type arguments can't be determined from Any +-- Error: tests/neg/i16451.scala:25:9 ---------------------------------------------------------------------------------- +25 | case x: Wrapper[Color.Red.type] => Some(x) // error + | ^ + |the type test for Wrapper[(Color.Red : Color)] cannot be checked at runtime because its type arguments can't be determined from Wrapper[Color] +-- Error: tests/neg/i16451.scala:29:9 ---------------------------------------------------------------------------------- +29 | case x: Wrapper[Color.Red.type] => Some(x) // error + | ^ + |the type test for Wrapper[(Color.Red : Color)] cannot be checked at runtime because its type arguments can't be determined from A1 +-- Error: tests/neg/i16451.scala:34:11 --------------------------------------------------------------------------------- +34 | case x: Wrapper[Color.Red.type] => x // error + | ^ + |the type test for Wrapper[(Color.Red : Color)] 
cannot be checked at runtime because its type arguments can't be determined from Wrapper[Color] +-- Error: tests/neg/i16451.scala:39:11 --------------------------------------------------------------------------------- +39 | case x: Wrapper[Color.Red.type] => x // error + | ^ + |the type test for Wrapper[(Color.Red : Color)] cannot be checked at runtime because its type arguments can't be determined from Wrapper[Color] diff --git a/tests/pending/neg/i16451.scala b/tests/pending/neg/i16451.scala new file mode 100644 index 000000000000..49997d2bcf92 --- /dev/null +++ b/tests/pending/neg/i16451.scala @@ -0,0 +1,40 @@ +// scalac: -Werror +enum Color: + case Red, Green + +case class Wrapper[A](value: A) + +object Test: + def test_correct(x: Wrapper[Color]): Option[Wrapper[Color.Red.type]] = x match + case x: Wrapper[Color.Red.type] => Some(x) // error + case null => None + + def test_different(x: Wrapper[Color]): Option[Wrapper[Color]] = x match + case x @ Wrapper(_: Color.Red.type) => Some(x) + case x @ Wrapper(_: Color.Green.type) => None + + def test_any(x: Any): Option[Wrapper[Color.Red.type]] = x match + case x: Wrapper[Color.Red.type] => Some(x) // error + case _ => None + + def test_wrong(x: Wrapper[Color]): Option[Wrapper[Color.Red.type]] = x match + case x: Wrapper[Color.Red.type] => Some(x) // error + case null => None + + def t2[A1 <: Wrapper[Color]](x: A1): Option[Wrapper[Color.Red.type]] = x match + case x: Wrapper[Color.Red.type] => Some(x) // error + case null => None + + def test_wrong_seq(xs: Seq[Wrapper[Color]]): Seq[Wrapper[Color.Red.type]] = + xs.collect { + case x: Wrapper[Color.Red.type] => x // error + } + + def test_wrong_seq2(xs: Seq[Wrapper[Color]]): Seq[Wrapper[Color.Red.type]] = + xs.collect { x => x match + case x: Wrapper[Color.Red.type] => x // error + } + + def main(args: Array[String]): Unit = + println(test_wrong_seq(Seq(Wrapper(Color.Red), Wrapper(Color.Green)))) + // outputs: List(Wrapper(Red), Wrapper(Green)) diff --git 
a/tests/pending/neg/yimports-custom-b.check b/tests/pending/neg/yimports-custom-b.check new file mode 100644 index 000000000000..d046a1d8f6cc --- /dev/null +++ b/tests/pending/neg/yimports-custom-b.check @@ -0,0 +1,10 @@ + +C_2.scala:8: error: not found: type Numb + val v: Numb = Answer + ^ +-- [E006] Not Found Error: tests/neg/yimports-custom-b/C_2.scala:9:16 -------------------------------------------------- +9 | def greet() = println("hello, world!") // error + | ^^^^^^^ + | Not found: println + | + | longer explanation available when compiling with `-explain` diff --git a/tests/pending/neg/yimports-custom-b/C_2.scala b/tests/pending/neg/yimports-custom-b/C_2.scala new file mode 100644 index 000000000000..8da798e80b0d --- /dev/null +++ b/tests/pending/neg/yimports-custom-b/C_2.scala @@ -0,0 +1,10 @@ +// scalac: -Yimports:hello.world.minidef + +import hello.{world => hw} +import hw.minidef.{Magic => Answer} + +// Finds the answer, but dumb to forget Numb +class C { + val v: Numb = Answer // error + def greet() = println("hello, world!") // error +} diff --git a/tests/pending/neg/yimports-custom-b/minidef_1.scala b/tests/pending/neg/yimports-custom-b/minidef_1.scala new file mode 100644 index 000000000000..befc137b6ab6 --- /dev/null +++ b/tests/pending/neg/yimports-custom-b/minidef_1.scala @@ -0,0 +1,8 @@ +// scalac: -Yimports:scala + +package hello.world + +object minidef { + type Numb = Int + final val Magic = 42 +} diff --git a/tests/pending/neg/yimports-masked.check b/tests/pending/neg/yimports-masked.check new file mode 100644 index 000000000000..ae715313392a --- /dev/null +++ b/tests/pending/neg/yimports-masked.check @@ -0,0 +1,10 @@ + +C_2.scala:11: error: not found: type Numb + val v: Numb = Answer + ^ +-- [E006] Not Found Error: tests/neg/yimports-masked/C_2.scala:12:18 --------------------------------------------------- +12 | def greet() = println("hello, world!") // error + | ^^^^^^^ + | Not found: println + | + | longer explanation available when 
compiling with `-explain` diff --git a/tests/pending/neg/yimports-masked/C_2.scala b/tests/pending/neg/yimports-masked/C_2.scala new file mode 100644 index 000000000000..1b6c736bad7b --- /dev/null +++ b/tests/pending/neg/yimports-masked/C_2.scala @@ -0,0 +1,14 @@ +// scalac: -Yimports:scala,hello.world.minidef + +// import at top level or top of package disables implicit import. +// the import can appear at any statement position, here, end of package. +// Update: with new trick, the import has to be completed before usages. + +import hello.world.minidef.{Magic => Answer} + +package p { + class C { + val v: Numb = Answer // error + def greet() = println("hello, world!") // error + } +} diff --git a/tests/pending/neg/yimports-masked/minidef_1.scala b/tests/pending/neg/yimports-masked/minidef_1.scala new file mode 100644 index 000000000000..5d18d0a39584 --- /dev/null +++ b/tests/pending/neg/yimports-masked/minidef_1.scala @@ -0,0 +1,7 @@ + +package hello.world + +object minidef { + type Numb = Int + final val Magic = 42 +} diff --git a/tests/pending/pos/i16268.scala b/tests/pending/pos/i16268.scala new file mode 100644 index 000000000000..6b44e71a2247 --- /dev/null +++ b/tests/pending/pos/i16268.scala @@ -0,0 +1,25 @@ +import language.experimental.captureChecking +class Tree +case class Thicket(trees: List[Tree]) extends Tree + +def test1(segments: List[{*} Tree]) = + val elems = segments flatMap { (t: {*} Tree) => t match // error + case ts: Thicket => ts.trees.tail + case t => Nil + } + elems + +def test2(segments: List[{*} Tree]) = + val f = (t: {*} Tree) => t match + case ts: Thicket => ts.trees.tail + case t => Nil + val elems = segments.flatMap(f) // error + elems + +def test3(c: {*} Any)(segments: List[{c} Tree]) = + val elems = segments flatMap { (t: {c} Tree) => t match + case ts: Thicket => ts.trees.tail + case t => Nil + } + elems + diff --git a/tests/pending/pos/i16826.scala b/tests/pending/pos/i16826.scala new file mode 100644 index 
000000000000..a938ab42dac3 --- /dev/null +++ b/tests/pending/pos/i16826.scala @@ -0,0 +1,10 @@ +import language.experimental.captureChecking +class A +class B(a: {*} A) +class C(a: {*} A): + def setB(b: {a} B): Unit = ??? + + +def test(a1: {*} A)(b1: {a1} B) = + val c = new C(a1) + c.setB(b1) diff --git a/tests/pending/run/i15893.scala b/tests/pending/run/i15893.scala index dedec2138f2a..d9cd2822e971 100644 --- a/tests/pending/run/i15893.scala +++ b/tests/pending/run/i15893.scala @@ -24,7 +24,7 @@ transparent inline def transparentInlineMod2(inline n: NatT): NatT = inline n m case Succ(Zero()) => Succ(Zero()) case Succ(Succ(predPredN)) => transparentInlineMod2(predPredN) */ -def dependentlyTypedMod2[N <: NatT](n: N): Mod2[N] = n match // exhaustivity warning; unexpected +def dependentlyTypedMod2[N <: NatT](n: N): Mod2[N] = n match case Zero(): Zero => Zero() case Succ(Zero()): Succ[Zero] => Succ(Zero()) case Succ(Succ(predPredN)): Succ[Succ[_]] => dependentlyTypedMod2(predPredN) @@ -61,5 +61,5 @@ inline def transparentInlineFoo(inline n: NatT): NatT = inline transparentInline println(transparentInlineFoo(Succ(Succ(Succ(Zero()))))) // prints Zero(), as expected */ println(dependentlyTypedMod2(Succ(Succ(Succ(Zero()))))) // runtime error; unexpected -// println(inlineDependentlyTypedMod2(Succ(Succ(Succ(Zero()))))) // doesn't compile; unexpected -// println(transparentInlineDependentlyTypedMod2(Succ(Succ(Succ(Zero()))))) // doesn't compile; unexpected +// println(inlineDependentlyTypedMod2(Succ(Succ(Succ(Zero()))))) // prints Succ(Zero()), as expected +// println(transparentInlineDependentlyTypedMod2(Succ(Succ(Succ(Zero()))))) // prints Succ(Zero()), as expected diff --git a/tests/pos-custom-args/bounded1.scala b/tests/pos-custom-args/bounded1.scala index 5fb7f0da904b..e16da4935a14 100644 --- a/tests/pos-custom-args/bounded1.scala +++ b/tests/pos-custom-args/bounded1.scala @@ -1,27 +1,27 @@ // To be revisited class CC -type Cap = {*} CC +type Cap = CC^ def test(c: Cap) 
= - class B[X <: {c} Object](x: X): + class B[X <: Object^{c}](x: X): def elem = x def lateElem = () => x def f(x: Int): Int = if c == c then x else 0 val b = new B(f) val r1 = b.elem - val r1c: {c} Int -> Int = r1 + val r1c: Int^{c} -> Int = r1 val r2 = b.lateElem - val r2c: () -> {c} Int -> Int = r2 // was error now OK + val r2c: () -> Int^{c} -> Int = r2 // was error now OK def test2(c: Cap) = - class B[X <: {*} Any](x: X): + class B[X <: Any^](x: X): def elem = x def lateElem = () => x def f(x: Int): Int = if c == c then x else 0 val b = new B(f) val r1 = b.elem - val r1c: {c} Int -> Int = r1 + val r1c: Int ->{c} Int = r1 val r2 = b.lateElem - val r2c: () -> {c} Int -> Int = r2 // was error now OK \ No newline at end of file + val r2c: () -> Int ->{c} Int = r2 // was error now OK \ No newline at end of file diff --git a/tests/pos-custom-args/captures/bounded.scala b/tests/pos-custom-args/captures/bounded.scala index 85c1a67387b5..7959df7d50cf 100644 --- a/tests/pos-custom-args/captures/bounded.scala +++ b/tests/pos-custom-args/captures/bounded.scala @@ -1,14 +1,14 @@ class CC -type Cap = {*} CC +type Cap = CC^ def test(c: Cap) = - class B[X <: {c} Object](x: X): + class B[X <: Object^{c}](x: X): def elem = x def lateElem = () => x def f(x: Int): Int = if c == c then x else 0 val b = new B(f) val r1 = b.elem - val r1c: {c} Int -> Int = r1 + val r1c: Int ->{c} Int = r1 val r2 = b.lateElem - val r2c: {c} () -> {c} Int -> Int = r2 \ No newline at end of file + val r2c: () ->{c} Int ->{c} Int = r2 \ No newline at end of file diff --git a/tests/pos-custom-args/captures/boxed1.scala b/tests/pos-custom-args/captures/boxed1.scala index ba198335f51d..8c6b63ef0134 100644 --- a/tests/pos-custom-args/captures/boxed1.scala +++ b/tests/pos-custom-args/captures/boxed1.scala @@ -6,6 +6,6 @@ def foo(x: => Int): Unit = () def test(c: Cap) = val f = () => { c; 1 } - val _: {c} () -> Int = f + val _: () ->{c} Int = f val g = () => Box(f) - val _: () -> Box[{f} () -> Int] = g + val 
_: () -> Box[() ->{f} Int] = g diff --git a/tests/pos-custom-args/captures/boxmap-paper.scala b/tests/pos-custom-args/captures/boxmap-paper.scala index aff4c38e1b9d..9d5bb49af25d 100644 --- a/tests/pos-custom-args/captures/boxmap-paper.scala +++ b/tests/pos-custom-args/captures/boxmap-paper.scala @@ -12,25 +12,25 @@ def map[A, B](c: Cell[A])(f: A => B): Cell[B] def pureMap[A, B](c: Cell[A])(f: A -> B): Cell[B] = c[Cell[B]]((x: A) => cell(f(x))) -def lazyMap[A, B](c: Cell[A])(f: A => B): {f} () -> Cell[B] +def lazyMap[A, B](c: Cell[A])(f: A => B): () ->{f} Cell[B] = () => c[Cell[B]]((x: A) => cell(f(x))) trait IO: def print(s: String): Unit -def test(io: {*} IO) = +def test(io: IO^) = - val loggedOne: {io} () -> Int = () => { io.print("1"); 1 } + val loggedOne: () ->{io} Int = () => { io.print("1"); 1 } - val c: Cell[{io} () -> Int] - = cell[{io} () -> Int](loggedOne) + val c: Cell[() ->{io} Int] + = cell[() ->{io} Int](loggedOne) - val g = (f: {io} () -> Int) => + val g = (f: () ->{io} Int) => val x = f(); io.print(" + ") val y = f(); io.print(s" = ${x + y}") - val r = lazyMap[{io} () -> Int, Unit](c)(f => g(f)) - val r2 = lazyMap[{io} () -> Int, Unit](c)(g) + val r = lazyMap[() ->{io} Int, Unit](c)(f => g(f)) + val r2 = lazyMap[() ->{io} Int, Unit](c)(g) val r3 = lazyMap(c)(g) val _ = r() val _ = r2() diff --git a/tests/pos-custom-args/captures/byname.scala b/tests/pos-custom-args/captures/byname.scala index 35b8876d0058..efd76618469d 100644 --- a/tests/pos-custom-args/captures/byname.scala +++ b/tests/pos-custom-args/captures/byname.scala @@ -1,12 +1,12 @@ import annotation.retainsByName class CC -type Cap = {*} CC +type Cap = CC^ class I -def test(cap1: Cap, cap2: Cap): {cap1} I = +def test(cap1: Cap, cap2: Cap): I^{cap1} = def f() = if cap1 == cap1 then I() else I() - def h(x: {cap1}-> I) = x + def h(x: ->{cap} I) = x h(f()) // OK def hh(x: -> I @retainsByName(cap1)) = x h(f()) diff --git a/tests/pos-custom-args/captures/bynamefun.scala 
b/tests/pos-custom-args/captures/bynamefun.scala new file mode 100644 index 000000000000..86bad201ffc3 --- /dev/null +++ b/tests/pos-custom-args/captures/bynamefun.scala @@ -0,0 +1,11 @@ +object test: + class Plan(elem: Plan) + object SomePlan extends Plan(???) + def f1(expr: (-> Plan) -> Plan): Plan = expr(SomePlan) + f1 { onf => Plan(onf) } + def f2(expr: (=> Plan) -> Plan): Plan = ??? + f2 { onf => Plan(onf) } + def f3(expr: (-> Plan) => Plan): Plan = ??? + f1 { onf => Plan(onf) } + def f4(expr: (=> Plan) => Plan): Plan = ??? + f2 { onf => Plan(onf) } diff --git a/tests/pos-custom-args/captures/caps-universal.scala b/tests/pos-custom-args/captures/caps-universal.scala index d84f2b7b2584..3768c640fd68 100644 --- a/tests/pos-custom-args/captures/caps-universal.scala +++ b/tests/pos-custom-args/captures/caps-universal.scala @@ -1,7 +1,7 @@ import annotation.retains val foo: Int => Int = x => x -val bar: (Int -> Int) @retains(caps.*) = foo -val baz: {*} Int -> Int = bar +val bar: (Int -> Int) @retains(caps.cap) = foo +val baz: Int => Int = bar diff --git a/tests/pos-custom-args/captures/capt-capability.scala b/tests/pos-custom-args/captures/capt-capability.scala index 4dbd6e32f2a4..830d341c7bca 100644 --- a/tests/pos-custom-args/captures/capt-capability.scala +++ b/tests/pos-custom-args/captures/capt-capability.scala @@ -1,7 +1,7 @@ import annotation.capability @capability class Cap -def f1(c: Cap): {c} () -> c.type = () => c // ok +def f1(c: Cap): () ->{c} c.type = () => c // ok def f2: Int = val g: Boolean => Int = ??? @@ -17,8 +17,8 @@ def f3: Int = def foo() = val x: Cap = ??? val y: Cap = x - val x2: {x} () -> Cap = ??? - val y2: {x} () -> Cap = x2 + val x2: () ->{x} Cap = ??? 
+ val y2: () ->{x} Cap = x2 val z1: () => Cap = f1(x) def h[X](a: X)(b: X) = a diff --git a/tests/pos-custom-args/captures/capt-depfun.scala b/tests/pos-custom-args/captures/capt-depfun.scala index 0e9786b2ee34..e3abbe0994c5 100644 --- a/tests/pos-custom-args/captures/capt-depfun.scala +++ b/tests/pos-custom-args/captures/capt-depfun.scala @@ -1,6 +1,6 @@ import annotation.retains class C -type Cap = C @retains(caps.*) +type Cap = C @retains(caps.cap) type T = (x: Cap) -> String @retains(x) @@ -8,7 +8,7 @@ type ID[X] = X val aa: ((x: Cap) -> String @retains(x)) = (x: Cap) => "" -def f(y: Cap, z: Cap): String @retains(caps.*) = +def f(y: Cap, z: Cap): String @retains(caps.cap) = val a: ((x: Cap) -> String @retains(x)) = (x: Cap) => "" val b = a(y) val c: String @retains(y) = b @@ -16,6 +16,6 @@ def f(y: Cap, z: Cap): String @retains(caps.*) = val d = a(g()) val ac: ((x: Cap) -> ID[String @retains(x) -> String @retains(x)]) = ??? - val bc: (({y} String) -> {y} String) = ac(y) - val dc: (String -> {y, z} String) = ac(g()) + val bc: String^{y} -> String^{y} = ac(y) + val dc: String -> String^{y, z} = ac(g()) c diff --git a/tests/pos-custom-args/captures/capt-depfun2.scala b/tests/pos-custom-args/captures/capt-depfun2.scala index 1c747d5885e6..e4645cfcc920 100644 --- a/tests/pos-custom-args/captures/capt-depfun2.scala +++ b/tests/pos-custom-args/captures/capt-depfun2.scala @@ -1,9 +1,9 @@ import annotation.retains class C -type Cap = C @retains(caps.*) +type Cap = C @retains(caps.cap) def f(y: Cap, z: Cap) = def g(): C @retains(y, z) = ??? val ac: ((x: Cap) -> Array[String @retains(x)]) = ??? - val dc: Array[? >: String <: {y, z} String] = ac(g()) // needs to be inferred + val dc: Array[? 
>: String <: String]^{y, z} = ac(g()) // needs to be inferred val ec = ac(y) diff --git a/tests/pos-custom-args/captures/capt-env.scala b/tests/pos-custom-args/captures/capt-env.scala new file mode 100644 index 000000000000..be24ed618606 --- /dev/null +++ b/tests/pos-custom-args/captures/capt-env.scala @@ -0,0 +1,8 @@ +class C +type Cap = C^ + +def test(c: Cap) = + def x = () => () => c; () + def y = () => x() + def z = () => x()() + diff --git a/tests/pos-custom-args/captures/capt-test.scala b/tests/pos-custom-args/captures/capt-test.scala index 6ee0d2a4d9f4..e229c685d846 100644 --- a/tests/pos-custom-args/captures/capt-test.scala +++ b/tests/pos-custom-args/captures/capt-test.scala @@ -19,7 +19,10 @@ def map[A, B](f: A => B)(xs: LIST[A]): LIST[B] = xs.map(f) class C -type Cap = {*} C +type Cap = C^ + +class Foo(x: Cap): + this: Foo^{x} => def test(c: Cap, d: Cap) = def f(x: Cap): Unit = if c == x then () @@ -29,7 +32,7 @@ def test(c: Cap, d: Cap) = val zs = val z = g CONS(z, ys) - val zsc: LIST[{d, y} Cap -> Unit] = zs + val zsc: LIST[Cap ->{d, y} Unit] = zs val a4 = zs.map(identity) - val a4c: LIST[{d, y} Cap -> Unit] = a4 + val a4c: LIST[Cap ->{d, y} Unit] = a4 diff --git a/tests/pos-custom-args/captures/capt0.scala b/tests/pos-custom-args/captures/capt0.scala index 52d6253af46b..013ff3a4ee19 100644 --- a/tests/pos-custom-args/captures/capt0.scala +++ b/tests/pos-custom-args/captures/capt0.scala @@ -1,7 +1,7 @@ object Test: def test() = - val x: {*} Any = "abc" + val x: Any^ = "abc" val y: Object @scala.annotation.retains(x) = ??? 
- val z: Object @scala.annotation.retains(x, caps.*) = y: Object @annotation.retains(x) + val z: Object @scala.annotation.retains(x, caps.cap) = y: Object @annotation.retains(x) diff --git a/tests/pos-custom-args/captures/capt1.scala b/tests/pos-custom-args/captures/capt1.scala index cc39790623d4..8d2285f1fa50 100644 --- a/tests/pos-custom-args/captures/capt1.scala +++ b/tests/pos-custom-args/captures/capt1.scala @@ -1,9 +1,9 @@ class C -type Cap = {*} C -def f1(c: Cap): {c} () -> c.type = () => c // ok +type Cap = C^ +def f1(c: Cap): () ->{c} c.type = () => c // ok def f2: Int = - val g: {*} Boolean -> Int = ??? + val g: Boolean ->{cap} Int = ??? val x = g(true) x @@ -13,11 +13,11 @@ def f3: Int = val x = g.apply(true) x -def foo(): {*} C = - val x: {*} C = ??? - val y: {x} C = x - val x2: {x} () -> C = ??? - val y2: {x} () -> {x} C = x2 +def foo(): C^ = + val x: C^ = ??? + val y: C^{x} = x + val x2: () ->{x} C = ??? + val y2: () ->{x} C^{x} = x2 val z1: () => Cap = f1(x) def h[X](a: X)(b: X) = a diff --git a/tests/pos-custom-args/captures/capt2.scala b/tests/pos-custom-args/captures/capt2.scala index 77c0caaf0f1d..45381bf602ed 100644 --- a/tests/pos-custom-args/captures/capt2.scala +++ b/tests/pos-custom-args/captures/capt2.scala @@ -1,9 +1,9 @@ import annotation.retains class C -type Cap = C @retains(caps.*) +type Cap = C @retains(caps.cap) def test1() = - val y: {*} String = "" + val y: String^ = "" def x: Object @retains(y) = y def test2() = @@ -13,8 +13,8 @@ def test2() = z: (() -> Unit) @retains(x) def z2: (() -> Unit) @retains(y) = y z2: (() -> Unit) @retains(y) - val p: {*} () -> String = () => "abc" - val q: {p} C = ??? - val _ = p: ({p} () -> String) + val p: () => String = () => "abc" + val q: C^{p} = ??? 
+ val _ = p: (() ->{p} String) diff --git a/tests/pos-custom-args/captures/caseclass.scala b/tests/pos-custom-args/captures/caseclass.scala index a845da181e9f..ffbf878dca49 100644 --- a/tests/pos-custom-args/captures/caseclass.scala +++ b/tests/pos-custom-args/captures/caseclass.scala @@ -1,6 +1,6 @@ @annotation.capability class C object test1: - case class Ref(x: {*} String) + case class Ref(x: String^) def test(c: C) = val x1 = Ref("hello") @@ -14,7 +14,7 @@ object test2: val pure: () -> Unit = () => () val impure: () => Unit = pure - val mixed: {c} () -> Unit = pure + val mixed: () ->{c} Unit = pure val x = Ref(impure) val y0 = x.copy(pure) val yc0: Ref = y0 @@ -25,10 +25,10 @@ object test2: val yc2: Ref = y2 val x3 = Ref(mixed) - val _: {c} Ref = x3 + val _: Ref^{c} = x3 val y3 = x3.copy() - val yc3: {c} Ref = y3 + val yc3: Ref^{c} = y3 val y4 = y3 match case Ref(xx) => xx - val y4c: {x3} () -> Unit = y4 + val y4c: () ->{x3} Unit = y4 diff --git a/tests/pos-custom-args/captures/cc-dep-param.scala b/tests/pos-custom-args/captures/cc-dep-param.scala new file mode 100644 index 000000000000..1440cd4d7d40 --- /dev/null +++ b/tests/pos-custom-args/captures/cc-dep-param.scala @@ -0,0 +1,8 @@ +import language.experimental.captureChecking + +trait Foo[T] +def test(): Unit = + val a: Foo[Int]^ = ??? + val useA: () ->{a} Unit = ??? + def foo[X](x: Foo[X]^, op: () ->{x} Unit): Unit = ??? + foo(a, useA) diff --git a/tests/pos-custom-args/captures/cc-expand.scala b/tests/pos-custom-args/captures/cc-expand.scala index 87b2c34caf5f..1bed7b1cf001 100644 --- a/tests/pos-custom-args/captures/cc-expand.scala +++ b/tests/pos-custom-args/captures/cc-expand.scala @@ -5,11 +5,11 @@ object Test: class B class C class CTC - type CT = CTC @retains(caps.*) + type CT = CTC @retains(caps.cap) def test(ct: CT, dt: CT) = - def x0: A -> {ct} B = ??? + def x0: A -> B^{ct} = ??? def x1: A -> B @retains(ct) = ??? def x2: A -> B -> C @retains(ct) = ??? 
diff --git a/tests/pos-custom-args/captures/cc-this.scala b/tests/pos-custom-args/captures/cc-this.scala index 77414fa9b8c0..2124ee494041 100644 --- a/tests/pos-custom-args/captures/cc-this.scala +++ b/tests/pos-custom-args/captures/cc-this.scala @@ -5,7 +5,7 @@ def eff(using Cap): Unit = () def test(using Cap) = class C(val x: () => Int): - val y: {*} C = this + val y: C^ = this def f = () => eff @@ -14,4 +14,4 @@ def test(using Cap) = def c1 = new C(f) def c2 = c1 def c3 = c2.y - val _ = c3: {*} C + val _ = c3: C^ diff --git a/tests/pos-custom-args/captures/classes.scala b/tests/pos-custom-args/captures/classes.scala index f14a7e6dd84e..bc827dcfc67d 100644 --- a/tests/pos-custom-args/captures/classes.scala +++ b/tests/pos-custom-args/captures/classes.scala @@ -1,22 +1,22 @@ import annotation.retains class B -type Cap = {*} B +type Cap = B^ class C(val n: Cap): - this: {n} C => - def foo(): {n} B = n + this: C^{n} => + def foo(): B^{n} = n def test(x: Cap, y: Cap, z: Cap) = val c0 = C(x) - val c1: {x} C {val n: {x} B} = c0 + val c1: C{val n: B^{x}}^{x} = c0 val d = c1.foo() - d: {x} B + d: B^{x} val c2 = if ??? then C(x) else C(y) val c2a = identity(c2) - val c3: {x, y} C { val n: {x, y} B } = c2 + val c3: C{ val n: B^{x, y} }^{x, y} = c2 val d1 = c3.foo() - d1: B @retains(x, y) + d1: B^{x, y} class Local: @@ -29,7 +29,7 @@ def test(x: Cap, y: Cap, z: Cap) = end Local val l = Local() - val l1: {x, y} Local = l + val l1: Local^{x, y} = l val l2 = Local(x) - val l3: {x, y, z} Local = l2 + val l3: Local^{x, y, z} = l2 diff --git a/tests/pos-custom-args/captures/cmp-singleton-2.scala b/tests/pos-custom-args/captures/cmp-singleton-2.scala new file mode 100644 index 000000000000..daaa4add3858 --- /dev/null +++ b/tests/pos-custom-args/captures/cmp-singleton-2.scala @@ -0,0 +1,11 @@ +class T +class A extends T +class B extends T + +def test(tp: T) = + val mapping: Map[A, String] = ??? 
+ + tp match + case a: A => mapping(a) match + case s: String => B() + case null => a diff --git a/tests/pos-custom-args/captures/cmp-singleton.scala b/tests/pos-custom-args/captures/cmp-singleton.scala new file mode 100644 index 000000000000..45b4009f5e89 --- /dev/null +++ b/tests/pos-custom-args/captures/cmp-singleton.scala @@ -0,0 +1,10 @@ +class Denotation +abstract class SingleDenotation extends Denotation +def goRefined: Denotation = + val foo: Denotation = ??? + val joint = foo + joint match + case joint: SingleDenotation => + joint + case _ => + joint \ No newline at end of file diff --git a/tests/pos-custom-args/captures/compare-refined.scala b/tests/pos-custom-args/captures/compare-refined.scala index c60bfee602b3..306f2216ab82 100644 --- a/tests/pos-custom-args/captures/compare-refined.scala +++ b/tests/pos-custom-args/captures/compare-refined.scala @@ -2,11 +2,11 @@ abstract class LIST[+T]: def map[U](f: T => U): LIST[U] = ??? class C -type Cap = {*} C +type Cap = C^ def test(d: Cap) = - val zsc: LIST[{d} Cap -> Unit] = ??? - val a4 = zsc.map[{d} Cap -> Unit]((x: {d} Cap -> Unit) => x) - val a5 = zsc.map[{d} Cap -> Unit](identity[{d} Cap -> Unit]) - val a6 = zsc.map(identity[{d} Cap -> Unit]) + val zsc: LIST[Cap ->{d} Unit] = ??? 
+ val a4 = zsc.map[Cap ->{d} Unit]((x: Cap ->{d} Unit) => x) + val a5 = zsc.map[Cap ->{d} Unit](identity[Cap ->{d} Unit]) + val a6 = zsc.map(identity[Cap ->{d} Unit]) val a7 = zsc.map(identity) diff --git a/tests/pos-custom-args/captures/curried-shorthands.scala b/tests/pos-custom-args/captures/curried-shorthands.scala index 7c58729a3041..c68dc4b5cdbf 100644 --- a/tests/pos-custom-args/captures/curried-shorthands.scala +++ b/tests/pos-custom-args/captures/curried-shorthands.scala @@ -10,15 +10,15 @@ object Test: val f3 = (f: Int => Int) => println(f(3)) (xs: List[Int]) => xs.map(_ + 1) - val f3c: (Int => Int) -> {} List[Int] -> List[Int] = f3 + val f3c: (Int => Int) -> List[Int] ->{} List[Int] = f3 class LL[A]: - def drop(n: Int): {this} LL[A] = ??? + def drop(n: Int): LL[A]^{this} = ??? def test(ct: CanThrow[Exception]) = - def xs: {ct} LL[Int] = ??? + def xs: LL[Int]^{ct} = ??? val ys = xs.drop(_) - val ysc: Int -> {ct} LL[Int] = ys + val ysc: Int -> LL[Int]^{ct} = ys diff --git a/tests/pos-custom-args/captures/filevar.scala b/tests/pos-custom-args/captures/filevar.scala new file mode 100644 index 000000000000..a6cc7ca9ff47 --- /dev/null +++ b/tests/pos-custom-args/captures/filevar.scala @@ -0,0 +1,37 @@ +import language.experimental.captureChecking +import annotation.capability +import compiletime.uninitialized + +object test1: + class File: + def write(x: String): Unit = ??? + + class Service(f: File^): + def log = f.write("log") + + def withFile[T](op: (f: File^) => T): T = + op(new File) + + def test = + withFile: f => + val o = Service(f) + o.log + +object test2: + @capability class IO + + class File: + def write(x: String): Unit = ??? 
+ + class Service(io: IO): + var file: File^{io} = uninitialized + def log = file.write("log") + + def withFile[T](io: IO)(op: (f: File^{io}) => T): T = + op(new File) + + def test(io: IO) = + withFile(io): f => + val o = Service(io) + o.file = f + o.log diff --git a/tests/pos-custom-args/captures/foreach.scala b/tests/pos-custom-args/captures/foreach.scala new file mode 100644 index 000000000000..b7dfc49272a9 --- /dev/null +++ b/tests/pos-custom-args/captures/foreach.scala @@ -0,0 +1,4 @@ +import caps.unsafe.* +def test = + val tasks = new collection.mutable.ArrayBuffer[() => Unit] + val _: Unit = tasks.foreach(((task: () => Unit) => task()).unsafeBoxFunArg) diff --git a/tests/pos-custom-args/captures/gadt-ycheck.scala b/tests/pos-custom-args/captures/gadt-ycheck.scala new file mode 100644 index 000000000000..946763b53e7e --- /dev/null +++ b/tests/pos-custom-args/captures/gadt-ycheck.scala @@ -0,0 +1,14 @@ +package test + +import reflect.ClassTag +import language.experimental.pureFunctions + +object Settings: + val OptionTag: ClassTag[Option[?]] = ClassTag(classOf[Option[?]]) + + class Setting[T: ClassTag](propertyClass: Option[Class[?]]): + def tryToSet() = + def update(value: Any): String = ??? + implicitly[ClassTag[T]] match + case OptionTag => + update(Some(propertyClass.get.getConstructor().newInstance())) diff --git a/tests/pos-custom-args/captures/hk-param.scala b/tests/pos-custom-args/captures/hk-param.scala index b0e894d865e9..bf2f75f29e7f 100644 --- a/tests/pos-custom-args/captures/hk-param.scala +++ b/tests/pos-custom-args/captures/hk-param.scala @@ -1,17 +1,17 @@ /** Concrete collection type: View */ -trait View[+A] extends Itable[A], ILike[A, [X] =>> {*} View[X]]: - override def fromIterable[B](c: {*} Itable[B]): {c} View[B] = ??? +trait View[+A] extends Itable[A], ILike[A, [X] =>> View[X]^]: + override def fromIterable[B](c: Itable[B]^): View[B]^{c} = ??? 
trait IPolyTransforms[+A, +C[A]] extends Any: - def fromIterable[B](coll: {*} Itable[B]): C[B] + def fromIterable[B](coll: Itable[B]^): C[B] -trait ILike[+A, +C[X] <: {*} Itable[X]] extends IPolyTransforms[A, C] +trait ILike[+A, +C[X] <: Itable[X]^] extends IPolyTransforms[A, C] /** Base trait for generic collections */ -trait Itable[+A] extends ItableOnce[A] with ILike[A, {*} Itable] +trait Itable[+A] extends ItableOnce[A] with ILike[A, Itable^] /** Iterator can be used only once */ trait ItableOnce[+A] { - this: {*} ItableOnce[A] => - def iterator: {this} Iterator[A] + this: ItableOnce[A]^ => + def iterator: Iterator[A]^{this} } diff --git a/tests/pos-custom-args/captures/i15749.scala b/tests/pos-custom-args/captures/i15749.scala new file mode 100644 index 000000000000..4959c003a918 --- /dev/null +++ b/tests/pos-custom-args/captures/i15749.scala @@ -0,0 +1,15 @@ +class Unit +object unit extends Unit + +type Top = Any^{cap} + +type LazyVal[T] = Unit ->{cap} T + +class Foo[T](val x: T) + +// Foo[□ Unit => T] +type BoxedLazyVal[T] = Foo[LazyVal[T]] + +def force[A](v: BoxedLazyVal[A]): A = + // Γ ⊢ v.x : □ {cap} Unit -> A + v.x(unit) // was error: (unbox v.x)(unit), where (unbox v.x) should be untypable, now ok \ No newline at end of file diff --git a/tests/pos-custom-args/captures/i15749a.scala b/tests/pos-custom-args/captures/i15749a.scala new file mode 100644 index 000000000000..fe5f4d75dae1 --- /dev/null +++ b/tests/pos-custom-args/captures/i15749a.scala @@ -0,0 +1,21 @@ +class Unit +object u extends Unit + +type Top = Any^ + +type Wrapper[T] = [X] -> (op: T ->{cap} X) -> X + +def test = + + def wrapper[T](x: T): Wrapper[T] = + [X] => (op: T ->{cap} X) => op(x) + + def strictMap[A <: Top, sealed B <: Top](mx: Wrapper[A])(f: A ->{cap} B): Wrapper[B] = + mx((x: A) => wrapper(f(x))) + + def force[A](thunk: Unit ->{cap} A): A = thunk(u) + + def forceWrapper[sealed A](mx: Wrapper[Unit ->{cap} A]): Wrapper[A] = + // Γ ⊢ mx: Wrapper[□ {cap} Unit => A] + // `force` 
should be typed as ∀(□ {cap} Unit -> A) A, but it can not + strictMap[Unit ->{cap} A, A](mx)(t => force[A](t)) // error diff --git a/tests/pos-custom-args/captures/i15922.scala b/tests/pos-custom-args/captures/i15922.scala index 8547f7598eef..23109a3ba8f4 100644 --- a/tests/pos-custom-args/captures/i15922.scala +++ b/tests/pos-custom-args/captures/i15922.scala @@ -2,13 +2,13 @@ trait Cap { def use(): Int } type Id[X] = [T] -> (op: X => T) -> T def mkId[X](x: X): Id[X] = [T] => (op: X => T) => op(x) -def withCap[X](op: ({*} Cap) => X): X = { - val cap: {*} Cap = new Cap { def use() = { println("cap is used"); 0 } } +def withCap[X](op: (Cap^) => X): X = { + val cap: Cap^ = new Cap { def use() = { println("cap is used"); 0 } } val result = op(cap) result } -def leaking(c: {*} Cap): Id[{c} Cap] = mkId(c) +def leaking(c: Cap^): Id[Cap^{c}] = mkId(c) def test = val bad = withCap(leaking) diff --git a/tests/pos-custom-args/captures/i15923-cases.scala b/tests/pos-custom-args/captures/i15923-cases.scala new file mode 100644 index 000000000000..136b8950eb26 --- /dev/null +++ b/tests/pos-custom-args/captures/i15923-cases.scala @@ -0,0 +1,15 @@ +trait Cap { def use(): Int } +type Id[X] = [T] -> (op: X => T) -> T +def mkId[X](x: X): Id[X] = [T] => (op: X => T) => op(x) + +def foo(x: Id[Cap^{cap}]) = { + x(_.use()) // was error, now OK +} + +def bar(io: Cap^{cap}, x: Id[Cap^{io}]) = { + x(_.use()) +} + +def barAlt(a: Cap^{cap}, b: Cap^{cap}, x: Id[Cap]^{a, b}) = { + x(_.use()) +} diff --git a/tests/pos-custom-args/captures/i15925.scala b/tests/pos-custom-args/captures/i15925.scala new file mode 100644 index 000000000000..63b6962ff9f8 --- /dev/null +++ b/tests/pos-custom-args/captures/i15925.scala @@ -0,0 +1,13 @@ +import language.experimental.captureChecking + +class Unit +object u extends Unit + +type Foo[X] = [T] -> (op: X => T) -> T +type Lazy[X] = Unit => X + +def force[X](fx: Foo[Lazy[X]]): X = + fx[X](f => f(u)) + +def force2[X](fx: Foo[Unit => X]): X = + fx[X](f => f(u)) 
diff --git a/tests/pos-custom-args/captures/i16116.scala b/tests/pos-custom-args/captures/i16116.scala new file mode 100644 index 000000000000..0311e744f146 --- /dev/null +++ b/tests/pos-custom-args/captures/i16116.scala @@ -0,0 +1,39 @@ +package x + +import scala.annotation.* +import scala.concurrent.* + +trait CpsMonad[F[_]] { + type Context +} + +object CpsMonad { + type Aux[F[_],C] = CpsMonad[F] { type Context = C } + given CpsMonad[Future] with {} +} + +@experimental +object Test { + + @capability + class CpsTransform[F[_]] { + def await[T](ft: F[T]): T^{ this } = ??? + } + + transparent inline def cpsAsync[F[_]](using m:CpsMonad[F]) = + new Test.InfernAsyncArg + + class InfernAsyncArg[F[_],C](using am:CpsMonad.Aux[F,C]) { + def apply[A](expr: (CpsTransform[F], C) ?=> A): F[A] = ??? + } + + def asyncPlus[F[_]](a:Int, b:F[Int])(using cps: CpsTransform[F]): Int^{ cps } = + a + (cps.await(b).asInstanceOf[Int]) + + def testExample1Future(): Unit = + val fr = cpsAsync[Future] { + val y = asyncPlus(1,Future successful 2).asInstanceOf[Int] + y+1 + } + +} diff --git a/tests/pos-custom-args/captures/i16226.scala b/tests/pos-custom-args/captures/i16226.scala new file mode 100644 index 000000000000..4cd7f0ceea81 --- /dev/null +++ b/tests/pos-custom-args/captures/i16226.scala @@ -0,0 +1,14 @@ +@annotation.capability class Cap + +class LazyRef[T](val elem: () => T): + val get: () ->{elem} T = elem + def map[U](f: T => U): LazyRef[U]^{f, this} = + new LazyRef(() => f(elem())) + +def map[A, B](ref: LazyRef[A]^, f: A => B): LazyRef[B]^{f, ref} = + new LazyRef(() => f(ref.elem())) + +def main(io: Cap) = { + def mapd[A, B]: (LazyRef[A]^{io}, A => B) => LazyRef[B]^ = + (ref1, f1) => map[A, B](ref1, f1) +} diff --git a/tests/pos-custom-args/captures/i16226a.scala b/tests/pos-custom-args/captures/i16226a.scala new file mode 100644 index 000000000000..444d7f2ed0d7 --- /dev/null +++ b/tests/pos-custom-args/captures/i16226a.scala @@ -0,0 +1,13 @@ +class Name +class TermName extends 
Name +class TypeName extends Name + +trait ParamInfo: + type ThisName <: Name + def variance: Long +object ParamInfo: + type Of[N <: Name] = ParamInfo { type ThisName = N } + +def test(tparams1: List[ParamInfo{ type ThisName = TypeName }], tparams2: List[ParamInfo.Of[TypeName]]) = + tparams1.lazyZip(tparams2).map((p1, p2) => p1.variance + p2.variance) + diff --git a/tests/pos-custom-args/captures/i16871.scala b/tests/pos-custom-args/captures/i16871.scala new file mode 100644 index 000000000000..3251a7135346 --- /dev/null +++ b/tests/pos-custom-args/captures/i16871.scala @@ -0,0 +1,3 @@ +import scala.language.experimental.captureChecking + +val f: [X] => Int => Int = [X] => (x: Int) => x \ No newline at end of file diff --git a/tests/pos-custom-args/captures/iterators.scala b/tests/pos-custom-args/captures/iterators.scala index 50be2012e25c..10a7f57cd68f 100644 --- a/tests/pos-custom-args/captures/iterators.scala +++ b/tests/pos-custom-args/captures/iterators.scala @@ -1,19 +1,19 @@ package cctest abstract class Iterator[T]: - thisIterator: {*} Iterator[T] => + thisIterator: Iterator[T]^ => def hasNext: Boolean def next: T - def map(f: {*} T => T): {f, this} Iterator[T] = new Iterator: + def map(f: T => T): Iterator[T]^{f, this} = new Iterator: def hasNext = thisIterator.hasNext def next = f(thisIterator.next) end Iterator class C -type Cap = {*} C +type Cap = C^ -def map[T, U](it: {*} Iterator[T], f: {*} T => U): {it, f} Iterator[U] = new Iterator: +def map[T, U](it: Iterator[T]^, f: T^ => U): Iterator[U]^{it, f} = new Iterator: def hasNext = it.hasNext def next = f(it.next) diff --git a/tests/pos-custom-args/captures/lazylists-exceptions.scala b/tests/pos-custom-args/captures/lazylists-exceptions.scala index 2d4ebb245dca..8f1fba2bf2dc 100644 --- a/tests/pos-custom-args/captures/lazylists-exceptions.scala +++ b/tests/pos-custom-args/captures/lazylists-exceptions.scala @@ -4,52 +4,52 @@ import scala.compiletime.uninitialized trait LzyList[+A]: def isEmpty: Boolean 
def head: A - def tail: {this} LzyList[A] + def tail: LzyList[A]^{this} object LzyNil extends LzyList[Nothing]: def isEmpty = true def head = ??? def tail = ??? -final class LzyCons[+A](hd: A, tl: () => {*} LzyList[A]) extends LzyList[A]: +final class LzyCons[+A](hd: A, tl: () => LzyList[A]^) extends LzyList[A]: private var forced = false - private var cache: {this} LzyList[A] = uninitialized + private var cache: LzyList[A]^{this} = uninitialized private def force = if !forced then { cache = tl(); forced = true } cache def isEmpty = false def head = hd - def tail: {this} LzyList[A] = force + def tail: LzyList[A]^{this} = force end LzyCons -extension [A](xs: {*} LzyList[A]) - def map[B](f: A => B): {xs, f} LzyList[B] = +extension [A](xs: LzyList[A]^) + def map[B](f: A => B): LzyList[B]^{xs, f} = if xs.isEmpty then LzyNil else LzyCons(f(xs.head), () => xs.tail.map(f)) - def filter(p: A => Boolean): {xs, p} LzyList[A] = + def filter(p: A => Boolean): LzyList[A]^{xs, p} = if xs.isEmpty then LzyNil else if p(xs.head) then lazyCons(xs.head, xs.tail.filter(p)) else xs.tail.filter(p) - def concat(ys: {*} LzyList[A]): {xs, ys} LzyList[A] = + def concat(ys: LzyList[A]^): LzyList[A]^{xs, ys} = if xs.isEmpty then ys else xs.head #: xs.tail.concat(ys) - def drop(n: Int): {xs} LzyList[A] = + def drop(n: Int): LzyList[A]^{xs} = if n == 0 then xs else xs.tail.drop(n - 1) end extension extension [A](x: A) - def #:(xs1: => {*} LzyList[A]): {xs1} LzyList[A] = + def #:(xs1: => LzyList[A]^): LzyList[A]^{xs1} = LzyCons(x, () => xs1) -def lazyCons[A](x: A, xs1: => {*} LzyList[A]): {xs1} LzyList[A] = +def lazyCons[A](x: A, xs1: => LzyList[A]^): LzyList[A]^{xs1} = LzyCons(x, () => xs1) -def tabulate[A](n: Int)(gen: Int => A): {gen} LzyList[A] = - def recur(i: Int): {gen} LzyList[A] = +def tabulate[A](n: Int)(gen: Int => A): LzyList[A]^{gen} = + def recur(i: Int): LzyList[A]^{gen} = if i == n then LzyNil else gen(i) #: recur(i + 1) recur(0) @@ -69,16 +69,16 @@ def test(using cap1: 
CanThrow[Ex1], cap2: CanThrow[Ex2]) = x * x def x1 = xs.map(f) - def x1c: {cap1} LzyList[Int] = x1 + def x1c: LzyList[Int]^{cap1} = x1 def x2 = x1.concat(xs.map(g).filter(_ > 0)) - def x2c: {cap1, cap2} LzyList[Int] = x2 + def x2c: LzyList[Int]^{cap1, cap2} = x2 val x3 = tabulate(10) { i => if i > 9 then throw Ex1() i * i } - val x3c: {cap1} LzyList[Int] = x3 + val x3c: LzyList[Int]^{cap1} = x3 class LimitExceeded extends Exception diff --git a/tests/pos-custom-args/captures/lazylists-mono.scala b/tests/pos-custom-args/captures/lazylists-mono.scala index 44ab36ded6a2..c91bedd8f1cf 100644 --- a/tests/pos-custom-args/captures/lazylists-mono.scala +++ b/tests/pos-custom-args/captures/lazylists-mono.scala @@ -1,26 +1,26 @@ class CC -type Cap = {*} CC +type Cap = CC^ //------------------------------------------------- def test(E: Cap) = trait LazyList[+A]: - protected def contents: {E} () -> (A, {E} LazyList[A]) + protected def contents: () ->{E} (A, LazyList[A]^{E}) def isEmpty: Boolean def head: A = contents()._1 - def tail: {E} LazyList[A] = contents()._2 + def tail: LazyList[A]^{E} = contents()._2 - class LazyCons[+A](override val contents: {E} () -> (A, {E} LazyList[A])) + class LazyCons[+A](override val contents: () ->{E} (A, LazyList[A]^{E})) extends LazyList[A]: def isEmpty: Boolean = false object LazyNil extends LazyList[Nothing]: - def contents: {E} () -> (Nothing, LazyList[Nothing]) = ??? + def contents: () ->{E} (Nothing, LazyList[Nothing]) = ??? 
def isEmpty: Boolean = true - extension [A](xs: {E} LazyList[A]) - def map[B](f: {E} A -> B): {E} LazyList[B] = + extension [A](xs: LazyList[A]^{E}) + def map[B](f: A ->{E} B): LazyList[B]^{E} = if xs.isEmpty then LazyNil else val cons = () => (f(xs.head), xs.tail.map(f)) diff --git a/tests/pos-custom-args/captures/lazylists.scala b/tests/pos-custom-args/captures/lazylists.scala index fd130c87cdea..273f21c1fcf3 100644 --- a/tests/pos-custom-args/captures/lazylists.scala +++ b/tests/pos-custom-args/captures/lazylists.scala @@ -1,26 +1,26 @@ class CC -type Cap = {*} CC +type Cap = CC^ trait LazyList[+A]: - this: {*} LazyList[A] => + this: LazyList[A]^ => def isEmpty: Boolean def head: A - def tail: {this} LazyList[A] + def tail: LazyList[A]^{this} object LazyNil extends LazyList[Nothing]: def isEmpty: Boolean = true def head = ??? def tail = ??? -extension [A](xs: {*} LazyList[A]) - def map[B](f: A => B): {xs, f} LazyList[B] = +extension [A](xs: LazyList[A]^) + def map[B](f: A => B): LazyList[B]^{xs, f} = final class Mapped extends LazyList[B]: - this: {xs, f} Mapped => + this: Mapped^{xs, f} => def isEmpty = false def head: B = f(xs.head) - def tail: {this} LazyList[B] = xs.tail.map(f) // OK + def tail: LazyList[B]^{this} = xs.tail.map(f) // OK if xs.isEmpty then LazyNil else new Mapped @@ -30,12 +30,12 @@ def test(cap1: Cap, cap2: Cap) = val xs = class Initial extends LazyList[String]: - this: {cap1} Initial => + this: Initial^{cap1} => def isEmpty = false def head = f("") def tail = LazyNil new Initial - val xsc: {cap1} LazyList[String] = xs + val xsc: LazyList[String]^{cap1} = xs val ys = xs.map(g) - val ysc: {cap1, cap2} LazyList[String] = ys + val ysc: LazyList[String]^{cap1, cap2} = ys diff --git a/tests/pos-custom-args/captures/lazylists1.scala b/tests/pos-custom-args/captures/lazylists1.scala index a59e7c0da12f..62b34f442221 100644 --- a/tests/pos-custom-args/captures/lazylists1.scala +++ b/tests/pos-custom-args/captures/lazylists1.scala @@ -1,28 +1,28 @@ 
class CC -type Cap = {*} CC +type Cap = CC^ trait LazyList[+A]: def isEmpty: Boolean def head: A - def tail: {this} LazyList[A] - def concat[B >: A](other: {*} LazyList[B]): {this, other} LazyList[B] + def tail: LazyList[A]^{this} + def concat[B >: A](other: LazyList[B]^): LazyList[B]^{this, other} object LazyNil extends LazyList[Nothing]: def isEmpty: Boolean = true def head = ??? def tail = ??? - def concat[B](other: {*} LazyList[B]): {other} LazyList[B] = other + def concat[B](other: LazyList[B]^): LazyList[B]^{other} = other -final class LazyCons[+A](x: A)(xs: () => {*} LazyList[A]) extends LazyList[A]: +final class LazyCons[+A](x: A)(xs: () => LazyList[A]^) extends LazyList[A]: def isEmpty = false def head = x - def tail: {this} LazyList[A] = xs() - def concat[B >: A](other: {*} LazyList[B]): {this, other} LazyList[B] = + def tail: LazyList[A]^{this} = xs() + def concat[B >: A](other: LazyList[B]^): LazyList[B]^{this, other} = LazyCons(head)(() => tail.concat(other)) -extension [A](xs: {*} LazyList[A]) - def map[B](f: A => B): {xs, f} LazyList[B] = +extension [A](xs: LazyList[A]^) + def map[B](f: A => B): LazyList[B]^{xs, f} = if xs.isEmpty then LazyNil else LazyCons(f(xs.head))(() => xs.tail.map(f)) @@ -31,9 +31,9 @@ def test(cap1: Cap, cap2: Cap) = def g(x: String): String = if cap2 == cap2 then "" else "a" val xs = new LazyCons("")(() => if f("") == f("") then LazyNil else LazyNil) - val xsc: {cap1} LazyList[String] = xs + val xsc: LazyList[String]^{cap1} = xs val ys = xs.map(g) - val ysc: {cap1, cap2} LazyList[String] = ys + val ysc: LazyList[String]^{cap1, cap2} = ys val zs = new LazyCons("")(() => if g("") == g("") then LazyNil else LazyNil) val as = xs.concat(zs) - val asc: {xs, zs} LazyList[String] = as + val asc: LazyList[String]^{xs, zs} = as diff --git a/tests/pos-custom-args/captures/lazyref.scala b/tests/pos-custom-args/captures/lazyref.scala index 0d988dc3e17b..3dae51b491b4 100644 --- a/tests/pos-custom-args/captures/lazyref.scala +++ 
b/tests/pos-custom-args/captures/lazyref.scala @@ -1,24 +1,24 @@ @annotation.capability class Cap class LazyRef[T](val elem: () => T): - val get: {elem} () -> T = elem - def map[U](f: T => U): {f, this} LazyRef[U] = + val get: () ->{elem} T = elem + def map[U](f: T => U): LazyRef[U]^{f, this} = new LazyRef(() => f(elem())) -def map[A, B](ref: {*} LazyRef[A], f: A => B): {f, ref} LazyRef[B] = +def map[A, B](ref: LazyRef[A]^, f: A => B): LazyRef[B]^{f, ref} = new LazyRef(() => f(ref.elem())) -def mapc[A, B]: (ref: {*} LazyRef[A], f: A => B) => {f, ref} LazyRef[B] = +def mapc[A, B]: (ref: LazyRef[A]^, f: A => B) => LazyRef[B]^{f, ref} = (ref1, f1) => map[A, B](ref1, f1) def test(cap1: Cap, cap2: Cap) = def f(x: Int) = if cap1 == cap1 then x else 0 def g(x: Int) = if cap2 == cap2 then x else 0 val ref1 = LazyRef(() => f(0)) - val ref1c: {cap1} LazyRef[Int] = ref1 + val ref1c: LazyRef[Int]^{cap1} = ref1 val ref2 = map(ref1, g) - val ref2c: {cap2, ref1} LazyRef[Int] = ref2 + val ref2c: LazyRef[Int]^{cap2, ref1} = ref2 val ref3 = ref1.map(g) - val ref3c: {cap2, ref1} LazyRef[Int] = ref3 + val ref3c: LazyRef[Int]^{cap2, ref1} = ref3 val ref4 = (if cap1 == cap2 then ref1 else ref2).map(g) - val ref4c: {cap1, cap2} LazyRef[Int] = ref4 + val ref4c: LazyRef[Int]^{cap1, cap2} = ref4 diff --git a/tests/pos-custom-args/captures/list-encoding.scala b/tests/pos-custom-args/captures/list-encoding.scala index 87630467023e..d959b523404b 100644 --- a/tests/pos-custom-args/captures/list-encoding.scala +++ b/tests/pos-custom-args/captures/list-encoding.scala @@ -7,7 +7,7 @@ type Op[T, C] = (v: T) => (s: C) => C type List[T] = - [C] -> (op: Op[T, C]) -> {op} (s: C) -> C + [C] -> (op: Op[T, C]) -> (s: C) ->{op} C def nil[T]: List[T] = [C] => (op: Op[T, C]) => (s: C) => s @@ -15,7 +15,7 @@ def nil[T]: List[T] = def cons[T](hd: T, tl: List[T]): List[T] = [C] => (op: Op[T, C]) => (s: C) => op(hd)(tl(op)(s)) -def foo(c: {*} Cap) = +def foo(c: Cap^) = def f(x: String @retains(c), y: String 
@retains(c)) = cons(x, cons(y, nil)) def g(x: String @retains(c), y: Any) = diff --git a/tests/pos-custom-args/captures/lists.scala b/tests/pos-custom-args/captures/lists.scala index 6389ec933b32..56473e68d49f 100644 --- a/tests/pos-custom-args/captures/lists.scala +++ b/tests/pos-custom-args/captures/lists.scala @@ -2,7 +2,7 @@ abstract class LIST[+T]: def isEmpty: Boolean def head: T def tail: LIST[T] - def map[U](f: {*} T -> U): LIST[U] = + def map[U](f: T => U): LIST[U] = if isEmpty then NIL else CONS(f(head), tail.map(f)) @@ -28,9 +28,9 @@ def test(c: Cap, d: Cap, e: Cap) = val zs = val z = g CONS(z, ys) - val zsc: LIST[{d, y} Cap -> Unit] = zs + val zsc: LIST[Cap ->{d, y} Unit] = zs val z1 = zs.head - val z1c: {y, d} Cap -> Unit = z1 + val z1c: Cap ->{y, d} Unit = z1 val ys1 = zs.tail val y1 = ys1.head @@ -38,53 +38,53 @@ def test(c: Cap, d: Cap, e: Cap) = def m1[A, B] = (f: A => B) => (xs: LIST[A]) => xs.map(f) - def m1c: (f: String => Int) -> {f} LIST[String] -> LIST[Int] = m1[String, Int] + def m1c: (f: String => Int) -> LIST[String] ->{f} LIST[Int] = m1[String, Int] def m2 = [A, B] => (f: A => B) => (xs: LIST[A]) => xs.map(f) - def m2c: [A, B] -> (f: A => B) -> {f} LIST[A] -> LIST[B] = m2 + def m2c: [A, B] -> (f: A => B) -> LIST[A] ->{f} LIST[B] = m2 def eff[A](x: A) = if x == e then x else x val eff2 = [A] => (x: A) => if x == e then x else x - val a0 = identity[{d, y} Cap -> Unit] - val a0c: {d, y} ({d, y} Cap -> Unit) -> {d, y} Cap -> Unit = a0 - val a1 = zs.map[{d, y} Cap -> Unit](a0) - val a1c: LIST[{d, y} Cap -> Unit] = a1 - val a2 = zs.map[{d, y} Cap -> Unit](identity[{d, y} Cap -> Unit]) - val a2c: LIST[{d, y} Cap -> Unit] = a2 - val a3 = zs.map(identity[{d, y} Cap -> Unit]) - val a3c: LIST[{d, y} Cap -> Unit] = a3 + val a0 = identity[Cap ->{d, y} Unit] + val a0c: (Cap ->{d, y} Unit) ->{d, y} Cap ->{d, y} Unit = a0 + val a1 = zs.map[Cap ->{d, y} Unit](a0) + val a1c: LIST[Cap ->{d, y} Unit] = a1 + val a2 = zs.map[Cap ->{d, y} Unit](identity[Cap 
->{d, y} Unit]) + val a2c: LIST[Cap ->{d, y} Unit] = a2 + val a3 = zs.map(identity[Cap ->{d, y} Unit]) + val a3c: LIST[Cap ->{d, y} Unit] = a3 val a4 = zs.map(identity) - val a4c: LIST[{d, c} Cap -> Unit] = a4 - val a5 = map[{d, y} Cap -> Unit, {d, y} Cap -> Unit](identity)(zs) - val a5c: LIST[{d, c} Cap -> Unit] = a5 - val a6 = m1[{d, y} Cap -> Unit, {d, y} Cap -> Unit](identity)(zs) - val a6c: LIST[{d, c} Cap -> Unit] = a6 + val a4c: LIST[Cap ->{d, c} Unit] = a4 + val a5 = map[Cap ->{d, y} Unit, Cap ->{d, y} Unit](identity)(zs) + val a5c: LIST[Cap ->{d, c} Unit] = a5 + val a6 = m1[Cap ->{d, y} Unit, Cap ->{d, y} Unit](identity)(zs) + val a6c: LIST[Cap ->{d, c} Unit] = a6 - val b0 = eff[{d, y} Cap -> Unit] - val b0c: {e, d, y} ({d, y} Cap -> Unit) -> {d, y} Cap -> Unit = b0 - val b1 = zs.map[{d, y} Cap -> Unit](a0) - val b1c: {e} LIST[{d, y} Cap -> Unit] = b1 - val b2 = zs.map[{d, y} Cap -> Unit](eff[{d, y} Cap -> Unit]) - val b2c: {e} LIST[{d, y} Cap -> Unit] = b2 - val b3 = zs.map(eff[{d, y} Cap -> Unit]) - val b3c: {e} LIST[{d, y} Cap -> Unit] = b3 + val b0 = eff[Cap ->{d, y} Unit] + val b0c: (Cap ->{d, y} Unit) ->{e, d, y} Cap ->{d, y} Unit = b0 + val b1 = zs.map[Cap ->{d, y} Unit](a0) + val b1c: LIST[Cap ->{d, y} Unit]^{e} = b1 + val b2 = zs.map[Cap ->{d, y} Unit](eff[Cap ->{d, y} Unit]) + val b2c: LIST[Cap ->{d, y} Unit]^{e} = b2 + val b3 = zs.map(eff[Cap ->{d, y} Unit]) + val b3c: LIST[Cap ->{d, y} Unit]^{e} = b3 val b4 = zs.map(eff) - val b4c: {e} LIST[{d, c} Cap -> Unit] = b4 - val b5 = map[{d, y} Cap -> Unit, {d, y} Cap -> Unit](eff)(zs) - val b5c: {e} LIST[{d, c} Cap -> Unit] = b5 - val b6 = m1[{d, y} Cap -> Unit, {d, y} Cap -> Unit](eff)(zs) - val b6c: {e} LIST[{d, c} Cap -> Unit] = b6 + val b4c: LIST[Cap ->{d, c} Unit]^{e} = b4 + val b5 = map[Cap ->{d, y} Unit, Cap ->{d, y} Unit](eff)(zs) + val b5c: LIST[Cap ->{d, c} Unit]^{e} = b5 + val b6 = m1[Cap ->{d, y} Unit, Cap ->{d, y} Unit](eff)(zs) + val b6c: LIST[Cap ->{d, c} Unit]^{e} = b6 - val c0 = 
eff2[{d, y} Cap -> Unit] - val c0c: {e, d, y} ({d, y} Cap -> Unit) -> {d, y} Cap -> Unit = c0 - val c1 = zs.map[{d, y} Cap -> Unit](a0) - val c1c: {e} LIST[{d, y} Cap -> Unit] = c1 - val c2 = zs.map[{d, y} Cap -> Unit](eff2[{d, y} Cap -> Unit]) - val c2c: {e} LIST[{d, y} Cap -> Unit] = c2 - val c3 = zs.map(eff2[{d, y} Cap -> Unit]) - val c3c: {e} LIST[{d, y} Cap -> Unit] = c3 + val c0 = eff2[Cap ->{d, y} Unit] + val c0c: (Cap ->{d, y} Unit) ->{e, d, y} Cap ->{d, y} Unit = c0 + val c1 = zs.map[Cap ->{d, y} Unit](a0) + val c1c: LIST[Cap ->{d, y} Unit]^{e} = c1 + val c2 = zs.map[Cap ->{d, y} Unit](eff2[Cap ->{d, y} Unit]) + val c2c: LIST[Cap ->{d, y} Unit]^{e} = c2 + val c3 = zs.map(eff2[Cap ->{d, y} Unit]) + val c3c: LIST[Cap ->{d, y} Unit]^{e} = c3 diff --git a/tests/pos-custom-args/captures/logger.scala b/tests/pos-custom-args/captures/logger.scala index e5b6c834ffe0..3f417da8c1be 100644 --- a/tests/pos-custom-args/captures/logger.scala +++ b/tests/pos-custom-args/captures/logger.scala @@ -7,9 +7,9 @@ class Logger(using fs: FileSystem): def log(s: String): Unit = ??? def test(using fs: FileSystem) = - val l: {fs} Logger = Logger(using fs) + val l: Logger^{fs} = Logger(using fs) l.log("hello world!") - val xs: {l} LazyList[Int] = + val xs: LazyList[Int]^{l} = LazyList.from(1) .map { i => l.log(s"computing elem # $i") @@ -19,25 +19,25 @@ def test(using fs: FileSystem) = trait LazyList[+A]: def isEmpty: Boolean def head: A - def tail: {this} LazyList[A] + def tail: LazyList[A]^{this} object LazyNil extends LazyList[Nothing]: def isEmpty: Boolean = true def head = ??? def tail = ??? 
-final class LazyCons[+T](val x: T, val xs: () => {*} LazyList[T]) extends LazyList[T]: +final class LazyCons[+T](val x: T, val xs: () => LazyList[T]^) extends LazyList[T]: def isEmpty = false def head = x - def tail: {this} LazyList[T] = xs() + def tail: LazyList[T]^{this} = xs() end LazyCons extension [A](x: A) - def #::(xs1: => {*} LazyList[A]): {xs1} LazyList[A] = + def #::(xs1: => LazyList[A]^): LazyList[A]^{xs1} = LazyCons(x, () => xs1) -extension [A](xs: {*} LazyList[A]) - def map[B](f: A => B): {xs, f} LazyList[B] = +extension [A](xs: LazyList[A]^) + def map[B](f: A => B): LazyList[B]^{xs, f} = if xs.isEmpty then LazyNil else f(xs.head) #:: xs.tail.map(f) @@ -50,17 +50,17 @@ class Pair[+A, +B](x: A, y: B): def snd: B = y def test2(ct: CanThrow[Exception], fs: FileSystem) = - def x: {ct} Int -> String = ??? - def y: {fs} Logger = ??? + def x: Int ->{ct} String = ??? + def y: Logger^{fs} = ??? def p = Pair(x, y) def f = () => p.fst /* - val l1: {*} Int -> String = ??? - val l2: {c} Object = ??? + val l1: Int => String = ??? + val l2: Object^{c} = ??? val pd = () => Pair(l1, l2) - val p2: Pair[{*} Int -> String, {c} Object] = pd() + val p2: Pair[Int => String, Object]^{c} = pd() val hd = () => p2.fst */ \ No newline at end of file diff --git a/tests/pos-custom-args/captures/matchtypes.scala b/tests/pos-custom-args/captures/matchtypes.scala new file mode 100644 index 000000000000..b2442277f1f7 --- /dev/null +++ b/tests/pos-custom-args/captures/matchtypes.scala @@ -0,0 +1,10 @@ +type HEAD[X <: NonEmptyTuple] = X match { + case x *: (_ <: NonEmptyTuple) => x +} + +inline def head[A <: NonEmptyTuple](x: A): HEAD[A] = null.asInstanceOf[HEAD[A]] + +def show[A, T <: Tuple](x: A *: T) = + show1(head(x)) + show1(x.head) +def show1[A](x: A): String = ??? 
\ No newline at end of file diff --git a/tests/pos-custom-args/captures/nested-classes.scala b/tests/pos-custom-args/captures/nested-classes.scala new file mode 100644 index 000000000000..b16fc4365183 --- /dev/null +++ b/tests/pos-custom-args/captures/nested-classes.scala @@ -0,0 +1,21 @@ +import language.experimental.captureChecking +import annotation.{capability, constructorOnly} + +@capability class IO +class Blah +class Pkg(using @constructorOnly io: IO): + class Foo: + def m(foo: Blah^{io}) = ??? +class Pkg2(using io: IO): + class Foo: + def m(foo: Blah^{io}): Any = io; ??? + +def main(using io: IO) = + val pkg = Pkg() + val f = pkg.Foo() + f.m(???) + val pkg2 = Pkg2() + val f2 = pkg2.Foo() + f2.m(???) + + diff --git a/tests/pos-custom-args/captures/nonvariant-inf.scala b/tests/pos-custom-args/captures/nonvariant-inf.scala index 6569f35042e8..4798f98c9fce 100644 --- a/tests/pos-custom-args/captures/nonvariant-inf.scala +++ b/tests/pos-custom-args/captures/nonvariant-inf.scala @@ -3,7 +3,7 @@ trait Iterable[+A] /** Base trait for instances that can construct a collection from an iterable */ trait FromIterable { - type C[X] <: {*} Iterable[X] - def fromIterable[B](it: {*} Iterable[B]): {it} C[B] + type C[X] <: Iterable[X]^ + def fromIterable[B](it: Iterable[B]^): C[B]^{it} def empty[A]: C[A] = fromIterable(??? : Iterable[A]) } diff --git a/tests/pos-custom-args/captures/override-adapt-box-pos-alt.scala b/tests/pos-custom-args/captures/override-adapt-box-pos-alt.scala new file mode 100644 index 000000000000..bb6b4030dbff --- /dev/null +++ b/tests/pos-custom-args/captures/override-adapt-box-pos-alt.scala @@ -0,0 +1,17 @@ +import language.experimental.captureChecking + +class IO + +abstract class A[X] { + def foo(x: Unit): X + def bar(op: X => Int): Int +} + +class C + +def test(io: IO^) = { + class B extends A[C^{io}] { // X =:= {io} C + def foo(x: Unit): C^{io} = ??? 
+ def bar(op: (C^{io}) => Int): Int = 0 + } +} diff --git a/tests/pos-custom-args/captures/override-adapt-box-pos.scala b/tests/pos-custom-args/captures/override-adapt-box-pos.scala new file mode 100644 index 000000000000..9adaec6896cf --- /dev/null +++ b/tests/pos-custom-args/captures/override-adapt-box-pos.scala @@ -0,0 +1,19 @@ +import language.experimental.captureChecking + +class IO + +abstract class A[X, Y] { + def foo(x: Unit): X + def bar(x: Int, y: IO^{}): X + def baz(x: Y): X +} + +class C + +def test(io: IO^) = { + class B extends A[C^{io}, C^{}] { // X =:= {io} C + override def foo(x: Unit): C^{io} = ??? + override def bar(x: Int, y: IO^{}): C^{io} = ??? + override def baz(x: C^{}): C^{io} = ??? + } +} diff --git a/tests/pos-custom-args/captures/overrides.scala b/tests/pos-custom-args/captures/overrides.scala index 66f19726ffa7..ac5b9cd9ddc4 100644 --- a/tests/pos-custom-args/captures/overrides.scala +++ b/tests/pos-custom-args/captures/overrides.scala @@ -1,4 +1,4 @@ -import caps.* +import caps.cap abstract class Foo: def foo: () => Unit = () => () @@ -12,15 +12,3 @@ class Bar extends Foo: class Baz extends Bar: override def foo = () => println("baz") override def bar = "baz" - //override def toString = bar - -abstract class Message: - protected def msg: String - override def toString = msg - -abstract class SyntaxMsg extends Message - -class CyclicInheritance extends SyntaxMsg: - def msg = "cyclic" - - diff --git a/tests/pos-custom-args/captures/overrides/A.scala b/tests/pos-custom-args/captures/overrides/A.scala new file mode 100644 index 000000000000..6a81f8562164 --- /dev/null +++ b/tests/pos-custom-args/captures/overrides/A.scala @@ -0,0 +1,4 @@ +abstract class Message: + lazy val message: String = ??? 
+ def rawMessage = message + diff --git a/tests/pos-custom-args/captures/overrides/B.scala b/tests/pos-custom-args/captures/overrides/B.scala new file mode 100644 index 000000000000..ce4a3f20f1d2 --- /dev/null +++ b/tests/pos-custom-args/captures/overrides/B.scala @@ -0,0 +1,6 @@ + +abstract class SyntaxMsg extends Message + +class CyclicInheritance extends SyntaxMsg + + diff --git a/tests/pos-custom-args/captures/pairs.scala b/tests/pos-custom-args/captures/pairs.scala index 9c8ec003d28d..bc20d20ffd92 100644 --- a/tests/pos-custom-args/captures/pairs.scala +++ b/tests/pos-custom-args/captures/pairs.scala @@ -12,21 +12,21 @@ object Generic: def g(x: Cap): Unit = if d == x then () val p = Pair(f, g) val x1 = p.fst - val x1c: {c} Cap -> Unit = x1 + val x1c: Cap ->{c} Unit = x1 val y1 = p.snd - val y1c: {d} Cap -> Unit = y1 + val y1c: Cap ->{d} Unit = y1 object Monomorphic: - class Pair(x: Cap => Unit, y: {*} Cap -> Unit): - def fst: {x} Cap -> Unit = x - def snd: {y} Cap -> Unit = y + class Pair(x: Cap => Unit, y: Cap => Unit): + def fst: Cap ->{x} Unit = x + def snd: Cap ->{y} Unit = y def test(c: Cap, d: Cap) = def f(x: Cap): Unit = if c == x then () def g(x: Cap): Unit = if d == x then () val p = Pair(f, g) val x1 = p.fst - val x1c: {c} Cap -> Unit = x1 + val x1c: Cap ->{c} Unit = x1 val y1 = p.snd - val y1c: {d} Cap -> Unit = y1 + val y1c: Cap ->{d} Unit = y1 diff --git a/tests/pos-custom-args/captures/selftype-alias.scala b/tests/pos-custom-args/captures/selftype-alias.scala new file mode 100644 index 000000000000..180c7b27b146 --- /dev/null +++ b/tests/pos-custom-args/captures/selftype-alias.scala @@ -0,0 +1,8 @@ +import language.experimental.captureChecking + +type AnyIterableOnce[A] = IterableOnce[A]^ + +/** Iterator can be used only once */ +trait IterableOnce[+A]: + this: AnyIterableOnce[A] => + def iterator: Iterator[A]^{this} diff --git a/tests/pos-custom-args/captures/selftypes.scala b/tests/pos-custom-args/captures/selftypes.scala new file mode 100644 
index 000000000000..c1b8eefce506 --- /dev/null +++ b/tests/pos-custom-args/captures/selftypes.scala @@ -0,0 +1,15 @@ + import annotation.constructorOnly + trait A: + self: A => + def foo: Int + + abstract class B extends A: + def foo: Int + + class C extends B: + def foo = 1 + def derived = this + + class D(@constructorOnly op: Int => Int) extends C: + val x = 1//op(1) + diff --git a/tests/pos-custom-args/captures/stack-alloc.scala b/tests/pos-custom-args/captures/stack-alloc.scala index 03b6708a3119..7013f978c281 100644 --- a/tests/pos-custom-args/captures/stack-alloc.scala +++ b/tests/pos-custom-args/captures/stack-alloc.scala @@ -5,7 +5,7 @@ class Pooled val stack = mutable.ArrayBuffer[Pooled]() var nextFree = 0 -def withFreshPooled[T](op: ({*} Pooled) => T): T = +def withFreshPooled[T](op: Pooled^ => T): T = if nextFree >= stack.size then stack.append(new Pooled) val pooled = stack(nextFree) nextFree = nextFree + 1 diff --git a/tests/pos-custom-args/captures/trickyTrailingUpArrow.scala b/tests/pos-custom-args/captures/trickyTrailingUpArrow.scala new file mode 100644 index 000000000000..71b663de5354 --- /dev/null +++ b/tests/pos-custom-args/captures/trickyTrailingUpArrow.scala @@ -0,0 +1,9 @@ +object Test: + var x = 0 + type FreshContext = String^ + x += 1 + + inline def ctx(using c: String) = c + + val y: String^ -> Unit = ??? + val z: String^ ?-> Unit = ??? 
diff --git a/tests/pos-custom-args/captures/try.scala b/tests/pos-custom-args/captures/try.scala index b2dcf6f11dd0..05c41be69001 100644 --- a/tests/pos-custom-args/captures/try.scala +++ b/tests/pos-custom-args/captures/try.scala @@ -2,7 +2,7 @@ import annotation.retains import language.experimental.erasedDefinitions class CT[E <: Exception] -type CanThrow[E <: Exception] = CT[E] @retains(caps.*) +type CanThrow[E <: Exception] = CT[E] @retains(caps.cap) infix type throws[R, E <: Exception] = (erased CanThrow[E]) ?-> R diff --git a/tests/pos-custom-args/captures/unsafe-unbox.scala b/tests/pos-custom-args/captures/unsafe-unbox.scala index e846a7db1b69..63a32d58f640 100644 --- a/tests/pos-custom-args/captures/unsafe-unbox.scala +++ b/tests/pos-custom-args/captures/unsafe-unbox.scala @@ -1,4 +1,7 @@ -import caps.* +import annotation.unchecked.uncheckedCaptures def test = + @uncheckedCaptures var finalizeActions = collection.mutable.ListBuffer[() => Unit]() - val action = finalizeActions.remove(0).unsafeUnbox + val action = finalizeActions.remove(0) + + diff --git a/tests/pos-custom-args/captures/vars.scala b/tests/pos-custom-args/captures/vars.scala index 12721158a2bb..ccf2cd587eb1 100644 --- a/tests/pos-custom-args/captures/vars.scala +++ b/tests/pos-custom-args/captures/vars.scala @@ -5,13 +5,13 @@ def test(cap1: Cap, cap2: Cap) = var x = f val y = x val z = () => if x("") == "" then "a" else "b" - val zc: {cap1} () -> String = z + val zc: () ->{cap1} String = z val z2 = () => { x = identity } - val z2c: {cap1} () -> Unit = z2 + val z2c: () ->{cap1} Unit = z2 class Ref: - var elem: {cap1} String -> String = null + var elem: String ->{cap1} String = null val r = Ref() r.elem = f - val fc: {cap1} String -> String = r.elem + val fc: String ->{cap1} String = r.elem diff --git a/tests/pos-custom-args/captures/vars1.scala b/tests/pos-custom-args/captures/vars1.scala index 8c2f2cb8b5d5..56548e5a9c30 100644 --- a/tests/pos-custom-args/captures/vars1.scala +++ 
b/tests/pos-custom-args/captures/vars1.scala @@ -1,9 +1,12 @@ -import caps.* +import caps.unsafe.* +import annotation.unchecked.uncheckedCaptures object Test: type ErrorHandler = (Int, String) => Unit + @uncheckedCaptures var defaultIncompleteHandler: ErrorHandler = ??? + @uncheckedCaptures var incompleteHandler: ErrorHandler = defaultIncompleteHandler val x = incompleteHandler.unsafeUnbox val _ : ErrorHandler = x @@ -11,15 +14,17 @@ object Test: def defaultIncompleteHandler1(): ErrorHandler = ??? val defaultIncompleteHandler2: ErrorHandler = ??? + @uncheckedCaptures var incompleteHandler1: ErrorHandler = defaultIncompleteHandler1() + @uncheckedCaptures var incompleteHandler2: ErrorHandler = defaultIncompleteHandler2 - var incompleteHandler3: ErrorHandler = defaultIncompleteHandler1().unsafeBox - var incompleteHandler4: ErrorHandler = defaultIncompleteHandler2.unsafeBox - private var incompleteHandler5 = defaultIncompleteHandler1() - private var incompleteHandler6 = defaultIncompleteHandler2 - private var incompleteHandler7 = defaultIncompleteHandler1().unsafeBox - private var incompleteHandler8 = defaultIncompleteHandler2.unsafeBox + @uncheckedCaptures + private var incompleteHandler7 = defaultIncompleteHandler1() + @uncheckedCaptures + private var incompleteHandler8 = defaultIncompleteHandler2 incompleteHandler1 = defaultIncompleteHandler2 - incompleteHandler1 = defaultIncompleteHandler2.unsafeBox - val saved = incompleteHandler1.unsafeUnbox + incompleteHandler1 = defaultIncompleteHandler2 + val saved = incompleteHandler1 + + diff --git a/tests/pos-custom-args/erased/erased-class-as-args.scala b/tests/pos-custom-args/erased/erased-class-as-args.scala new file mode 100644 index 000000000000..74c827fbd54b --- /dev/null +++ b/tests/pos-custom-args/erased/erased-class-as-args.scala @@ -0,0 +1,22 @@ +erased class A + +erased class B(val x: Int) extends A + +type T = (x: A, y: Int) => Int + +type TSub[-T <: A] = (erased x: T, y: Int) => Int + +def useT(f: T) = f(new A, 
5) + +def useTSub(f: TSub[B]) = f(new B(5), 5) + +@main def Test() = + val tInfer = (x: A, y: Int) => y + 1 + val tExpl: T = (x, y) => y + 1 + assert(useT((erased x, y) => y + 1) == 6) + assert(useT(tInfer) == 6) + assert(useT(tExpl) == 6) + + val tSub: TSub[A] = (x, y) => y + 1 + assert(useT(tSub) == 6) + assert(useTSub(tSub) == 6) diff --git a/tests/pos-custom-args/erased/erased-soft-keyword.scala b/tests/pos-custom-args/erased/erased-soft-keyword.scala new file mode 100644 index 000000000000..fdb884628c7d --- /dev/null +++ b/tests/pos-custom-args/erased/erased-soft-keyword.scala @@ -0,0 +1,18 @@ +def f1(x: Int, erased y: Int) = 0 +def f2(x: Int, erased: Int) = 0 +inline def f3(x: Int, inline erased: Int) = 0 +def f4(x: Int, erased inline: Int) = 0 +// inline def f5(x: Int, erased inline y: Int) = 0 // should parse but rejected later + +def f6(using erased y: Int) = 0 +def f7(using erased: Int) = 0 +inline def f8(using inline erased: Int) = 0 +def f9(using erased inline: Int) = 0 +// inline def f10(using erased inline x: Int) = 0 // should parse but rejected later +def f11(using erased Int) = 0 + +val v1 = (erased: Int) => 0 +val v2: Int => Int = erased => 0 +val v3 = (erased x: Int) => 0 +val v4: (erased Int) => Int = (erased x) => 0 +val v5: (erased: Int) => Int = x => 0 diff --git a/tests/pos-custom-args/erased/tailrec.scala b/tests/pos-custom-args/erased/tailrec.scala new file mode 100644 index 000000000000..cebcf4785c7a --- /dev/null +++ b/tests/pos-custom-args/erased/tailrec.scala @@ -0,0 +1,20 @@ +import scala.annotation.tailrec + +erased class Foo1 +class Foo2 + +@tailrec +final def test1(n: Int, acc: Int): (Foo1, Foo2) ?=> Int = + if n <= 0 then acc + else test1(n - 1, acc * n) + +@tailrec +final def test2(n: Int, acc: Int): Foo1 ?=> Int = + if n <= 0 then acc + else test2(n - 1, acc * n) + +@main def Test() = + given Foo1 = Foo1() + given Foo2 = Foo2() + test1(10, 0) + test2(10, 0) diff --git 
a/tests/pos-custom-args/no-experimental/dotty-experimental.scala b/tests/pos-custom-args/no-experimental/dotty-experimental.scala index 74e79c85eaaa..72d16ddd9b15 100644 --- a/tests/pos-custom-args/no-experimental/dotty-experimental.scala +++ b/tests/pos-custom-args/no-experimental/dotty-experimental.scala @@ -1,6 +1,6 @@ package dotty.tools object test { - val x = caps.unsafeBox + val x = caps.cap } diff --git a/tests/pos-custom-args/no-experimental/experimental-imports-empty.scala b/tests/pos-custom-args/no-experimental/experimental-imports-empty.scala index bb27629a6062..998086c5d9a4 100644 --- a/tests/pos-custom-args/no-experimental/experimental-imports-empty.scala +++ b/tests/pos-custom-args/no-experimental/experimental-imports-empty.scala @@ -1,5 +1,4 @@ import annotation.experimental -import language.experimental.fewerBraces import language.experimental.namedTypeArguments import language.experimental.genericNumberLiterals import language.experimental.erasedDefinitions diff --git a/tests/pos-special/adhoc-extension/A.scala b/tests/pos-custom-args/strict/adhoc-extension/A.scala similarity index 100% rename from tests/pos-special/adhoc-extension/A.scala rename to tests/pos-custom-args/strict/adhoc-extension/A.scala diff --git a/tests/pos-special/adhoc-extension/B.scala b/tests/pos-custom-args/strict/adhoc-extension/B.scala similarity index 100% rename from tests/pos-special/adhoc-extension/B.scala rename to tests/pos-custom-args/strict/adhoc-extension/B.scala diff --git a/tests/pos-custom-args/i10383.scala b/tests/pos-custom-args/strict/i10383.scala similarity index 100% rename from tests/pos-custom-args/i10383.scala rename to tests/pos-custom-args/strict/i10383.scala diff --git a/tests/pos-custom-args/strict/i16808.scala b/tests/pos-custom-args/strict/i16808.scala new file mode 100644 index 000000000000..602ceed94161 --- /dev/null +++ b/tests/pos-custom-args/strict/i16808.scala @@ -0,0 +1,2 @@ +def collectKeys[A, B, C](xs: Map[A, B])(f: PartialFunction[A, C]): 
Map[C, B] = + xs.collect{ case (f(c) , b) => (c, b) } \ No newline at end of file diff --git a/tests/pos-special/i7296.scala b/tests/pos-custom-args/strict/i7296.scala similarity index 100% rename from tests/pos-special/i7296.scala rename to tests/pos-custom-args/strict/i7296.scala diff --git a/tests/pos-java16+/java-records/FromScala.scala b/tests/pos-java16+/java-records/FromScala.scala new file mode 100644 index 000000000000..67747e658432 --- /dev/null +++ b/tests/pos-java16+/java-records/FromScala.scala @@ -0,0 +1,43 @@ +object C: + def useR1: Unit = + // constructor signature + val r = R1(123, "hello") + + // accessors + val i: Int = r.i + val s: String = r.s + + // methods + val iRes: Int = r.getInt() + val sRes: String = r.getString() + + // supertype + val record: java.lang.Record = r + + def useR2: Unit = + // constructor signature + val r2 = R2.R(123, "hello") + + // accessors signature + val i: Int = r2.i + val s: String = r2.s + + // method + val i2: Int = r2.getInt + + // supertype + val isIntLike: IntLike = r2 + val isRecord: java.lang.Record = r2 + + def useR3 = + // constructor signature + val r3 = R3(123, 42L, "hi") + new R3("hi", 123) + // accessors signature + val i: Int = r3.i + val l: Long = r3.l + val s: String = r3.s + // method + val l2: Long = r3.l(43L, 44L) + // supertype + val isRecord: java.lang.Record = r3 diff --git a/tests/pos-java16+/java-records/IntLike.scala b/tests/pos-java16+/java-records/IntLike.scala new file mode 100644 index 000000000000..1f760018a975 --- /dev/null +++ b/tests/pos-java16+/java-records/IntLike.scala @@ -0,0 +1,2 @@ +trait IntLike: + def getInt: Int diff --git a/tests/pos-java16+/java-records/R1.java b/tests/pos-java16+/java-records/R1.java new file mode 100644 index 000000000000..832d288547ab --- /dev/null +++ b/tests/pos-java16+/java-records/R1.java @@ -0,0 +1,9 @@ +public record R1(int i, String s) { + public String getString() { + return s + i; + } + + public int getInt() { + return 0; + } +} diff --git 
a/tests/pos-java16+/java-records/R2.java b/tests/pos-java16+/java-records/R2.java new file mode 100644 index 000000000000..4b3f881628b9 --- /dev/null +++ b/tests/pos-java16+/java-records/R2.java @@ -0,0 +1,13 @@ +public class R2 { + final record R(int i, String s) implements IntLike { + public int getInt() { + return i; + } + + // Canonical constructor + public R(int i, java.lang.String s) { + this.i = i; + this.s = s.intern(); + } + } +} diff --git a/tests/pos-java16+/java-records/R3.java b/tests/pos-java16+/java-records/R3.java new file mode 100644 index 000000000000..616481a0ae1f --- /dev/null +++ b/tests/pos-java16+/java-records/R3.java @@ -0,0 +1,22 @@ +public record R3(int i, long l, String s) { + + // User-specified accessor + public int i() { + return i + 1; // evil >:) + } + + // Not an accessor - too many parameters + public long l(long a1, long a2) { + return a1 + a2; + } + + // Secondary constructor + public R3(String s, int i) { + this(i, 42L, s); + } + + // Compact constructor + public R3 { + s = s.intern(); + } +} \ No newline at end of file diff --git a/tests/pos-macros/annot-in-object/Macro_1.scala b/tests/pos-macros/annot-in-object/Macro_1.scala new file mode 100644 index 000000000000..52c5daec1f29 --- /dev/null +++ b/tests/pos-macros/annot-in-object/Macro_1.scala @@ -0,0 +1,12 @@ +import scala.annotation.{experimental, MacroAnnotation} +import scala.quoted._ + +object Foo: + @experimental + class void extends MacroAnnotation: + def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = List(tree) + + object Bar: + @experimental + class void extends MacroAnnotation: + def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = List(tree) diff --git a/tests/pos-macros/annot-in-object/Test_2.scala b/tests/pos-macros/annot-in-object/Test_2.scala new file mode 100644 index 000000000000..4fc43d4f2e41 --- /dev/null +++ b/tests/pos-macros/annot-in-object/Test_2.scala @@ -0,0 +1,3 @@ 
+@Foo.void +@Foo.Bar.void +def test = 0 diff --git a/tests/pos-macros/annot-suspend/Macro_1.scala b/tests/pos-macros/annot-suspend/Macro_1.scala new file mode 100644 index 000000000000..afbf05e568c7 --- /dev/null +++ b/tests/pos-macros/annot-suspend/Macro_1.scala @@ -0,0 +1,7 @@ +import scala.annotation.{experimental, MacroAnnotation} +import scala.quoted._ + +@experimental +class void extends MacroAnnotation: + def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + List(tree) diff --git a/tests/pos-macros/annot-suspend/Test_2.scala b/tests/pos-macros/annot-suspend/Test_2.scala new file mode 100644 index 000000000000..ee8529fa4414 --- /dev/null +++ b/tests/pos-macros/annot-suspend/Test_2.scala @@ -0,0 +1,2 @@ +@void +def test = 0 diff --git a/tests/pos-macros/annot-then-inline/Macro_1.scala b/tests/pos-macros/annot-then-inline/Macro_1.scala new file mode 100644 index 000000000000..8e966be862cd --- /dev/null +++ b/tests/pos-macros/annot-then-inline/Macro_1.scala @@ -0,0 +1,16 @@ +import scala.annotation.{experimental, MacroAnnotation} +import scala.quoted._ + +@experimental +class useInlinedIdentity extends MacroAnnotation { + def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + import quotes.reflect.* + tree match + case DefDef(name, params, tpt, Some(rhs)) => + val newRhs = + given Quotes = tree.symbol.asQuotes + '{ inlinedIdentity(${rhs.asExpr}) }.asTerm + List(DefDef.copy(tree)(name, params, tpt, Some(newRhs))) +} + +inline def inlinedIdentity(x: Any): x.type = x diff --git a/tests/pos-macros/annot-then-inline/Test_2.scala b/tests/pos-macros/annot-then-inline/Test_2.scala new file mode 100644 index 000000000000..3e72fcaaae1d --- /dev/null +++ b/tests/pos-macros/annot-then-inline/Test_2.scala @@ -0,0 +1,2 @@ +@useInlinedIdentity +def test = 0 diff --git a/tests/pos-macros/exprSummonWithTypeVar/Macro_1.scala b/tests/pos-macros/exprSummonWithTypeVar/Macro_1.scala new file mode 
100644 index 000000000000..72bcbe8b6515 --- /dev/null +++ b/tests/pos-macros/exprSummonWithTypeVar/Macro_1.scala @@ -0,0 +1,13 @@ +import scala.compiletime.{erasedValue, summonFrom} + +import scala.quoted._ + +inline given summonAfterTypeMatch[T]: Any = + ${ summonAfterTypeMatchExpr[T] } + +private def summonAfterTypeMatchExpr[T: Type](using Quotes): Expr[Any] = + Expr.summon[Foo[T]].get + +trait Foo[T] + +given IntFoo[T <: Int]: Foo[T] = ??? diff --git a/tests/pos-macros/exprSummonWithTypeVar/Test_2.scala b/tests/pos-macros/exprSummonWithTypeVar/Test_2.scala new file mode 100644 index 000000000000..dbf2fd88fe24 --- /dev/null +++ b/tests/pos-macros/exprSummonWithTypeVar/Test_2.scala @@ -0,0 +1 @@ +def test: Unit = summonAfterTypeMatch[Int] diff --git a/tests/pos-macros/hk-quoted-type-patterns/Macro_1.scala b/tests/pos-macros/hk-quoted-type-patterns/Macro_1.scala new file mode 100644 index 000000000000..0d2df1504918 --- /dev/null +++ b/tests/pos-macros/hk-quoted-type-patterns/Macro_1.scala @@ -0,0 +1,17 @@ +import scala.quoted._ + +private def impl(x: Expr[Any])(using Quotes): Expr[Unit] = { + x match + case '{ foo[x] } => + assert(Type.show[x] == "scala.Int", Type.show[x]) + case '{ type f[X]; foo[`f`] } => + assert(Type.show[f] == "[A >: scala.Nothing <: scala.Any] => scala.collection.immutable.List[A]", Type.show[f]) + case '{ type f <: AnyKind; foo[`f`] } => + assert(Type.show[f] == "[K >: scala.Nothing <: scala.Any, V >: scala.Nothing <: scala.Any] => scala.collection.immutable.Map[K, V]", Type.show[f]) + case x => throw MatchError(x.show) + '{} +} + +inline def test(inline x: Any): Unit = ${ impl('x) } + +def foo[T <: AnyKind]: Any = ??? 
diff --git a/tests/pos-macros/hk-quoted-type-patterns/Test_2.scala b/tests/pos-macros/hk-quoted-type-patterns/Test_2.scala new file mode 100644 index 000000000000..3cb9113f2452 --- /dev/null +++ b/tests/pos-macros/hk-quoted-type-patterns/Test_2.scala @@ -0,0 +1,5 @@ +@main +def Test = + test(foo[Int]) + test(foo[List]) + test(foo[Map]) diff --git a/tests/pos-macros/i10127-a.scala b/tests/pos-macros/i10127-a.scala new file mode 100644 index 000000000000..3b9efc2a829d --- /dev/null +++ b/tests/pos-macros/i10127-a.scala @@ -0,0 +1,8 @@ +import scala.quoted.* + +object T { + def impl[A](using t: Type[A])(using Quotes): Expr[Unit] = { + Expr.summon[t.Underlying] + '{} + } +} \ No newline at end of file diff --git a/tests/pos-macros/i11211.scala b/tests/pos-macros/i11211.scala index 2650fa754193..154d8df174e7 100644 --- a/tests/pos-macros/i11211.scala +++ b/tests/pos-macros/i11211.scala @@ -12,7 +12,7 @@ def takeOptionImpl2[T](using Quotes, Type[T]): Unit = '{ def takeOptionImpl[T](o: Expr[Option[T]], default: Expr[T])(using Quotes, Type[T]): Expr[T] = '{ $o match { case Some(t1) => t1 - case None: Option[T] => $default + case None => $default } } diff --git a/tests/pos-macros/i12440.scala b/tests/pos-macros/i12440.scala index 4b4c56fef568..02122be28deb 100644 --- a/tests/pos-macros/i12440.scala +++ b/tests/pos-macros/i12440.scala @@ -5,6 +5,12 @@ trait Mirror: class Eq: + def test0(using Quotes): Unit = '{ + type T + ${ summonType[T]; ??? } + ${ summonType[List[T]]; ??? } + } + def test1(using Quotes): Unit = '{ val m: Mirror = ??? ${ summonType[m.ElemTypes]; ??? 
} diff --git a/tests/pos-macros/i13376a.scala b/tests/pos-macros/i13376a.scala new file mode 100644 index 000000000000..8e746d0e34a8 --- /dev/null +++ b/tests/pos-macros/i13376a.scala @@ -0,0 +1,7 @@ +import scala.quoted.* +trait C: + type T + def foo: T +inline def makro(x: C): x.T = ${ impl[x.T]('x) } +def impl[U: Type](xp: Expr[C { def foo: U }])(using Quotes): Expr[U] = + '{ $xp.foo } diff --git a/tests/pos-macros/i13376b.scala b/tests/pos-macros/i13376b.scala new file mode 100644 index 000000000000..8aa171ff07dd --- /dev/null +++ b/tests/pos-macros/i13376b.scala @@ -0,0 +1,7 @@ +import scala.quoted.* +trait C: + type T + def foo: T +inline def makro(inline x: C): C#T = ${ impl('x) } +def impl[U: Type](xp: Expr[C { def foo: U }])(using Quotes): Expr[U] = + '{ $xp.foo } diff --git a/tests/pos-macros/i14131.scala b/tests/pos-macros/i14131.scala new file mode 100644 index 000000000000..76c01839a17f --- /dev/null +++ b/tests/pos-macros/i14131.scala @@ -0,0 +1,11 @@ +class Dog: + inline given bark(using msg: String = "Woof!"): String = s"bark: $msg" + +class Wolf: + private val dog: Dog = Dog() + export dog.given + +def test = + val w = Wolf() + import w.given + summon[String] diff --git a/tests/pos-macros/i15165a/Macro_1.scala b/tests/pos-macros/i15165a/Macro_1.scala new file mode 100644 index 000000000000..8838d4c06bd1 --- /dev/null +++ b/tests/pos-macros/i15165a/Macro_1.scala @@ -0,0 +1,9 @@ +import scala.quoted.* + +inline def valToFun[T](inline expr: T): T = + ${ impl('expr) } + +def impl[T: Type](expr: Expr[T])(using quotes: Quotes): Expr[T] = + expr match + case '{ { val ident = ($a: α); $rest(ident): T } } => + '{ { (y: α) => $rest(y) }.apply(???) 
} diff --git a/tests/pos-macros/i15165a/Test_2.scala b/tests/pos-macros/i15165a/Test_2.scala new file mode 100644 index 000000000000..f7caa67b2df7 --- /dev/null +++ b/tests/pos-macros/i15165a/Test_2.scala @@ -0,0 +1,4 @@ +def test = valToFun { + val a: Int = 1 + a + 1 +} diff --git a/tests/pos-macros/i15165b/Macro_1.scala b/tests/pos-macros/i15165b/Macro_1.scala new file mode 100644 index 000000000000..5d62db37e313 --- /dev/null +++ b/tests/pos-macros/i15165b/Macro_1.scala @@ -0,0 +1,16 @@ +import scala.quoted.* + +inline def valToFun[T](inline expr: T): T = + ${ impl('expr) } + +def impl[T: Type](expr: Expr[T])(using quotes: Quotes): Expr[T] = + expr match + case '{ { val ident = ($a: α); $rest(ident): T } } => + '{ + { (y: α) => + ${ + val bound = '{ ${ rest }(y) } + Expr.betaReduce(bound) + } + }.apply($a) + } diff --git a/tests/pos-macros/i15165b/Test_2.scala b/tests/pos-macros/i15165b/Test_2.scala new file mode 100644 index 000000000000..f7caa67b2df7 --- /dev/null +++ b/tests/pos-macros/i15165b/Test_2.scala @@ -0,0 +1,4 @@ +def test = valToFun { + val a: Int = 1 + a + 1 +} diff --git a/tests/pos-macros/i15165c/Macro_1.scala b/tests/pos-macros/i15165c/Macro_1.scala new file mode 100644 index 000000000000..036363bf274f --- /dev/null +++ b/tests/pos-macros/i15165c/Macro_1.scala @@ -0,0 +1,9 @@ +import scala.quoted.* + +inline def valToFun[T](inline expr: T): T = + ${ impl('expr) } + +def impl[T: Type](expr: Expr[T])(using quotes: Quotes): Expr[T] = + expr match + case '{ type α; { val ident = ($a: `α`); $rest(ident): `α` & T } } => + '{ { (y: α) => $rest(y) }.apply(???) 
} diff --git a/tests/pos-macros/i15165c/Test_2.scala b/tests/pos-macros/i15165c/Test_2.scala new file mode 100644 index 000000000000..f7caa67b2df7 --- /dev/null +++ b/tests/pos-macros/i15165c/Test_2.scala @@ -0,0 +1,4 @@ +def test = valToFun { + val a: Int = 1 + a + 1 +} diff --git a/tests/pos-macros/i15475.scala b/tests/pos-macros/i15475.scala new file mode 100644 index 000000000000..20993cd46d15 --- /dev/null +++ b/tests/pos-macros/i15475.scala @@ -0,0 +1,13 @@ +def test = + transform { + val a: Seq[Generic[?]] = ??? + a.foreach { to => + to.mthd() + } + } + +transparent inline def transform[T](expr: T): T = ??? + +trait Generic[+T] { + def mthd(): Generic[T] = ??? +} diff --git a/tests/pos-macros/i15475a/Macro_1.scala b/tests/pos-macros/i15475a/Macro_1.scala new file mode 100644 index 000000000000..b1bd676e7e17 --- /dev/null +++ b/tests/pos-macros/i15475a/Macro_1.scala @@ -0,0 +1,17 @@ +package x + +import scala.quoted.* + + +transparent inline def xtransform[T](inline expr:T) = ${ + X.transform('expr) +} + +object X { + + def transform[T:Type](x: Expr[T])(using Quotes):Expr[T] = { + import quotes.reflect.* + x + } + +} diff --git a/tests/pos-macros/i15475a/Test_2.scala b/tests/pos-macros/i15475a/Test_2.scala new file mode 100644 index 000000000000..7757a14950de --- /dev/null +++ b/tests/pos-macros/i15475a/Test_2.scala @@ -0,0 +1,15 @@ +package x + +def hello = { + xtransform { + val a: Seq[Generic[?]] = null + a + .foreach { to => + to.mthd() + } + } +} + +trait Generic[+T] { + def mthd(): Generic[T] = this +} diff --git a/tests/pos-macros/i15709.scala b/tests/pos-macros/i15709.scala new file mode 100644 index 000000000000..845ed35d1a55 --- /dev/null +++ b/tests/pos-macros/i15709.scala @@ -0,0 +1,4 @@ +import quoted.* + +inline def foo(s: Singleton): Unit = ${ fooImpl('s) } +def fooImpl(s: Expr[Singleton])(using Quotes) = '{} diff --git a/tests/pos-macros/i15779/Macro_1.scala b/tests/pos-macros/i15779/Macro_1.scala new file mode 100644 index 
000000000000..8bb98ab31553 --- /dev/null +++ b/tests/pos-macros/i15779/Macro_1.scala @@ -0,0 +1,30 @@ +import scala.quoted._ +import scala.deriving.Mirror + +trait Encoder[-A] + +trait PrimitiveEncoder[A] extends Encoder[A] + +given intOpt: PrimitiveEncoder[Option[Int]] with {} + +given primitiveNotNull[T](using e: Encoder[Option[T]]): PrimitiveEncoder[T] = + new PrimitiveEncoder[T] {} + +transparent inline given fromMirror[A]: Any = ${ fromMirrorImpl[A] } + +def fromMirrorImpl[A : Type](using q: Quotes): Expr[Any] = + Expr.summon[Mirror.Of[A]].get match + case '{ ${mirror}: Mirror.ProductOf[A] { type MirroredElemTypes = elementTypes } } => + val encoder = Type.of[elementTypes] match + case '[tpe *: EmptyTuple] => + Expr.summon[Encoder[tpe]].get + + encoder match + case '{ ${encoder}: Encoder[tpe] } => // ok + case _ => ??? + + encoder match + case '{ ${encoder}: Encoder[tpe] } => // ok + case _ => ??? + + encoder diff --git a/tests/pos-macros/i15779/Test_2.scala b/tests/pos-macros/i15779/Test_2.scala new file mode 100644 index 000000000000..c7223d849a86 --- /dev/null +++ b/tests/pos-macros/i15779/Test_2.scala @@ -0,0 +1,3 @@ +case class JustInt(i: Int) + +val x = fromMirror[JustInt] diff --git a/tests/pos-macros/i15985.scala b/tests/pos-macros/i15985.scala new file mode 100644 index 000000000000..cd8a726647f9 --- /dev/null +++ b/tests/pos-macros/i15985.scala @@ -0,0 +1,28 @@ +package anorm.macros +sealed trait Row +sealed trait SqlResult[A] + +import scala.quoted.{ Expr, Quotes, Type } + +private[anorm] object RowParserImpl { + def apply[A](using q:Quotes)(using a: Type[A]): Expr[Row => SqlResult[A]] = { + import q.reflect.* + + inline def f1: Expr[SqlResult[A]] = + Match(???, ???).asExprOf[SqlResult[A]] // (using Type.of[anorm.macros.SqlResult[A]] }) + + inline def f2: Expr[SqlResult[A]] = + Match(???, ???).asExprOf[SqlResult[A]](using Type.of[SqlResult[A]]) + // In Staging phase it becomes + // ..asExprOf[..](using Type.of[{ @SplicedType type a$_$3 = 
a.Underlying; anorm.macros.SqlResult[a$_$3] }]) + + inline def f3(using Type[SqlResult[A]]): Expr[SqlResult[A]] = + Match(???, ???).asExprOf[SqlResult[A]] + + f1 + f2 + f3 + + ??? + } +} diff --git a/tests/pos-macros/i16008/Macro_1.scala b/tests/pos-macros/i16008/Macro_1.scala new file mode 100644 index 000000000000..b1cc1e6b2b9d --- /dev/null +++ b/tests/pos-macros/i16008/Macro_1.scala @@ -0,0 +1,24 @@ +import scala.quoted.* + +enum MyEnum0: + case Marked + case Marked2(i: Int) + +trait MyMarker + +enum MyEnum(val value: String): + case Marked extends MyEnum("marked") with MyMarker + case Marked2(i: Int) extends MyEnum("marked") with MyMarker + +inline def enumMacro: Unit = ${ enumMacroExpr } + +private def enumMacroExpr(using Quotes): Expr[Unit] = + import quotes.reflect.* + assert(TypeRepr.of[MyEnum0].typeSymbol.flags.is(Flags.Enum)) + assert(TypeRepr.of[MyEnum0.Marked.type].termSymbol.flags.is(Flags.Enum)) + assert(TypeRepr.of[MyEnum0.Marked2].typeSymbol.flags.is(Flags.Enum)) + assert(TypeRepr.of[MyEnum].typeSymbol.flags.is(Flags.Enum)) + assert(TypeRepr.of[MyEnum.Marked.type].termSymbol.flags.is(Flags.Enum)) + assert(TypeRepr.of[MyEnum.Marked2].typeSymbol.flags.is(Flags.Enum)) + + '{} diff --git a/tests/pos-macros/i16008/Test_2.scala b/tests/pos-macros/i16008/Test_2.scala new file mode 100644 index 000000000000..43631e59e4b2 --- /dev/null +++ b/tests/pos-macros/i16008/Test_2.scala @@ -0,0 +1 @@ +def test = enumMacro diff --git a/tests/pos-macros/i16265.scala b/tests/pos-macros/i16265.scala new file mode 100644 index 000000000000..db75fbfa307c --- /dev/null +++ b/tests/pos-macros/i16265.scala @@ -0,0 +1,9 @@ +import scala.quoted.* + +class Foo(val value: Int) + +def foo(exprs: Expr[Any])(using Quotes): Any = + exprs match + case '{ $tuple: (Foo *: tail) } => + val x = '{ ${tuple}.head.value } + ??? 
diff --git a/tests/pos-macros/i16318/Macro_1.scala b/tests/pos-macros/i16318/Macro_1.scala new file mode 100644 index 000000000000..d66cebfd68b6 --- /dev/null +++ b/tests/pos-macros/i16318/Macro_1.scala @@ -0,0 +1,11 @@ +import scala.quoted.* + +final case class Record(a: String, b: Int) + +transparent inline def ann[T]: List[Any] = ${ annsImpl[T] } + +def annsImpl[T: Type](using Quotes): Expr[List[Any]] = { + import quotes.reflect.* + val annExpr = TypeRepr.of[T].typeSymbol.annotations.head.asExpr + '{ List($annExpr) } +} \ No newline at end of file diff --git a/tests/pos-macros/i16318/Test_2.scala b/tests/pos-macros/i16318/Test_2.scala new file mode 100644 index 000000000000..80eed17d26ba --- /dev/null +++ b/tests/pos-macros/i16318/Test_2.scala @@ -0,0 +1,2 @@ +def Test = + val a = ann[Record] \ No newline at end of file diff --git a/tests/pos-macros/i16420/Macro.scala b/tests/pos-macros/i16420/Macro.scala new file mode 100644 index 000000000000..1ea9406a0b9b --- /dev/null +++ b/tests/pos-macros/i16420/Macro.scala @@ -0,0 +1,21 @@ +import scala.quoted.{Expr, Quotes, Type} + +object Converter { + private def handleUnit[R](f: Expr[Int ?=> R])(using q: Quotes, rt: Type[R]): Expr[Unit] = '{} + + class UnitConverter[R] extends Converter[EmptyTuple, R, Int ?=> R] { + inline def convert(inline f: Int ?=> R): Unit = ${ handleUnit[R]('f) } + } + + inline given unitHandler[R]: UnitConverter[R] = new UnitConverter[R] +} + + +trait Converter[T <: Tuple, R, F] { + inline def convert(inline fn: F): Unit +} + +abstract class Directive[R <: Tuple] { + inline def apply[O, F](using inline c: Converter[R, O, F])(inline fn: F): Unit = + c.convert(fn) +} diff --git a/tests/pos-macros/i16420/Test.scala b/tests/pos-macros/i16420/Test.scala new file mode 100644 index 000000000000..f63cc62306af --- /dev/null +++ b/tests/pos-macros/i16420/Test.scala @@ -0,0 +1,8 @@ +object Meow extends App { + case class Meow(s: String, i: Int) + + val dir: Directive[EmptyTuple] = ??? 
+ dir { + Meow("asd", 123) + } +} diff --git a/tests/pos-macros/i16615.scala b/tests/pos-macros/i16615.scala new file mode 100644 index 000000000000..3cc2d271fa87 --- /dev/null +++ b/tests/pos-macros/i16615.scala @@ -0,0 +1,19 @@ +import scala.quoted.* + +trait Api: + type Reader[E] + +def bugImpl[T: Type, Q[_]: Type](using Quotes) = + '{ + val p: Api = ??? + ${ + Type.of[p.Reader[T]] + Type.of[Q[p.Reader[T]]] + Type.of[p.Reader[Q[p.Reader[T]]]] + Type.of[List[p.Reader[T]]] + Type.of[p.Reader[List[p.Reader[T]]]] + Type.of[p.Reader[List[T]]] + Type.of[p.Reader[Q[T]]] + Expr(1) + } + } diff --git a/tests/pos-macros/i16636/Macro_1.scala b/tests/pos-macros/i16636/Macro_1.scala new file mode 100644 index 000000000000..78a3f6ef7b9b --- /dev/null +++ b/tests/pos-macros/i16636/Macro_1.scala @@ -0,0 +1,30 @@ +import scala.quoted.* + +trait ReproTransformer[A, B] { + def transform(from: A): B +} + +object ReproTransformer { + final class Identity[A, B >: A] extends ReproTransformer[A, B] { + def transform(from: A): B = from + } + + given identity[A, B >: A]: Identity[A, B] = Identity[A, B] + + inline def getTransformer[A, B]: ReproTransformer[A, B] = ${ getTransformerMacro[A, B] } + + def getTransformerMacro[A, B](using quotes: Quotes, A: Type[A], B: Type[B]) = { + import quotes.reflect.* + + val transformer = (A -> B) match { + case '[a] -> '[b] => + val summoned = Expr.summon[ReproTransformer[a, b]].get +// ----------- INTERESTING STUFF STARTS HERE + summoned match { + case '{ $t: ReproTransformer[src, dest] } => t + } +// ----------- INTERESTING STUFF ENDS HERE + } + transformer.asExprOf[ReproTransformer[A, B]] + } +} diff --git a/tests/pos-macros/i16636/Test_2.scala b/tests/pos-macros/i16636/Test_2.scala new file mode 100644 index 000000000000..eb8891ea7bf8 --- /dev/null +++ b/tests/pos-macros/i16636/Test_2.scala @@ -0,0 +1,9 @@ +object A { + case class AnotherCaseClass(name: String) + + val errorsOut1 = ReproTransformer.getTransformer[A.AnotherCaseClass, 
AnotherCaseClass] + val errorsOu2 = ReproTransformer.getTransformer[AnotherCaseClass, A.AnotherCaseClass] + + val works1 = ReproTransformer.getTransformer[A.AnotherCaseClass, A.AnotherCaseClass] + val works2 = ReproTransformer.getTransformer[AnotherCaseClass, AnotherCaseClass] +} diff --git a/tests/pos-macros/i16835/Macro_1.scala b/tests/pos-macros/i16835/Macro_1.scala new file mode 100644 index 000000000000..133d9f38d1da --- /dev/null +++ b/tests/pos-macros/i16835/Macro_1.scala @@ -0,0 +1,79 @@ +import scala.quoted.* +import scala.deriving.Mirror + +// derivation code is a slightly modified version of: https://github.com/lampepfl/dotty-macro-examples/blob/main/macroTypeClassDerivation/src/macro.scala +object Derivation { + + // Typeclass instance gets constructed as part of a macro + inline given deriveFullyConstrucedByMacro[A](using Mirror.ProductOf[A]): Show[A] = Derivation.deriveShow[A] + + // Typeclass instance is built inside as part of a method, only the 'show' impl is filled in by a macro + inline given derivePartiallyConstructedByMacro[A](using Mirror.ProductOf[A]): Show[A] = + new { + def show(value: A): String = Derivation.show(value) + } + + inline def show[T](value: T): String = ${ showValue('value) } + + inline def deriveShow[T]: Show[T] = ${ deriveCaseClassShow[T] } + + private def deriveCaseClassShow[T](using quotes: Quotes, tpe: Type[T]): Expr[Show[T]] = { + import quotes.reflect.* + // Getting the case fields of the case class + val fields: List[Symbol] = TypeTree.of[T].symbol.caseFields + + '{ + new Show[T] { + override def show(t: T): String = + ${ showValue('t) } + } + } + } + + def showValue[T: Type](value: Expr[T])(using Quotes): Expr[String] = { + import quotes.reflect.* + + val fields: List[Symbol] = TypeTree.of[T].symbol.caseFields + + val vTerm: Term = value.asTerm + val valuesExprs: List[Expr[String]] = fields.map(showField(vTerm, _)) + val exprOfList: Expr[List[String]] = Expr.ofList(valuesExprs) + '{ "{ " + $exprOfList.mkString(", ") + 
" }" } + } + + /** Create a quoted String representation of a given field of the case class */ + private def showField(using Quotes)(caseClassTerm: quotes.reflect.Term, field: quotes.reflect.Symbol): Expr[String] = { + import quotes.reflect.* + + val fieldValDef: ValDef = field.tree.asInstanceOf[ValDef] + val fieldTpe: TypeRepr = fieldValDef.tpt.tpe + val fieldName: String = fieldValDef.name + + val tcl: Term = lookupShowFor(fieldTpe) // Show[$fieldTpe] + val fieldValue: Term = Select(caseClassTerm, field) // v.field + val strRepr: Expr[String] = applyShow(tcl, fieldValue).asExprOf[String] + '{ ${ Expr(fieldName) } + ": " + $strRepr } // summon[Show[$fieldTpe]].show(v.field) + } + + /** Look up the Show[$t] typeclass for a given type t */ + private def lookupShowFor(using Quotes)(t: quotes.reflect.TypeRepr): quotes.reflect.Term = { + import quotes.reflect.* + t.asType match { + case '[tpe] => + Implicits.search(TypeRepr.of[Show[tpe]]) match { + case res: ImplicitSearchSuccess => res.tree + case failure: DivergingImplicit => report.errorAndAbort(s"Diverving: ${failure.explanation}") + case failure: NoMatchingImplicits => report.errorAndAbort(s"NoMatching: ${failure.explanation}") + case failure: AmbiguousImplicits => report.errorAndAbort(s"Ambiguous: ${failure.explanation}") + case failure: ImplicitSearchFailure => + report.errorAndAbort(s"catch all: ${failure.explanation}") + } + } + } + + /** Composes the tree: $tcl.show($arg) */ + private def applyShow(using Quotes)(tcl: quotes.reflect.Term, arg: quotes.reflect.Term): quotes.reflect.Term = { + import quotes.reflect.* + Apply(Select.unique(tcl, "show"), arg :: Nil) + } +} diff --git a/tests/pos-macros/i16835/Show_1.scala b/tests/pos-macros/i16835/Show_1.scala new file mode 100644 index 000000000000..61f6b2dccd80 --- /dev/null +++ b/tests/pos-macros/i16835/Show_1.scala @@ -0,0 +1,11 @@ +trait Show[A] { + def show(value: A): String +} + +object Show { + given identity: Show[String] = a => a + + given int: Show[Int] 
= _.toString() + + given list[A](using A: Show[A]): Show[List[A]] = _.map(A.show).toString() +} diff --git a/tests/pos-macros/i16835/Test_2.scala b/tests/pos-macros/i16835/Test_2.scala new file mode 100644 index 000000000000..61019b1417b6 --- /dev/null +++ b/tests/pos-macros/i16835/Test_2.scala @@ -0,0 +1,30 @@ +import scala.deriving.* + +object usage { + final case class Person(name: String, age: Int, otherNames: List[String], p2: Person2) + + final case class Person2(name: String, age: Int, otherNames: List[String]) + + locally { + import Derivation.deriveFullyConstrucedByMacro + // works for case classes without other nested case classes inside + summon[Show[Person2]] + + // also derives instances with nested case classes + summon[Show[Person]] + } + + locally { + import Derivation.derivePartiallyConstructedByMacro + + // works for case classes without other nested case classes inside + summon[Show[Person2]] + + // fails for case classes with other nested case classes inside, + // note how that error is not a `NonMatching', `Diverging` or `Ambiguous` implicit search error but something else + /* + catch all: given instance deriveWithConstructionOutsideMacro in object Derivation does not match type io.github.arainko.ducktape.issue_repros.Show[Person2] + */ + summon[Show[Person]] + } +} \ No newline at end of file diff --git a/tests/pos-macros/i16843a/Macro_1.scala b/tests/pos-macros/i16843a/Macro_1.scala new file mode 100644 index 000000000000..98c1505910e6 --- /dev/null +++ b/tests/pos-macros/i16843a/Macro_1.scala @@ -0,0 +1,13 @@ +import scala.quoted.* + +case class Foo(x: Int) + +inline def foo = ${ fooImpl } + +def fooImpl(using Quotes) = + val tmp = '{ + 1 match + case x @ (y: Int) => 0 + } + + '{} diff --git a/tests/pos-macros/i16843a/Test_2.scala b/tests/pos-macros/i16843a/Test_2.scala new file mode 100644 index 000000000000..25406428d0cf --- /dev/null +++ b/tests/pos-macros/i16843a/Test_2.scala @@ -0,0 +1 @@ +val x = foo diff --git 
a/tests/pos-macros/i16843b/Macro_1.scala b/tests/pos-macros/i16843b/Macro_1.scala new file mode 100644 index 000000000000..631bde56f1f1 --- /dev/null +++ b/tests/pos-macros/i16843b/Macro_1.scala @@ -0,0 +1,18 @@ +import scala.quoted.* + +inline def foo: Int = ${ fooImpl } + +def fooImpl(using Quotes): Expr[Int] = + '{ + val b = ${ + val a = '{ + (1: Int) match + case x @ (y: Int) => 0 + } + a + } + + (1: Int) match + case x @ (y: Int) => 0 + } + diff --git a/tests/pos-macros/i16843b/Test_2.scala b/tests/pos-macros/i16843b/Test_2.scala new file mode 100644 index 000000000000..54c769c9618f --- /dev/null +++ b/tests/pos-macros/i16843b/Test_2.scala @@ -0,0 +1 @@ +def test = foo diff --git a/tests/pos-macros/i16959/Macro_1.scala b/tests/pos-macros/i16959/Macro_1.scala new file mode 100644 index 000000000000..61483bff7ff1 --- /dev/null +++ b/tests/pos-macros/i16959/Macro_1.scala @@ -0,0 +1,17 @@ +import scala.quoted.* + +inline def test = ${ testImpl } + +def testImpl(using Quotes) = + import quotes.reflect.* + + val int = PackedType[Int] + val string = PackedType[String] + + assert(Type.show[(int.U, string.U, string.U)] == "scala.Tuple3[scala.Int, java.lang.String, java.lang.String]") + + '{ () } + +final class PackedType[T](using t: Type[T]): + opaque type U = T + given tpe: Type[U] = t diff --git a/tests/pos-macros/i16959/Test_2.scala b/tests/pos-macros/i16959/Test_2.scala new file mode 100644 index 000000000000..e9772d026451 --- /dev/null +++ b/tests/pos-macros/i16959/Test_2.scala @@ -0,0 +1 @@ +def app = test diff --git a/tests/pos-macros/i16961/Macro_1.scala b/tests/pos-macros/i16961/Macro_1.scala new file mode 100644 index 000000000000..20ec6b439ec8 --- /dev/null +++ b/tests/pos-macros/i16961/Macro_1.scala @@ -0,0 +1,10 @@ +import scala.quoted.* + +inline def myMacro = ${ myMacroImpl } + +def myMacroImpl(using Quotes) = + import quotes.reflect.* + + PolyType(List("arg"))(_ => List(TypeBounds.empty), _ => TypeRepr.of[Any]) match + case _: TypeLambda => 
quotes.reflect.report.errorAndAbort("PolyType should not be a TypeLambda") + case _ => '{ () } // Ok diff --git a/tests/pos-macros/i16961/Test_2.scala b/tests/pos-macros/i16961/Test_2.scala new file mode 100644 index 000000000000..76a9e17659db --- /dev/null +++ b/tests/pos-macros/i16961/Test_2.scala @@ -0,0 +1 @@ +def test = myMacro diff --git a/tests/pos-macros/i17026.scala b/tests/pos-macros/i17026.scala new file mode 100644 index 000000000000..d8845ef1d086 --- /dev/null +++ b/tests/pos-macros/i17026.scala @@ -0,0 +1,3 @@ +import scala.quoted.* +def macroImpl(using Quotes) = + '{ def weird[A: Type](using Quotes) = Type.of[A] } diff --git a/tests/pos-macros/i17026b.scala b/tests/pos-macros/i17026b.scala new file mode 100644 index 000000000000..98a29066462e --- /dev/null +++ b/tests/pos-macros/i17026b.scala @@ -0,0 +1,7 @@ +import scala.quoted.* + +def macroImpl(using Quotes) = + '{ + def weird[A: ToExpr: Type](a: A)(using quotes: Quotes) = + '{ Some(${ Expr(a) }) } + } diff --git a/tests/pos-macros/i17037.scala b/tests/pos-macros/i17037.scala new file mode 100644 index 000000000000..1048d84ffe96 --- /dev/null +++ b/tests/pos-macros/i17037.scala @@ -0,0 +1,8 @@ +import scala.quoted.* + +class Foo: + type Bar = Int + +def macroImpl(using Quotes) = + val foo = new Foo + Type.of[foo.Bar] diff --git a/tests/pos-macros/i17037b.scala b/tests/pos-macros/i17037b.scala new file mode 100644 index 000000000000..60d2bec33330 --- /dev/null +++ b/tests/pos-macros/i17037b.scala @@ -0,0 +1,10 @@ +import scala.quoted.* + +class Foo: + type Bar = Int + +def macroImpl(using Quotes) = + val foo = Foo() + Type.of[foo.Bar] match + case '[foo.Bar] => '{true} + case _ => '{false} diff --git a/tests/pos-macros/i17037c.scala b/tests/pos-macros/i17037c.scala new file mode 100644 index 000000000000..56cd8f7a2d41 --- /dev/null +++ b/tests/pos-macros/i17037c.scala @@ -0,0 +1,7 @@ +import scala.quoted.* + +class Foo: + type Bar = Int + def macroImpl(using Quotes) = + val foo = new Foo + 
Type.of[this.Bar] diff --git a/tests/pos-macros/i17039.scala b/tests/pos-macros/i17039.scala new file mode 100644 index 000000000000..6f983b138526 --- /dev/null +++ b/tests/pos-macros/i17039.scala @@ -0,0 +1,7 @@ +import scala.quoted.* + +def macroImpl(using Quotes) = + val t = summon[Type[Int]] + Type.of[Int] match + case '[t.Underlying] => '{true} + case _ => '{false} diff --git a/tests/pos-macros/i17103a.scala b/tests/pos-macros/i17103a.scala new file mode 100644 index 000000000000..ffd0c15f28b2 --- /dev/null +++ b/tests/pos-macros/i17103a.scala @@ -0,0 +1,21 @@ +import scala.quoted.* + +trait C0: + def d: Int + +def test(using Quotes): Expr[Unit] = + '{ + trait C1 extends C0: + def d: Int + trait C extends C1: + def d: Int + val c: C = ??? + ${ + val expr = '{ + val cRef: C = ??? + cRef.d // calls C0.d + () + } + expr + } + } diff --git a/tests/pos-macros/i17103b.scala b/tests/pos-macros/i17103b.scala new file mode 100644 index 000000000000..0fbe86f0cf73 --- /dev/null +++ b/tests/pos-macros/i17103b.scala @@ -0,0 +1,21 @@ +import scala.quoted.* + +trait C0: + def d: Int + +def test(using Quotes): Expr[Unit] = + '{ + trait C1 extends C0: + def d: Int + trait C extends C1: + def d: Int + val c: C = ??? + ${ + val expr = '{ + val cRef: c.type = ??? 
+ cRef.d // calls C0.d + () + } + expr + } + } diff --git a/tests/pos-macros/i17227/Macro_1.scala b/tests/pos-macros/i17227/Macro_1.scala new file mode 100644 index 000000000000..b483336119cb --- /dev/null +++ b/tests/pos-macros/i17227/Macro_1.scala @@ -0,0 +1,22 @@ +import scala.quoted.* + +inline def foo(f: Int => Int): Int => Int = ${impl('f)} +inline def bar(inline f: Int => Int): Int => Int = ${impl('f)} +inline def baz(inline f: (Int => Int)*): Int => Int = ${impl2('f)} + +def impl(f: Expr[Int => Int])(using Quotes): Expr[Int => Int] = + assertNoNamedArgs(f) + '{identity} + +def impl2(f: Expr[Seq[Int => Int]])(using Quotes): Expr[Int => Int] = + assertNoNamedArgs(f) + '{identity} + +def assertNoNamedArgs(expr: Expr[Any])(using Quotes): Unit = + import quotes.reflect.* + new TreeTraverser { + override def traverseTree(tree: Tree)(owner: Symbol): Unit = tree match + case _: NamedArg => + report.throwError(s"Unexpected NamedArg after inlining: ${tree}", tree.pos) + case _ => traverseTreeChildren(tree)(owner) + }.traverseTree(expr.asTerm)(Symbol.spliceOwner) diff --git a/tests/pos-macros/i17227/Test_2.scala b/tests/pos-macros/i17227/Test_2.scala new file mode 100644 index 000000000000..4106113d94c0 --- /dev/null +++ b/tests/pos-macros/i17227/Test_2.scala @@ -0,0 +1,6 @@ +def g(i: Int): Int = i + +def test = + foo(f = g) + bar(f = g) + baz(f = g) diff --git a/tests/pos-macros/i17293.scala b/tests/pos-macros/i17293.scala new file mode 100644 index 000000000000..57eba1181903 --- /dev/null +++ b/tests/pos-macros/i17293.scala @@ -0,0 +1,12 @@ +import scala.quoted.* + +trait OuterTrait { + trait X +} + +def exampleMacro[T <: OuterTrait: Type](expr: Expr[T])(using Quotes): Expr[OuterTrait#X] = { + '{ + val prefix: T = ${ expr } + new prefix.X {} + } +} diff --git a/tests/pos-macros/i17293b.scala b/tests/pos-macros/i17293b.scala new file mode 100644 index 000000000000..a8b73ba6176b --- /dev/null +++ b/tests/pos-macros/i17293b.scala @@ -0,0 +1,12 @@ +import scala.quoted.* 
+ +trait OuterTrait { self => + trait X + + def exampleMacro[T <: self.type: Type](expr: Expr[T])(using Quotes): Expr[self.X] = { + '{ + val prefix: T = ${ expr } + new prefix.X {} + } + } +} \ No newline at end of file diff --git a/tests/pos-macros/i17409.scala b/tests/pos-macros/i17409.scala new file mode 100644 index 000000000000..449e0576d84b --- /dev/null +++ b/tests/pos-macros/i17409.scala @@ -0,0 +1,10 @@ +import scala.quoted.* + +transparent inline def thing = + ${ thingImpl } + +def thingImpl(using Quotes): Expr[Any] = + '{ + def makeThing: { def me: this.type } = ??? + makeThing + } diff --git a/tests/pos-macros/i17434a/Macro.scala b/tests/pos-macros/i17434a/Macro.scala new file mode 100644 index 000000000000..0e399d82a9d1 --- /dev/null +++ b/tests/pos-macros/i17434a/Macro.scala @@ -0,0 +1,8 @@ +import scala.quoted.* + +object SelectDynamicMacroImpl { + def selectImpl[E: Type]( + ref: Expr[SQLSyntaxProvider[_]], + name: Expr[String] + )(using Quotes): Expr[SQLSyntax] = '{SQLSyntax("foo")} +} diff --git a/tests/pos-macros/i17434a/Test.scala b/tests/pos-macros/i17434a/Test.scala new file mode 100644 index 000000000000..8e7c314b238d --- /dev/null +++ b/tests/pos-macros/i17434a/Test.scala @@ -0,0 +1,23 @@ +// test.scala +import scala.language.dynamics + +trait SQLSyntaxProvider[A] extends Dynamic{ + def field(name: String): SQLSyntax = ??? 
+ + inline def selectDynamic(inline name: String): SQLSyntax = + select[A](this, name) + + inline def select[E](ref: SQLSyntaxProvider[A], inline name: String): SQLSyntax = + ${ SelectDynamicMacroImpl.selectImpl[E]('ref, 'name) } +} + +class SQLSyntax(value: String) +trait SQLSyntaxSupport[A] +case class ColumnSQLSyntaxProvider[S <: SQLSyntaxSupport[A], A](support: S) extends SQLSyntaxProvider[A] + +case class Account(id: Long, name: String) +object Account extends SQLSyntaxSupport[Account] + +def Test() = + val p = ColumnSQLSyntaxProvider[Account.type, Account](Account) + assert(p.name == SQLSyntax("name")) diff --git a/tests/pos-macros/i17434b/Macro.scala b/tests/pos-macros/i17434b/Macro.scala new file mode 100644 index 000000000000..adca2888f777 --- /dev/null +++ b/tests/pos-macros/i17434b/Macro.scala @@ -0,0 +1,29 @@ +trait NameOf: + transparent inline def nameOf(inline expr: Any): String = ${NameOfImpl.nameOf('expr)} + transparent inline def nameOf[T](inline expr: T => Any): String = ${NameOfImpl.nameOf('expr)} +object NameOf extends NameOf + +import scala.compiletime.* + +import scala.annotation.tailrec +import scala.quoted.* + +object NameOfImpl { + def nameOf(expr: Expr[Any])(using Quotes): Expr[String] = { + import quotes.reflect.* + @tailrec def extract(tree: Tree): String = tree match { + case Ident(name) => name + case Select(_, name) => name + case Block(List(stmt), term) => extract(stmt) + case DefDef("$anonfun", _, _, Some(term)) => extract(term) + case Block(_, term) => extract(term) + case Apply(term, _) if term.symbol.fullName != ".throw" => extract(term) + case TypeApply(term, _) => extract(term) + case Inlined(_, _, term) => extract(term) + case Typed(term, _) => extract(term) + case _ => throw new MatchError(s"Unsupported expression: ${expr.show}") + } + val name = extract(expr.asTerm) + Expr(name) + } +} diff --git a/tests/pos-macros/i17434b/Test.scala b/tests/pos-macros/i17434b/Test.scala new file mode 100644 index 000000000000..5e71f9c95965 
--- /dev/null +++ b/tests/pos-macros/i17434b/Test.scala @@ -0,0 +1,6 @@ +import NameOf._ +def test() = + def func1(x: Int): String = ??? + val funcVal = func1 _ + assert(nameOf(funcVal) == "funcVal") + assert(nameOf(func1 _) == "func1") diff --git a/tests/pos-macros/i17434c/Macro.scala b/tests/pos-macros/i17434c/Macro.scala new file mode 100644 index 000000000000..dc3d2a533117 --- /dev/null +++ b/tests/pos-macros/i17434c/Macro.scala @@ -0,0 +1,3 @@ +import scala.quoted.* +inline def foo[T](expr: T => Any): Unit = ${impl('expr)} +def impl(expr: Expr[Any])(using Quotes): Expr[Unit] = '{} diff --git a/tests/pos-macros/i17434c/Test.scala b/tests/pos-macros/i17434c/Test.scala new file mode 100644 index 000000000000..6561dd193b63 --- /dev/null +++ b/tests/pos-macros/i17434c/Test.scala @@ -0,0 +1 @@ +def test(f: Int => Any) = foo(f) diff --git a/tests/pos-macros/i17434d/Macro.scala b/tests/pos-macros/i17434d/Macro.scala new file mode 100644 index 000000000000..a76c8aab58e4 --- /dev/null +++ b/tests/pos-macros/i17434d/Macro.scala @@ -0,0 +1,2 @@ +import scala.quoted.* +def impl[E: Type](ref: Expr[Foo[_]])(using Quotes): Expr[Unit] = '{ } diff --git a/tests/pos-macros/i17434d/Test.scala b/tests/pos-macros/i17434d/Test.scala new file mode 100644 index 000000000000..3af0ddecd061 --- /dev/null +++ b/tests/pos-macros/i17434d/Test.scala @@ -0,0 +1,4 @@ +trait Foo[A]: + inline def foo(): Unit = bar[this.type](this) + inline def bar[E](ref: Foo[A]): Unit = ${ impl[E]('ref) } +def test(p: Foo[Int]) = p.foo() diff --git a/tests/pos-macros/i17606/Macros_1.scala b/tests/pos-macros/i17606/Macros_1.scala new file mode 100644 index 000000000000..245f2df66e7b --- /dev/null +++ b/tests/pos-macros/i17606/Macros_1.scala @@ -0,0 +1,14 @@ +package example + +import scala.quoted.* + +object A { + inline def f(inline a: Any): Boolean = ${ impl('a) } + + def impl(a: Expr[Any])(using Quotes): Expr[Boolean] = { + a match { + case '{ new String($x: Array[Byte]) } => Expr(true) + case _ => 
quotes.reflect.report.errorAndAbort("Expected match", a) + } + } +} diff --git a/tests/pos-macros/i17606/Test_2.scala b/tests/pos-macros/i17606/Test_2.scala new file mode 100644 index 000000000000..ebf535bc2ae9 --- /dev/null +++ b/tests/pos-macros/i17606/Test_2.scala @@ -0,0 +1,8 @@ +package example + +object Main { + def main(args: Array[String]): Unit = { + val x = A.f(new String(Array.empty[Byte])) + println(x) + } +} diff --git a/tests/pos-macros/i7405b.scala b/tests/pos-macros/i7405b.scala index df7218608e88..6c73c275e15f 100644 --- a/tests/pos-macros/i7405b.scala +++ b/tests/pos-macros/i7405b.scala @@ -3,7 +3,7 @@ import scala.quoted.* class Foo { def f(using Quotes): Expr[Any] = { '{ - trait X { + trait X extends A { type Y def y: Y = ??? } @@ -17,3 +17,7 @@ class Foo { } } } + +trait A: + type Y + def y: Y = ??? diff --git a/tests/pos-macros/i8100b.scala b/tests/pos-macros/i8100b.scala new file mode 100644 index 000000000000..ecba10e439d2 --- /dev/null +++ b/tests/pos-macros/i8100b.scala @@ -0,0 +1,37 @@ +import scala.quoted.* + +def f[T](using t: Type[T])(using Quotes) = + '{ + // @SplicedType type t$1 = t.Underlying + type T2 = T // type T2 = t$1 + ${ + + val t0: T = ??? + val t1: T2 = ??? // val t1: T = ??? + val tp1 = Type.of[T] // val tp1 = t + val tp2 = Type.of[T2] // val tp2 = t + '{ + // @SplicedType type t$2 = t.Underlying + val t3: T = ??? // val t3: t$2 = ??? + val t4: T2 = ??? // val t4: t$2 = ??? + } + } + } + +def g(using Quotes) = + '{ + type U + type U2 = U + ${ + + val u1: U = ??? + val u2: U2 = ??? // val u2: U = ??? + + val tp1 = Type.of[U] // val tp1 = Type.of[U] + val tp2 = Type.of[U2] // val tp2 = Type.of[U] + '{ + val u3: U = ??? + val u4: U2 = ??? // val u4: U = ??? 
+ } + } + } diff --git a/tests/pos-macros/i8577a/Macro_1.scala b/tests/pos-macros/i8577a/Macro_1.scala new file mode 100644 index 000000000000..3831f060f918 --- /dev/null +++ b/tests/pos-macros/i8577a/Macro_1.scala @@ -0,0 +1,11 @@ +package i8577 + +import scala.quoted._ + +object Macro: + opaque type StrCtx = StringContext + def apply(ctx: StringContext): StrCtx = ctx + def unapply(ctx: StrCtx): Option[StringContext] = Some(ctx) + +def implUnapply(sc: Expr[Macro.StrCtx], input: Expr[Int])(using Quotes): Expr[Option[Seq[Int]]] = + '{ Some(Seq(${input})) } diff --git a/tests/pos-macros/i8577a/Main_2.scala b/tests/pos-macros/i8577a/Main_2.scala new file mode 100644 index 000000000000..5a0f6b609f81 --- /dev/null +++ b/tests/pos-macros/i8577a/Main_2.scala @@ -0,0 +1,9 @@ +package i8577 + +def main: Unit = + extension (ctx: StringContext) def mac: Macro.StrCtx = Macro(ctx) + extension (inline ctx: Macro.StrCtx) inline def unapplySeq(inline input: Int): Option[Seq[Int]] = + ${ implUnapply('ctx, 'input) } + + val mac"$x" = 1 + assert(x == 1) diff --git a/tests/pos-macros/i8577b/Macro_1.scala b/tests/pos-macros/i8577b/Macro_1.scala new file mode 100644 index 000000000000..464d9894fa1c --- /dev/null +++ b/tests/pos-macros/i8577b/Macro_1.scala @@ -0,0 +1,11 @@ +package i8577 + +import scala.quoted._ + +object Macro: + opaque type StrCtx = StringContext + def apply(ctx: StringContext): StrCtx = ctx + def unapply(ctx: StrCtx): Option[StringContext] = Some(ctx) + +def implUnapply[U](sc: Expr[Macro.StrCtx], input: Expr[U])(using Type[U])(using Quotes): Expr[Option[Seq[U]]] = + '{ Some(Seq(${input})) } diff --git a/tests/pos-macros/i8577b/Main_2.scala b/tests/pos-macros/i8577b/Main_2.scala new file mode 100644 index 000000000000..789e572bd5aa --- /dev/null +++ b/tests/pos-macros/i8577b/Main_2.scala @@ -0,0 +1,9 @@ +package i8577 + +def main: Unit = + extension (ctx: StringContext) def mac: Macro.StrCtx = Macro(ctx) + extension (inline ctx: Macro.StrCtx) inline def 
unapplySeq[U](inline input: U): Option[Seq[U]] = + ${ implUnapply('ctx, 'input) } + + val mac"$x" = 1 + assert(x == 1) diff --git a/tests/pos-macros/i8577c/Macro_1.scala b/tests/pos-macros/i8577c/Macro_1.scala new file mode 100644 index 000000000000..45986b34d48d --- /dev/null +++ b/tests/pos-macros/i8577c/Macro_1.scala @@ -0,0 +1,11 @@ +package i8577 + +import scala.quoted._ + +object Macro: + opaque type StrCtx = StringContext + def apply(ctx: StringContext): StrCtx = ctx + def unapply(ctx: StrCtx): Option[StringContext] = Some(ctx) + +def implUnapply[T](sc: Expr[Macro.StrCtx], input: Expr[T])(using Type[T])(using Quotes): Expr[Option[Seq[T]]] = + '{ Some(Seq(${input})) } diff --git a/tests/pos-macros/i8577c/Main_2.scala b/tests/pos-macros/i8577c/Main_2.scala new file mode 100644 index 000000000000..4f42c7635ec5 --- /dev/null +++ b/tests/pos-macros/i8577c/Main_2.scala @@ -0,0 +1,9 @@ +package i8577 + +def main: Unit = + extension (ctx: StringContext) def mac: Macro.StrCtx = Macro(ctx) + extension [T] (inline ctx: Macro.StrCtx) inline def unapplySeq(inline input: T): Option[Seq[T]] = + ${ implUnapply('ctx, 'input) } + + val mac"$x" = 1 + assert(x == 1) diff --git a/tests/pos-macros/i8577d/Macro_1.scala b/tests/pos-macros/i8577d/Macro_1.scala new file mode 100644 index 000000000000..45986b34d48d --- /dev/null +++ b/tests/pos-macros/i8577d/Macro_1.scala @@ -0,0 +1,11 @@ +package i8577 + +import scala.quoted._ + +object Macro: + opaque type StrCtx = StringContext + def apply(ctx: StringContext): StrCtx = ctx + def unapply(ctx: StrCtx): Option[StringContext] = Some(ctx) + +def implUnapply[T](sc: Expr[Macro.StrCtx], input: Expr[T])(using Type[T])(using Quotes): Expr[Option[Seq[T]]] = + '{ Some(Seq(${input})) } diff --git a/tests/pos-macros/i8577d/Main_2.scala b/tests/pos-macros/i8577d/Main_2.scala new file mode 100644 index 000000000000..a87f06503b31 --- /dev/null +++ b/tests/pos-macros/i8577d/Main_2.scala @@ -0,0 +1,9 @@ +package i8577 + +def main: Unit = + extension 
(ctx: StringContext) def mac: Macro.StrCtx = Macro(ctx) + extension [T] (inline ctx: Macro.StrCtx) inline def unapplySeq[U](inline input: T): Option[Seq[T]] = + ${ implUnapply('ctx, 'input) } + + val mac"$x" = 1 + assert(x == 1) diff --git a/tests/pos-macros/i8577e/Macro_1.scala b/tests/pos-macros/i8577e/Macro_1.scala new file mode 100644 index 000000000000..cf133d33a100 --- /dev/null +++ b/tests/pos-macros/i8577e/Macro_1.scala @@ -0,0 +1,11 @@ +package i8577 + +import scala.quoted._ + +object Macro: + opaque type StrCtx = StringContext + def apply(ctx: StringContext): StrCtx = ctx + def unapply(ctx: StrCtx): Option[StringContext] = Some(ctx) + +def implUnapply[T, U](sc: Expr[Macro.StrCtx], input: Expr[U])(using Type[U])(using Quotes): Expr[Option[Seq[U]]] = + '{ Some(Seq(${input})) } diff --git a/tests/pos-macros/i8577e/Main_2.scala b/tests/pos-macros/i8577e/Main_2.scala new file mode 100644 index 000000000000..598d18d2faec --- /dev/null +++ b/tests/pos-macros/i8577e/Main_2.scala @@ -0,0 +1,9 @@ +package i8577 + +def main: Unit = + extension (ctx: StringContext) def mac: Macro.StrCtx = Macro(ctx) + extension [T] (inline ctx: Macro.StrCtx) inline def unapplySeq[U](inline input: U): Option[Seq[U]] = + ${ implUnapply('ctx, 'input) } + + val mac"$x" = 1 + assert(x == 1) diff --git a/tests/pos-macros/i8577f/Macro_1.scala b/tests/pos-macros/i8577f/Macro_1.scala new file mode 100644 index 000000000000..7d3b5df28701 --- /dev/null +++ b/tests/pos-macros/i8577f/Macro_1.scala @@ -0,0 +1,11 @@ +package i8577 + +import scala.quoted._ + +object Macro: + opaque type StrCtx = StringContext + def apply(ctx: StringContext): StrCtx = ctx + def unapply(ctx: StrCtx): Option[StringContext] = Some(ctx) + +def implUnapply[T, U](sc: Expr[Macro.StrCtx], input: Expr[(T, U)])(using Type[T], Type[U])(using Quotes): Expr[Option[Seq[(T, U)]]] = + '{ Some(Seq(${input})) } diff --git a/tests/pos-macros/i8577f/Main_2.scala b/tests/pos-macros/i8577f/Main_2.scala new file mode 100644 index 
000000000000..fd1bb3e6186f --- /dev/null +++ b/tests/pos-macros/i8577f/Main_2.scala @@ -0,0 +1,12 @@ +package i8577 + +def main: Unit = + extension (ctx: StringContext) def mac: Macro.StrCtx = Macro(ctx) + extension [T] (inline ctx: Macro.StrCtx) inline def unapplySeq[U](inline input: (T, U)): Option[Seq[(T, U)]] = + ${ implUnapply('ctx, 'input) } + + val mac"$x" = (1, 2) + assert(x == (1, 2)) + + val mac"$y" = (1, "a") + assert(y == (1, "a")) diff --git a/tests/pos-macros/i8577g/Macro_1.scala b/tests/pos-macros/i8577g/Macro_1.scala new file mode 100644 index 000000000000..2da12d6e23fd --- /dev/null +++ b/tests/pos-macros/i8577g/Macro_1.scala @@ -0,0 +1,11 @@ +package i8577 + +import scala.quoted._ + +object Macro: + opaque type StrCtx = StringContext + def apply(ctx: StringContext): StrCtx = ctx + def unapply(ctx: StrCtx): Option[StringContext] = Some(ctx) + +def implUnapply[T, U](sc: Expr[Macro.StrCtx], input: Expr[T | U])(using Type[T], Type[U])(using Quotes): Expr[Option[Seq[T | U]]] = + '{ Some(Seq(${input})) } diff --git a/tests/pos-macros/i8577g/Main_2.scala b/tests/pos-macros/i8577g/Main_2.scala new file mode 100644 index 000000000000..4998b9962802 --- /dev/null +++ b/tests/pos-macros/i8577g/Main_2.scala @@ -0,0 +1,9 @@ +package i8577 + +def main: Unit = + extension (ctx: StringContext) def mac: Macro.StrCtx = Macro(ctx) + extension [T] (inline ctx: Macro.StrCtx) inline def unapplySeq[U](inline input: T | U): Option[Seq[T | U]] = + ${ implUnapply('ctx, 'input) } + + val mac"$x" = 1 + assert(x == 1) diff --git a/tests/pos-macros/i8577h/Macro_1.scala b/tests/pos-macros/i8577h/Macro_1.scala new file mode 100644 index 000000000000..2da12d6e23fd --- /dev/null +++ b/tests/pos-macros/i8577h/Macro_1.scala @@ -0,0 +1,11 @@ +package i8577 + +import scala.quoted._ + +object Macro: + opaque type StrCtx = StringContext + def apply(ctx: StringContext): StrCtx = ctx + def unapply(ctx: StrCtx): Option[StringContext] = Some(ctx) + +def implUnapply[T, U](sc: 
Expr[Macro.StrCtx], input: Expr[T | U])(using Type[T], Type[U])(using Quotes): Expr[Option[Seq[T | U]]] = + '{ Some(Seq(${input})) } diff --git a/tests/pos-macros/i8577h/Main_2.scala b/tests/pos-macros/i8577h/Main_2.scala new file mode 100644 index 000000000000..9fe2565a0ec3 --- /dev/null +++ b/tests/pos-macros/i8577h/Main_2.scala @@ -0,0 +1,9 @@ +package i8577 + +def main: Unit = + extension (ctx: StringContext) def mac: Macro.StrCtx = Macro(ctx) + extension [T] (inline ctx: Macro.StrCtx) inline def unapplySeq[U](inline input: U | T): Option[Seq[T | U]] = + ${ implUnapply('ctx, 'input) } + + val mac"$x" = 1 + assert(x == 1) diff --git a/tests/pos-macros/i8858/Macro_1.scala b/tests/pos-macros/i8858/Macro_1.scala index 8eb0182c2779..d1647b3dbba6 100644 --- a/tests/pos-macros/i8858/Macro_1.scala +++ b/tests/pos-macros/i8858/Macro_1.scala @@ -5,5 +5,5 @@ def mcrImpl(expr: Expr[Any])(using Quotes): Expr[Any] = import quotes.reflect._ expr.asTerm match case Inlined(_, _, id1) => - println(id1.tpe.widen.show) + id1.tpe.widen.show '{()} diff --git a/tests/pos-macros/i8887.scala b/tests/pos-macros/i8887.scala new file mode 100644 index 000000000000..5bfd5501063b --- /dev/null +++ b/tests/pos-macros/i8887.scala @@ -0,0 +1,3 @@ +import scala.quoted._ +inline def foo(x: Any): Any = ${ expr[x.type] } +def expr[X](using Quotes): Expr[Any] = ??? diff --git a/tests/pos-macros/i9360.scala b/tests/pos-macros/i9360.scala new file mode 100644 index 000000000000..699ef5f38bee --- /dev/null +++ b/tests/pos-macros/i9360.scala @@ -0,0 +1,22 @@ +package a + +import scala.quoted._ + +trait CPM[F[_]] + +def fun[M[_],T](t:T)(using m:CPM[M]):M[T] = ??? + +object M { + + inline def transform[F[_],T](t:T): F[T] = + ${ transformImpl[F,T]('t) } + + def transformImpl[F[_]:Type,T:Type](t:Expr[T])(using Quotes):Expr[F[T]] = { + import quotes.reflect._ + t match { + case '{ type mt[_]; a.fun[`mt`, tt]($t)(using $m) } => ??? 
+ } + + } + +} diff --git a/tests/pos-macros/i9684/Macro_1.scala b/tests/pos-macros/i9684/Macro_1.scala index 7b47efefdfd8..2fef3ac99817 100644 --- a/tests/pos-macros/i9684/Macro_1.scala +++ b/tests/pos-macros/i9684/Macro_1.scala @@ -9,7 +9,6 @@ object X { def printTypeImpl[A:Type](x:Expr[A])(using Quotes): Expr[String] = { import quotes.reflect._ val value: String = x.asTerm.tpe.show - println(value) Expr( value ) } diff --git a/tests/pos-macros/macro-deprecation.scala b/tests/pos-macros/macro-deprecation.scala new file mode 100644 index 000000000000..ff14f96ac7fa --- /dev/null +++ b/tests/pos-macros/macro-deprecation.scala @@ -0,0 +1,4 @@ +import scala.quoted.* + +inline def f = ${ impl } // error +@deprecated def impl(using Quotes) = '{1} diff --git a/tests/pos-macros/macro-experimental.scala b/tests/pos-macros/macro-experimental.scala new file mode 100644 index 000000000000..dc011f4e45b9 --- /dev/null +++ b/tests/pos-macros/macro-experimental.scala @@ -0,0 +1,5 @@ +import scala.quoted.* +import scala.annotation.experimental + +inline def f = ${ impl } // error +@experimental def impl(using Quotes) = '{1} diff --git a/tests/pos-macros/macro-inline-by-name-cast/Macro_1.scala b/tests/pos-macros/macro-inline-by-name-cast/Macro_1.scala new file mode 100644 index 000000000000..7d9e186ed94f --- /dev/null +++ b/tests/pos-macros/macro-inline-by-name-cast/Macro_1.scala @@ -0,0 +1,7 @@ +import scala.quoted.* + +inline def f[T](inline code: =>T): Any = + ${ create[T]('{ () => code }) } + +def create[T: Type](code: Expr[() => T])(using Quotes): Expr[Any] = + '{ identity($code) } diff --git a/tests/pos-macros/macro-inline-by-name-cast/Test_2.scala b/tests/pos-macros/macro-inline-by-name-cast/Test_2.scala new file mode 100644 index 000000000000..161f58748342 --- /dev/null +++ b/tests/pos-macros/macro-inline-by-name-cast/Test_2.scala @@ -0,0 +1 @@ +def test: Unit = f[Unit](???) 
diff --git a/tests/pos-macros/path-dependent-type-capture/Macro_1.scala b/tests/pos-macros/path-dependent-type-capture/Macro_1.scala new file mode 100644 index 000000000000..588e50846eff --- /dev/null +++ b/tests/pos-macros/path-dependent-type-capture/Macro_1.scala @@ -0,0 +1,70 @@ +import scala.quoted.* + +trait A: + type T + val b: B + +trait B: + type T + def f: Unit + +trait C0: + type U + val d: D0 +trait D0: + type U + def h: Unit +object Macro: + inline def generateCode: Unit = ${ generateCodeExpr } + + def generateCodeExpr(using Quotes): Expr[Unit] = + '{ + $testLocalPathsGlobalClasses + $testLocalPathsLocalClasses + } + + def testLocalPathsGlobalClasses(using Quotes): Expr[Unit] = + '{ + type T + val a: A = ??? + ${ + val expr = '{ + val t: T = ??? + val aT: a.T = ??? + val abT: a.b.T = ??? + val aRef: a.type = ??? + aRef.b + aRef.b.f + val abRef: a.b.type = ??? + abRef.f + () + } + expr + } + } + + def testLocalPathsLocalClasses(using Quotes): Expr[Unit] = + '{ + type U + trait C extends C0: + type U + val d: D + trait D extends D0: + type U + def h: Unit + val c: C = ??? + ${ + val expr = '{ + val u: U = ??? + val cU: c.U = ??? + val cdU: c.d.U = ??? + val cRef: c.type = ??? + cRef.d + cRef.d.h + val cdRef: c.d.type = ??? 
+ cdRef.h + () + } + expr + } + } diff --git a/tests/pos-macros/path-dependent-type-capture/Test_2.scala b/tests/pos-macros/path-dependent-type-capture/Test_2.scala new file mode 100644 index 000000000000..c12cd8d2436a --- /dev/null +++ b/tests/pos-macros/path-dependent-type-capture/Test_2.scala @@ -0,0 +1 @@ +@main def test = Macro.generateCode diff --git a/tests/pos-special/fatal-warnings/i10994.scala b/tests/pos-special/fatal-warnings/i10994.scala deleted file mode 100644 index 99ae647466b1..000000000000 --- a/tests/pos-special/fatal-warnings/i10994.scala +++ /dev/null @@ -1,2 +0,0 @@ -def foo = true match - case (b: Boolean): Boolean => () diff --git a/tests/pos-special/fatal-warnings/i16649-irrefutable.scala b/tests/pos-special/fatal-warnings/i16649-irrefutable.scala new file mode 100644 index 000000000000..b9aa6d2acf52 --- /dev/null +++ b/tests/pos-special/fatal-warnings/i16649-irrefutable.scala @@ -0,0 +1,7 @@ +import quoted.* + +def foo(using Quotes)(x: Expr[Int]) = + val '{ $y } = x + val '{ $a: Any } = x + val '{ $b: Int } = x + val '[List[Int]] = Type.of[List[Int]] diff --git a/tests/pos-special/fatal-warnings/i17314.scala b/tests/pos-special/fatal-warnings/i17314.scala new file mode 100644 index 000000000000..23f988741bed --- /dev/null +++ b/tests/pos-special/fatal-warnings/i17314.scala @@ -0,0 +1,33 @@ +// scalac: "-Wunused:all" + +import java.net.URI + +object circelike { + import scala.compiletime.summonInline + import scala.deriving.Mirror + + type Codec[T] + type Configuration + trait ConfiguredCodec[T] + object ConfiguredCodec: + inline final def derived[A](using conf: Configuration)(using + inline mirror: Mirror.Of[A] + ): ConfiguredCodec[A] = + new ConfiguredCodec[A]: + val codec = summonInline[Codec[URI]] // simplification +} + +object foo { + import circelike.{Codec, Configuration} + + given Configuration = ??? + given Codec[URI] = ??? 
+} + +object bar { + import circelike.Codec + import circelike.{Configuration, ConfiguredCodec} + import foo.{given Configuration, given Codec[URI]} + + case class Operator(url: URI) derives ConfiguredCodec +} diff --git a/tests/pos-special/fatal-warnings/i17314a.scala b/tests/pos-special/fatal-warnings/i17314a.scala new file mode 100644 index 000000000000..468b956fb04c --- /dev/null +++ b/tests/pos-special/fatal-warnings/i17314a.scala @@ -0,0 +1,12 @@ +// scalac: -Wunused:all + +package foo: + class Foo[T] + given Foo[Int] = new Foo[Int] + + +package bar: + import foo.{given foo.Foo[Int]} + import foo.Foo + + val repro: Foo[Int] = summon[Foo[Int]] diff --git a/tests/pos-special/isInstanceOf/i16899.scala b/tests/pos-special/isInstanceOf/i16899.scala new file mode 100644 index 000000000000..650e1e5c7b23 --- /dev/null +++ b/tests/pos-special/isInstanceOf/i16899.scala @@ -0,0 +1,5 @@ +sealed trait Unset + +def foo(v: Unset|Option[Int]): Unit = v match + case v: Unset => () + case v: Option[Int] => () diff --git a/tests/pos-with-compiler-cc/backend/ScalaPrimitivesOps.scala b/tests/pos-with-compiler-cc/backend/ScalaPrimitivesOps.scala new file mode 100644 index 000000000000..6b5bfbc3e00e --- /dev/null +++ b/tests/pos-with-compiler-cc/backend/ScalaPrimitivesOps.scala @@ -0,0 +1,232 @@ +package dotty.tools +package backend + +object ScalaPrimitivesOps extends ScalaPrimitivesOps + +class ScalaPrimitivesOps { + // Arithmetic unary operations + inline val POS = 1 // +x + inline val NEG = 2 // -x + inline val NOT = 3 // ~x + + // Arithmetic binary operations + inline val ADD = 10 // x + y + inline val SUB = 11 // x - y + inline val MUL = 12 // x * y + inline val DIV = 13 // x / y + inline val MOD = 14 // x % y + + // Bitwise operations + inline val OR = 20 // x | y + inline val XOR = 21 // x ^ y + inline val AND = 22 // x & y + + // Shift operations + inline val LSL = 30 // x << y + inline val LSR = 31 // x >>> y + inline val ASR = 32 // x >> y + + // Comparison operations + 
inline val ID = 40 // x eq y + inline val NI = 41 // x ne y + inline val EQ = 42 // x == y + inline val NE = 43 // x != y + inline val LT = 44 // x < y + inline val LE = 45 // x <= y + inline val GT = 46 // x > y + inline val GE = 47 // x >= y + + // Boolean unary operations + inline val ZNOT = 50 // !x + + // Boolean binary operations + inline val ZOR = 60 // x || y + inline val ZAND = 61 // x && y + + // Array operations + inline val LENGTH = 70 // x.length + inline val APPLY = 71 // x(y) + inline val UPDATE = 72 // x(y) = z + + // Any operations + inline val IS = 80 // x.is[y] + inline val AS = 81 // x.as[y] + inline val HASH = 87 // x.## + + // AnyRef operations + inline val SYNCHRONIZED = 90 // x.synchronized(y) + + // String operations + inline val CONCAT = 100 // String.valueOf(x)+String.valueOf(y) + + // coercions + inline val COERCE = 101 + + // RunTime operations + inline val BOX = 110 // RunTime.box_(x) + inline val UNBOX = 111 // RunTime.unbox_(x) + inline val NEW_ZARRAY = 112 // RunTime.zarray(x) + inline val NEW_BARRAY = 113 // RunTime.barray(x) + inline val NEW_SARRAY = 114 // RunTime.sarray(x) + inline val NEW_CARRAY = 115 // RunTime.carray(x) + inline val NEW_IARRAY = 116 // RunTime.iarray(x) + inline val NEW_LARRAY = 117 // RunTime.larray(x) + inline val NEW_FARRAY = 118 // RunTime.farray(x) + inline val NEW_DARRAY = 119 // RunTime.darray(x) + inline val NEW_OARRAY = 120 // RunTime.oarray(x) + + inline val ZARRAY_LENGTH = 131 // RunTime.zarray_length(x) + inline val BARRAY_LENGTH = 132 // RunTime.barray_length(x) + inline val SARRAY_LENGTH = 133 // RunTime.sarray_length(x) + inline val CARRAY_LENGTH = 134 // RunTime.carray_length(x) + inline val IARRAY_LENGTH = 135 // RunTime.iarray_length(x) + inline val LARRAY_LENGTH = 136 // RunTime.larray_length(x) + inline val FARRAY_LENGTH = 137 // RunTime.farray_length(x) + inline val DARRAY_LENGTH = 138 // RunTime.darray_length(x) + inline val OARRAY_LENGTH = 139 // RunTime.oarray_length(x) + + inline val 
ZARRAY_GET = 140 // RunTime.zarray_get(x,y) + inline val BARRAY_GET = 141 // RunTime.barray_get(x,y) + inline val SARRAY_GET = 142 // RunTime.sarray_get(x,y) + inline val CARRAY_GET = 143 // RunTime.carray_get(x,y) + inline val IARRAY_GET = 144 // RunTime.iarray_get(x,y) + inline val LARRAY_GET = 145 // RunTime.larray_get(x,y) + inline val FARRAY_GET = 146 // RunTime.farray_get(x,y) + inline val DARRAY_GET = 147 // RunTime.darray_get(x,y) + inline val OARRAY_GET = 148 // RunTime.oarray_get(x,y) + + inline val ZARRAY_SET = 150 // RunTime.zarray(x,y,z) + inline val BARRAY_SET = 151 // RunTime.barray(x,y,z) + inline val SARRAY_SET = 152 // RunTime.sarray(x,y,z) + inline val CARRAY_SET = 153 // RunTime.carray(x,y,z) + inline val IARRAY_SET = 154 // RunTime.iarray(x,y,z) + inline val LARRAY_SET = 155 // RunTime.larray(x,y,z) + inline val FARRAY_SET = 156 // RunTime.farray(x,y,z) + inline val DARRAY_SET = 157 // RunTime.darray(x,y,z) + inline val OARRAY_SET = 158 // RunTime.oarray(x,y,z) + + inline val B2B = 200 // RunTime.b2b(x) + inline val B2S = 201 // RunTime.b2s(x) + inline val B2C = 202 // RunTime.b2c(x) + inline val B2I = 203 // RunTime.b2i(x) + inline val B2L = 204 // RunTime.b2l(x) + inline val B2F = 205 // RunTime.b2f(x) + inline val B2D = 206 // RunTime.b2d(x) + + inline val S2B = 210 // RunTime.s2b(x) + inline val S2S = 211 // RunTime.s2s(x) + inline val S2C = 212 // RunTime.s2c(x) + inline val S2I = 213 // RunTime.s2i(x) + inline val S2L = 214 // RunTime.s2l(x) + inline val S2F = 215 // RunTime.s2f(x) + inline val S2D = 216 // RunTime.s2d(x) + + inline val C2B = 220 // RunTime.c2b(x) + inline val C2S = 221 // RunTime.c2s(x) + inline val C2C = 222 // RunTime.c2c(x) + inline val C2I = 223 // RunTime.c2i(x) + inline val C2L = 224 // RunTime.c2l(x) + inline val C2F = 225 // RunTime.c2f(x) + inline val C2D = 226 // RunTime.c2d(x) + + inline val I2B = 230 // RunTime.i2b(x) + inline val I2S = 231 // RunTime.i2s(x) + inline val I2C = 232 // RunTime.i2c(x) + inline 
val I2I = 233 // RunTime.i2i(x) + inline val I2L = 234 // RunTime.i2l(x) + inline val I2F = 235 // RunTime.i2f(x) + inline val I2D = 236 // RunTime.i2d(x) + + inline val L2B = 240 // RunTime.l2b(x) + inline val L2S = 241 // RunTime.l2s(x) + inline val L2C = 242 // RunTime.l2c(x) + inline val L2I = 243 // RunTime.l2i(x) + inline val L2L = 244 // RunTime.l2l(x) + inline val L2F = 245 // RunTime.l2f(x) + inline val L2D = 246 // RunTime.l2d(x) + + inline val F2B = 250 // RunTime.f2b(x) + inline val F2S = 251 // RunTime.f2s(x) + inline val F2C = 252 // RunTime.f2c(x) + inline val F2I = 253 // RunTime.f2i(x) + inline val F2L = 254 // RunTime.f2l(x) + inline val F2F = 255 // RunTime.f2f(x) + inline val F2D = 256 // RunTime.f2d(x) + + inline val D2B = 260 // RunTime.d2b(x) + inline val D2S = 261 // RunTime.d2s(x) + inline val D2C = 262 // RunTime.d2c(x) + inline val D2I = 263 // RunTime.d2i(x) + inline val D2L = 264 // RunTime.d2l(x) + inline val D2F = 265 // RunTime.d2f(x) + inline val D2D = 266 // RunTime.d2d(x) + + /** Check whether the given operation code is an array operation. 
*/ + def isArrayOp(code: Int): Boolean = + isArrayNew(code) | isArrayLength(code) | isArrayGet(code) | isArraySet(code) + + def isArrayNew(code: Int): Boolean = code match { + case NEW_ZARRAY | NEW_BARRAY | NEW_SARRAY | NEW_CARRAY | + NEW_IARRAY | NEW_LARRAY | NEW_FARRAY | NEW_DARRAY | + NEW_OARRAY => true + case _ => false + } + + def isArrayLength(code: Int): Boolean = code match { + case ZARRAY_LENGTH | BARRAY_LENGTH | SARRAY_LENGTH | CARRAY_LENGTH | + IARRAY_LENGTH | LARRAY_LENGTH | FARRAY_LENGTH | DARRAY_LENGTH | + OARRAY_LENGTH | LENGTH => true + case _ => false + } + + def isArrayGet(code: Int): Boolean = code match { + case ZARRAY_GET | BARRAY_GET | SARRAY_GET | CARRAY_GET | + IARRAY_GET | LARRAY_GET | FARRAY_GET | DARRAY_GET | + OARRAY_GET | APPLY => true + case _ => false + } + + def isArraySet(code: Int): Boolean = code match { + case ZARRAY_SET | BARRAY_SET | SARRAY_SET | CARRAY_SET | + IARRAY_SET | LARRAY_SET | FARRAY_SET | DARRAY_SET | + OARRAY_SET | UPDATE => true + case _ => false + } + + /** Check whether the given code is a comparison operator */ + def isComparisonOp(code: Int): Boolean = code match { + case ID | NI | EQ | NE | + LT | LE | GT | GE => true + + case _ => false + } + def isUniversalEqualityOp(code: Int): Boolean = (code == EQ) || (code == NE) + def isReferenceEqualityOp(code: Int): Boolean = (code == ID) || (code == NI) + + def isArithmeticOp(code: Int): Boolean = code match { + case POS | NEG | NOT => true; // unary + case ADD | SUB | MUL | + DIV | MOD => true; // binary + case OR | XOR | AND | + LSL | LSR | ASR => true; // bitwise + case _ => false + } + + def isLogicalOp(code: Int): Boolean = code match { + case ZNOT | ZAND | ZOR => true + case _ => false + } + + def isShiftOp(code: Int): Boolean = code match { + case LSL | LSR | ASR => true + case _ => false + } + + def isBitwiseOp(code: Int): Boolean = code match { + case OR | XOR | AND => true + case _ => false + } + + def isCoercion(code: Int): Boolean = (code >= B2B) && (code 
<= D2D) + +} diff --git a/tests/pos-with-compiler-cc/backend/WorklistAlgorithm.scala b/tests/pos-with-compiler-cc/backend/WorklistAlgorithm.scala new file mode 100644 index 000000000000..b3d98d425b2a --- /dev/null +++ b/tests/pos-with-compiler-cc/backend/WorklistAlgorithm.scala @@ -0,0 +1,57 @@ +package dotty.tools +package backend + +/** + * Simple implementation of a worklist algorithm. A processing + * function is applied repeatedly to the first element in the + * worklist, as long as the stack is not empty. + * + * The client class should mix-in this class and initialize the worklist + * field and define the `processElement` method. Then call the `run` method + * providing a function that initializes the worklist. + * + * @author Martin Odersky + * @version 1.0 + * @see [[scala.tools.nsc.backend.icode.Linearizers]] + */ +trait WorklistAlgorithm { + type Elem + class WList { + private var list: List[Elem] = Nil + def isEmpty = list.isEmpty + def nonEmpty = !isEmpty + def push(e: Elem): Unit = { list = e :: list } + def pop(): Elem = { + val head = list.head + list = list.tail + head + } + def pushAll(xs: Iterable[Elem]): Unit = xs.foreach(push) + def clear(): Unit = list = Nil + + } + + val worklist: WList + + /** + * Run the iterative algorithm until the worklist remains empty. + * The initializer is run once before the loop starts and should + * initialize the worklist. + */ + def run(initWorklist: => Unit) = { + initWorklist + + while (worklist.nonEmpty) + processElement(dequeue) + } + + /** + * Process the current element from the worklist. + */ + def processElement(e: Elem): Unit + + /** + * Remove and return the first element to be processed from the worklist. 
+ */ + def dequeue: Elem +} diff --git a/tests/pos-with-compiler-cc/backend/jvm/AsmUtils.scala b/tests/pos-with-compiler-cc/backend/jvm/AsmUtils.scala new file mode 100644 index 000000000000..e6393ce82054 --- /dev/null +++ b/tests/pos-with-compiler-cc/backend/jvm/AsmUtils.scala @@ -0,0 +1,65 @@ +package dotty.tools +package backend +package jvm + +import scala.language.unsafeNulls + +import scala.tools.asm.tree.{AbstractInsnNode} +import java.io.PrintWriter +import scala.tools.asm.util.{TraceClassVisitor, TraceMethodVisitor, Textifier} +import scala.tools.asm.ClassReader + +object AsmUtils { + + /** + * Print the bytecode of methods generated by GenBCode to the standard output. Only methods + * whose name contains `traceMethodPattern` are traced. + */ + final val traceMethodEnabled = sys.env.contains("printBCODE") + final val traceMethodPattern = sys.env.getOrElse("printBCODE", "") + + /** + * Print the bytecode of classes generated by GenBCode to the standard output. + */ + inline val traceClassEnabled = false + inline val traceClassPattern = "" + + /** + * Print the bytedcode of classes as they are serialized by the ASM library. The serialization + * performed by `asm.ClassWriter` can change the code generated by GenBCode. For example, it + * introduces stack map frames, it computes the maximal stack sizes, and it replaces dead + * code by NOPs (see also https://github.com/scala/scala/pull/3726#issuecomment-42861780). 
+ */ + inline val traceSerializedClassEnabled = false + inline val traceSerializedClassPattern = "" + + def traceMethod(mnode: MethodNode1): Unit = { + println(s"Bytecode for method ${mnode.name}") + val p = new Textifier + val tracer = new TraceMethodVisitor(p) + mnode.accept(tracer) + val w = new PrintWriter(System.out) + p.print(w) + w.flush() + } + + def traceClass(cnode: ClassNode1): Unit = { + println(s"Bytecode for class ${cnode.name}") + val w = new PrintWriter(System.out) + cnode.accept(new TraceClassVisitor(w)) + w.flush() + } + + def traceClass(bytes: Array[Byte]): Unit = traceClass(readClass(bytes)) + + def readClass(bytes: Array[Byte]): ClassNode1 = { + val node = new ClassNode1() + new ClassReader(bytes).accept(node, 0) + node + } + + def instructionString(instruction: AbstractInsnNode): String = instruction.getOpcode match { + case -1 => instruction.toString + case op => scala.tools.asm.util.Printer.OPCODES(op) + } +} diff --git a/tests/pos-with-compiler-cc/backend/jvm/BCodeAsmCommon.scala b/tests/pos-with-compiler-cc/backend/jvm/BCodeAsmCommon.scala new file mode 100644 index 000000000000..d95638be2695 --- /dev/null +++ b/tests/pos-with-compiler-cc/backend/jvm/BCodeAsmCommon.scala @@ -0,0 +1,158 @@ +package dotty.tools +package backend +package jvm + +import scala.language.unsafeNulls + +import dotty.tools.dotc.core.Flags._ +import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.report + +/** + * This trait contains code shared between GenBCode and GenASM that depends on types defined in + * the compiler cake (Global). + */ +final class BCodeAsmCommon[I <: DottyBackendInterface](val interface: I) { + import interface.given + import DottyBackendInterface.symExtensions + + /** + * True if `classSym` is an anonymous class or a local class. I.e., false if `classSym` is a + * member class. This method is used to decide if we should emit an EnclosingMethod attribute. 
+ * It is also used to decide whether the "owner" field in the InnerClass attribute should be + * null. + */ + def isAnonymousOrLocalClass(classSym: Symbol): Boolean = { + assert(classSym.isClass, s"not a class: $classSym") + // Here used to be an `assert(!classSym.isDelambdafyFunction)`: delambdafy lambda classes are + // always top-level. However, SI-8900 shows an example where the weak name-based implementation + // of isDelambdafyFunction failed (for a function declared in a package named "lambda"). + classSym.isAnonymousClass || { + val originalOwner = classSym.originalOwner + originalOwner != NoSymbol && !originalOwner.isClass + } + } + + /** + * Returns the enclosing method for non-member classes. In the following example + * + * class A { + * def f = { + * class B { + * class C + * } + * } + * } + * + * the method returns Some(f) for B, but None for C, because C is a member class. For non-member + * classes that are not enclosed by a method, it returns None: + * + * class A { + * { class B } + * } + * + * In this case, for B, we return None. + * + * The EnclosingMethod attribute needs to be added to non-member classes (see doc in BTypes). + * This is a source-level property, so we need to use the originalOwner chain to reconstruct it. + */ + private def enclosingMethodForEnclosingMethodAttribute(classSym: Symbol): Option[Symbol] = { + assert(classSym.isClass, classSym) + def enclosingMethod(sym: Symbol): Option[Symbol] = { + if (sym.isClass || sym == NoSymbol) None + else if (sym.is(Method)) Some(sym) + else enclosingMethod(sym.originalOwner) + } + enclosingMethod(classSym.originalOwner) + } + + /** + * The enclosing class for emitting the EnclosingMethod attribute. Since this is a source-level + * property, this method looks at the originalOwner chain. See doc in BTypes. 
+ */ + private def enclosingClassForEnclosingMethodAttribute(classSym: Symbol): Symbol = { + assert(classSym.isClass, classSym) + def enclosingClass(sym: Symbol): Symbol = { + if (sym.isClass) sym + else enclosingClass(sym.originalOwner.originalLexicallyEnclosingClass) + } + enclosingClass(classSym.originalOwner.originalLexicallyEnclosingClass) + } + + /*final*/ case class EnclosingMethodEntry(owner: String, name: String, methodDescriptor: String) + + /** + * Data for emitting an EnclosingMethod attribute. None if `classSym` is a member class (not + * an anonymous or local class). See doc in BTypes. + * + * The class is parametrized by two functions to obtain a bytecode class descriptor for a class + * symbol, and to obtain a method signature descriptor fro a method symbol. These function depend + * on the implementation of GenASM / GenBCode, so they need to be passed in. + */ + def enclosingMethodAttribute(classSym: Symbol, classDesc: Symbol => String, methodDesc: Symbol => String): Option[EnclosingMethodEntry] = { + if (isAnonymousOrLocalClass(classSym)) { + val methodOpt = enclosingMethodForEnclosingMethodAttribute(classSym) + report.debuglog(s"enclosing method for $classSym is $methodOpt (in ${methodOpt.map(_.enclosingClass)})") + Some(EnclosingMethodEntry( + classDesc(enclosingClassForEnclosingMethodAttribute(classSym)), + methodOpt.map(_.javaSimpleName).orNull, + methodOpt.map(methodDesc).orNull)) + } else { + None + } + } +} + +object BCodeAsmCommon{ + def ubytesToCharArray(bytes: Array[Byte]): Array[Char] = { + val ca = new Array[Char](bytes.length) + var idx = 0 + while(idx < bytes.length) { + val b: Byte = bytes(idx) + assert((b & ~0x7f) == 0) + ca(idx) = b.asInstanceOf[Char] + idx += 1 + } + + ca + } + + final def arrEncode(bSeven: Array[Byte]): Array[String] = { + var strs: List[String] = Nil + // chop into slices of at most 65535 bytes, counting 0x00 as taking two bytes (as per JVMS 4.4.7 The CONSTANT_Utf8_info Structure) + var prevOffset = 0 + var 
offset = 0 + var encLength = 0 + while(offset < bSeven.length) { + val deltaEncLength = (if(bSeven(offset) == 0) 2 else 1) + val newEncLength = encLength.toLong + deltaEncLength + if(newEncLength >= 65535) { + val ba = bSeven.slice(prevOffset, offset) + strs ::= new java.lang.String(ubytesToCharArray(ba)) + encLength = 0 + prevOffset = offset + } else { + encLength += deltaEncLength + offset += 1 + } + } + if(prevOffset < offset) { + assert(offset == bSeven.length) + val ba = bSeven.slice(prevOffset, offset) + strs ::= new java.lang.String(ubytesToCharArray(ba)) + } + assert(strs.size > 1, "encode instead as one String via strEncode()") // TODO too strict? + strs.reverse.toArray + } + + + def strEncode(bSeven: Array[Byte]): String = { + val ca = ubytesToCharArray(bSeven) + new java.lang.String(ca) + // debug val bvA = new asm.ByteVector; bvA.putUTF8(s) + // debug val enc: Array[Byte] = scala.reflect.internal.pickling.ByteCodecs.encode(bytes) + // debug assert(enc(idx) == bvA.getByte(idx + 2)) + // debug assert(bvA.getLength == enc.size + 2) + } + +} diff --git a/tests/pos-with-compiler-cc/backend/jvm/BCodeBodyBuilder.scala b/tests/pos-with-compiler-cc/backend/jvm/BCodeBodyBuilder.scala new file mode 100644 index 000000000000..bf10e37943a8 --- /dev/null +++ b/tests/pos-with-compiler-cc/backend/jvm/BCodeBodyBuilder.scala @@ -0,0 +1,1776 @@ +package dotty.tools +package backend +package jvm + +import scala.language.unsafeNulls + +import scala.annotation.switch +import scala.collection.mutable.SortedMap + +import scala.tools.asm +import scala.tools.asm.{Handle, Opcodes} +import BCodeHelpers.InvokeStyle + +import dotty.tools.dotc.ast.tpd +import dotty.tools.dotc.CompilationUnit +import dotty.tools.dotc.core.Constants._ +import dotty.tools.dotc.core.Flags.{Label => LabelFlag, _} +import dotty.tools.dotc.core.Types._ +import dotty.tools.dotc.core.StdNames.{nme, str} +import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.transform.Erasure +import 
dotty.tools.dotc.transform.SymUtils._ +import dotty.tools.dotc.util.Spans._ +import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Phases._ +import dotty.tools.dotc.core.Decorators.em +import dotty.tools.dotc.report + +/* + * + * @author Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/ + * @version 1.0 + * + */ +trait BCodeBodyBuilder extends BCodeSkelBuilder { + // import global._ + // import definitions._ + import tpd._ + import int.{_, given} + import DottyBackendInterface.symExtensions + import bTypes._ + import coreBTypes._ + + protected val primitives: DottyPrimitives + + /* + * Functionality to build the body of ASM MethodNode, except for `synchronized` and `try` expressions. + */ + abstract class PlainBodyBuilder(cunit: CompilationUnit) extends PlainSkelBuilder(cunit) { + + import Primitives.TestOp + + /* ---------------- helper utils for generating methods and code ---------------- */ + + def emit(opc: Int): Unit = { mnode.visitInsn(opc) } + + def emitZeroOf(tk: BType): Unit = { + tk match { + case BOOL => bc.boolconst(false) + case BYTE | + SHORT | + CHAR | + INT => bc.iconst(0) + case LONG => bc.lconst(0) + case FLOAT => bc.fconst(0) + case DOUBLE => bc.dconst(0) + case UNIT => () + case _ => emit(asm.Opcodes.ACONST_NULL) + } + } + + /* + * Emits code that adds nothing to the operand stack. + * Two main cases: `tree` is an assignment, + * otherwise an `adapt()` to UNIT is performed if needed. + */ + def genStat(tree: Tree): Unit = { + lineNumber(tree) + + tree match { + case Assign(lhs @ DesugaredSelect(qual, _), rhs) => + val isStatic = lhs.symbol.isStaticMember + if (!isStatic) { genLoadQualifier(lhs) } + genLoad(rhs, symInfoTK(lhs.symbol)) + lineNumber(tree) + // receiverClass is used in the bytecode to access the field. 
using sym.owner may lead to IllegalAccessError + val receiverClass = qual.tpe.typeSymbol + fieldStore(lhs.symbol, receiverClass) + + case Assign(lhs, rhs) => + val s = lhs.symbol + val Local(tk, _, idx, _) = locals.getOrMakeLocal(s) + + rhs match { + case Apply(Select(larg: Ident, nme.ADD), Literal(x) :: Nil) + if larg.symbol == s && tk.isIntSizedType && x.isShortRange => + lineNumber(tree) + bc.iinc(idx, x.intValue) + + case Apply(Select(larg: Ident, nme.SUB), Literal(x) :: Nil) + if larg.symbol == s && tk.isIntSizedType && Constant(-x.intValue).isShortRange => + lineNumber(tree) + bc.iinc(idx, -x.intValue) + + case _ => + genLoad(rhs, tk) + lineNumber(tree) + bc.store(idx, tk) + } + + case _ => + genLoad(tree, UNIT) + } + } + + /* Generate code for primitive arithmetic operations. */ + def genArithmeticOp(tree: Tree, code: Int): BType = tree match{ + case Apply(fun @ DesugaredSelect(larg, _), args) => + var resKind = tpeTK(larg) + + assert(resKind.isNumericType || (resKind == BOOL), + s"$resKind is not a numeric or boolean type [operation: ${fun.symbol}]") + + import ScalaPrimitivesOps._ + + args match { + // unary operation + case Nil => + genLoad(larg, resKind) + code match { + case POS => () // nothing + case NEG => bc.neg(resKind) + case NOT => bc.genPrimitiveArithmetic(Primitives.NOT, resKind) + case _ => abort(s"Unknown unary operation: ${fun.symbol.showFullName} code: $code") + } + + // binary operation + case rarg :: Nil => + val isShift = isShiftOp(code) + resKind = tpeTK(larg).maxType(if (isShift) INT else tpeTK(rarg)) + + if (isShift || isBitwiseOp(code)) { + assert(resKind.isIntegralType || (resKind == BOOL), + s"$resKind incompatible with arithmetic modulo operation.") + } + + genLoad(larg, resKind) + genLoad(rarg, if (isShift) INT else resKind) + + (code: @switch) match { + case ADD => bc add resKind + case SUB => bc sub resKind + case MUL => bc mul resKind + case DIV => bc div resKind + case MOD => bc rem resKind + + case OR | XOR | AND => 
bc.genPrimitiveLogical(code, resKind) + + case LSL | LSR | ASR => bc.genPrimitiveShift(code, resKind) + + case _ => abort(s"Unknown primitive: ${fun.symbol}[$code]") + } + + case _ => + abort(s"Too many arguments for primitive function: $tree") + } + lineNumber(tree) + resKind + } + + /* Generate primitive array operations. */ + def genArrayOp(tree: Tree, code: Int, expectedType: BType): BType = tree match{ + + case Apply(DesugaredSelect(arrayObj, _), args) => + import ScalaPrimitivesOps._ + val k = tpeTK(arrayObj) + genLoad(arrayObj, k) + val elementType = typeOfArrayOp.getOrElse[bTypes.BType](code, abort(s"Unknown operation on arrays: $tree code: $code")) + + var generatedType = expectedType + + if (isArrayGet(code)) { + // load argument on stack + assert(args.length == 1, s"Too many arguments for array get operation: $tree"); + genLoad(args.head, INT) + generatedType = k.asArrayBType.componentType + bc.aload(elementType) + } + else if (isArraySet(code)) { + val List(a1, a2) = args + genLoad(a1, INT) + genLoad(a2) + generatedType = UNIT + bc.astore(elementType) + } else { + generatedType = INT + emit(asm.Opcodes.ARRAYLENGTH) + } + lineNumber(tree) + + generatedType + } + + def genLoadIfTo(tree: If, expectedType: BType, dest: LoadDestination): BType = tree match{ + case If(condp, thenp, elsep) => + + val success = new asm.Label + val failure = new asm.Label + + val hasElse = !elsep.isEmpty && (elsep match { + case Literal(value) if value.tag == UnitTag => false + case _ => true + }) + + genCond(condp, success, failure, targetIfNoJump = success) + markProgramPoint(success) + + if dest == LoadDestination.FallThrough then + if hasElse then + val thenKind = tpeTK(thenp) + val elseKind = tpeTK(elsep) + def hasUnitBranch = (thenKind == UNIT || elseKind == UNIT) && expectedType == UNIT + val resKind = if (hasUnitBranch) UNIT else tpeTK(tree) + + val postIf = new asm.Label + genLoadTo(thenp, resKind, LoadDestination.Jump(postIf)) + markProgramPoint(failure) + 
genLoadTo(elsep, resKind, LoadDestination.FallThrough) + markProgramPoint(postIf) + resKind + else + genLoad(thenp, UNIT) + markProgramPoint(failure) + UNIT + end if + else + genLoadTo(thenp, expectedType, dest) + markProgramPoint(failure) + if hasElse then + genLoadTo(elsep, expectedType, dest) + else + genAdaptAndSendToDest(UNIT, expectedType, dest) + expectedType + end if + } + + def genPrimitiveOp(tree: Apply, expectedType: BType): BType = (tree: @unchecked) match { + case Apply(fun @ DesugaredSelect(receiver, _), _) => + val sym = tree.symbol + + val code = primitives.getPrimitive(tree, receiver.tpe) + + import ScalaPrimitivesOps._ + + if (isArithmeticOp(code)) genArithmeticOp(tree, code) + else if (code == CONCAT) genStringConcat(tree) + else if (code == HASH) genScalaHash(receiver) + else if (isArrayOp(code)) genArrayOp(tree, code, expectedType) + else if (isLogicalOp(code) || isComparisonOp(code)) { + val success, failure, after = new asm.Label + genCond(tree, success, failure, targetIfNoJump = success) + // success block + markProgramPoint(success) + bc boolconst true + bc goTo after + // failure block + markProgramPoint(failure) + bc boolconst false + // after + markProgramPoint(after) + + BOOL + } + else if (isCoercion(code)) { + genLoad(receiver) + lineNumber(tree) + genCoercion(code) + coercionTo(code) + } + else abort( + s"Primitive operation not handled yet: ${sym.showFullName}(${fun.symbol.name}) at: ${tree.span}" + ) + } + + def genLoad(tree: Tree): Unit = { + genLoad(tree, tpeTK(tree)) + } + + /* Generate code for trees that produce values on the stack */ + def genLoad(tree: Tree, expectedType: BType): Unit = + genLoadTo(tree, expectedType, LoadDestination.FallThrough) + + /* Generate code for trees that produce values, sent to a given `LoadDestination`. 
*/ + def genLoadTo(tree: Tree, expectedType: BType, dest: LoadDestination): Unit = + var generatedType = expectedType + var generatedDest = LoadDestination.FallThrough + + lineNumber(tree) + + tree match { + case tree@ValDef(_, _, _) => + val sym = tree.symbol + /* most of the time, !locals.contains(sym), unless the current activation of genLoad() is being called + while duplicating a finalizer that contains this ValDef. */ + val loc = locals.getOrMakeLocal(sym) + val Local(tk, _, idx, isSynth) = loc + if (tree.rhs == tpd.EmptyTree) { emitZeroOf(tk) } + else { genLoad(tree.rhs, tk) } + bc.store(idx, tk) + val localVarStart = currProgramPoint() + if (!isSynth) { // there are case ValDef's emitted by patmat + varsInScope ::= (sym -> localVarStart) + } + generatedType = UNIT + + case t @ If(_, _, _) => + generatedType = genLoadIfTo(t, expectedType, dest) + generatedDest = dest + + case t @ Labeled(_, _) => + generatedType = genLabeledTo(t, expectedType, dest) + generatedDest = dest + + case r: Return => + genReturn(r) + generatedDest = LoadDestination.Return + + case t @ WhileDo(_, _) => + generatedDest = genWhileDo(t) + generatedType = UNIT + + case t @ Try(_, _, _) => + generatedType = genLoadTry(t) + + case t: Apply if t.fun.symbol eq defn.throwMethod => + val thrownExpr = t.args.head + val thrownKind = tpeTK(thrownExpr) + genLoadTo(thrownExpr, thrownKind, LoadDestination.Throw) + generatedDest = LoadDestination.Throw + + case New(tpt) => + abort(s"Unexpected New(${tpt.tpe.showSummary()}/$tpt) reached GenBCode.\n" + + " Call was genLoad" + ((tree, expectedType))) + + case t @ Closure(env, call, tpt) => + val functionalInterface: Symbol = + if !tpt.isEmpty then tpt.tpe.classSymbol + else t.tpe.classSymbol + val (fun, args) = call match { + case Apply(fun, args) => (fun, args) + case t @ DesugaredSelect(_, _) => (t, Nil) // TODO: use Select + case t @ Ident(_) => (t, Nil) + } + + if (!fun.symbol.isStaticMember) { + // load receiver of non-static implementation of 
lambda + + // darkdimius: I haven't found in spec `this` reference should go + // but I was able to derrive it by reading + // AbstractValidatingLambdaMetafactory.validateMetafactoryArgs + + val DesugaredSelect(prefix, _) = fun: @unchecked + genLoad(prefix) + } + + genLoadArguments(env, fun.symbol.info.firstParamTypes map toTypeKind) + generatedType = genInvokeDynamicLambda(NoSymbol, fun.symbol, env.size, functionalInterface) + + case app @ Apply(_, _) => + generatedType = genApply(app, expectedType) + + case This(qual) => + val symIsModuleClass = tree.symbol.is(ModuleClass) + assert(tree.symbol == claszSymbol || symIsModuleClass, + s"Trying to access the this of another class: tree.symbol = ${tree.symbol}, class symbol = $claszSymbol compilation unit: $cunit") + if (symIsModuleClass && tree.symbol != claszSymbol) { + generatedType = genLoadModule(tree) + } + else { + mnode.visitVarInsn(asm.Opcodes.ALOAD, 0) + // When compiling Array.scala, the constructor invokes `Array.this.super.`. The expectedType + // is `[Object` (computed by typeToBType, the type of This(Array) is `Array[T]`). If we would set + // the generatedType to `Array` below, the call to adapt at the end would fail. The situation is + // similar for primitives (`I` vs `Int`). + if (tree.symbol != defn.ArrayClass && !tree.symbol.isPrimitiveValueClass) { + generatedType = classBTypeFromSymbol(claszSymbol) + } + } + + case DesugaredSelect(Ident(nme.EMPTY_PACKAGE), module) => + assert(tree.symbol.is(Module), s"Selection of non-module from empty package: $tree sym: ${tree.symbol} at: ${tree.span}") + genLoadModule(tree) + + case DesugaredSelect(qualifier, _) => + val sym = tree.symbol + generatedType = symInfoTK(sym) + val qualSafeToElide = tpd.isIdempotentExpr(qualifier) + + def genLoadQualUnlessElidable(): Unit = { if (!qualSafeToElide) { genLoadQualifier(tree) } } + + // receiverClass is used in the bytecode to access the field. 
using sym.owner may lead to IllegalAccessError + def receiverClass = qualifier.tpe.typeSymbol + if (sym.is(Module)) { + genLoadQualUnlessElidable() + genLoadModule(tree) + } else if (sym.isStaticMember) { + genLoadQualUnlessElidable() + fieldLoad(sym, receiverClass) + } else { + genLoadQualifier(tree) + fieldLoad(sym, receiverClass) + } + + case t @ Ident(name) => + val sym = tree.symbol + val tk = symInfoTK(sym) + generatedType = tk + + val desugared = cachedDesugarIdent(t) + desugared match { + case None => + if (!sym.is(Package)) { + if (sym.is(Module)) genLoadModule(sym) + else locals.load(sym) + } + case Some(t) => + genLoad(t, generatedType) + } + + case Literal(value) => + if (value.tag != UnitTag) (value.tag, expectedType) match { + case (IntTag, LONG ) => bc.lconst(value.longValue); generatedType = LONG + case (FloatTag, DOUBLE) => bc.dconst(value.doubleValue); generatedType = DOUBLE + case (NullTag, _ ) => bc.emit(asm.Opcodes.ACONST_NULL); generatedType = srNullRef + case _ => genConstant(value); generatedType = tpeTK(tree) + } + + case blck @ Block(stats, expr) => + if(stats.isEmpty) + genLoadTo(expr, expectedType, dest) + else + genBlockTo(blck, expectedType, dest) + generatedDest = dest + + case Typed(Super(_, _), _) => + genLoadTo(tpd.This(claszSymbol.asClass), expectedType, dest) + generatedDest = dest + + case Typed(expr, _) => + genLoadTo(expr, expectedType, dest) + generatedDest = dest + + case Assign(_, _) => + generatedType = UNIT + genStat(tree) + + case av @ ArrayValue(_, _) => + generatedType = genArrayValue(av) + + case mtch @ Match(_, _) => + generatedType = genMatchTo(mtch, expectedType, dest) + generatedDest = dest + + case tpd.EmptyTree => if (expectedType != UNIT) { emitZeroOf(expectedType) } + + + case t: TypeApply => // dotty specific + generatedType = genTypeApply(t) + + case _ => abort(s"Unexpected tree in genLoad: $tree/${tree.getClass} at: ${tree.span}") + } + + // emit conversion and send to the right destination + if 
generatedDest == LoadDestination.FallThrough then + genAdaptAndSendToDest(generatedType, expectedType, dest) + end genLoadTo + + def genAdaptAndSendToDest(generatedType: BType, expectedType: BType, dest: LoadDestination): Unit = + if generatedType != expectedType then + adapt(generatedType, expectedType) + + dest match + case LoadDestination.FallThrough => + () + case LoadDestination.Jump(label) => + bc goTo label + case LoadDestination.Return => + bc emitRETURN returnType + case LoadDestination.Throw => + val thrownType = expectedType + // `throw null` is valid although scala.Null (as defined in src/libray-aux) isn't a subtype of Throwable. + // Similarly for scala.Nothing (again, as defined in src/libray-aux). + assert(thrownType.isNullType || thrownType.isNothingType || thrownType.asClassBType.isSubtypeOf(jlThrowableRef)) + emit(asm.Opcodes.ATHROW) + end genAdaptAndSendToDest + + // ---------------- field load and store ---------------- + + /* + * must-single-thread + */ + def fieldLoad( field: Symbol, hostClass: Symbol = null): Unit = fieldOp(field, isLoad = true, hostClass) + + /* + * must-single-thread + */ + def fieldStore(field: Symbol, hostClass: Symbol = null): Unit = fieldOp(field, isLoad = false, hostClass) + + /* + * must-single-thread + */ + private def fieldOp(field: Symbol, isLoad: Boolean, specificReceiver: Symbol): Unit = { + val useSpecificReceiver = specificReceiver != null && !field.isScalaStatic + + val owner = internalName(if (useSpecificReceiver) specificReceiver else field.owner) + val fieldJName = field.javaSimpleName + val fieldDescr = symInfoTK(field).descriptor + val isStatic = field.isStaticMember + val opc = + if (isLoad) { if (isStatic) asm.Opcodes.GETSTATIC else asm.Opcodes.GETFIELD } + else { if (isStatic) asm.Opcodes.PUTSTATIC else asm.Opcodes.PUTFIELD } + mnode.visitFieldInsn(opc, owner, fieldJName, fieldDescr) + + } + + // ---------------- emitting constant values ---------------- + + /* + * For ClazzTag: + * must-single-thread 
+ * Otherwise it's safe to call from multiple threads. + */ + def genConstant(const: Constant): Unit = { + (const.tag/*: @switch*/) match { + + case BooleanTag => bc.boolconst(const.booleanValue) + + case ByteTag => bc.iconst(const.byteValue) + case ShortTag => bc.iconst(const.shortValue) + case CharTag => bc.iconst(const.charValue) + case IntTag => bc.iconst(const.intValue) + + case LongTag => bc.lconst(const.longValue) + case FloatTag => bc.fconst(const.floatValue) + case DoubleTag => bc.dconst(const.doubleValue) + + case UnitTag => () + + case StringTag => + assert(const.value != null, const) // TODO this invariant isn't documented in `case class Constant` + mnode.visitLdcInsn(const.stringValue) // `stringValue` special-cases null, but not for a const with StringTag + + case NullTag => emit(asm.Opcodes.ACONST_NULL) + + case ClazzTag => + val tp = toTypeKind(const.typeValue) + if tp.isPrimitive then + val boxedClass = boxedClassOfPrimitive(tp.asPrimitiveBType) + mnode.visitFieldInsn( + asm.Opcodes.GETSTATIC, + boxedClass.internalName, + "TYPE", // field name + jlClassRef.descriptor + ) + else + mnode.visitLdcInsn(tp.toASMType) + + case _ => abort(s"Unknown constant value: $const") + } + } + + private def genLabeledTo(tree: Labeled, expectedType: BType, dest: LoadDestination): BType = tree match { + case Labeled(bind, expr) => + + val labelSym = bind.symbol + + if dest == LoadDestination.FallThrough then + val resKind = tpeTK(tree) + val jumpTarget = new asm.Label + registerJumpDest(labelSym, resKind, LoadDestination.Jump(jumpTarget)) + genLoad(expr, resKind) + markProgramPoint(jumpTarget) + resKind + else + registerJumpDest(labelSym, expectedType, dest) + genLoadTo(expr, expectedType, dest) + expectedType + end if + } + + private def genReturn(r: Return): Unit = { + val expr: Tree = r.expr + val fromSym: Symbol = if (r.from.symbol.is(LabelFlag)) r.from.symbol else NoSymbol + + if (NoSymbol == fromSym) { + // return from enclosing method + cleanups match { + case 
Nil => + // not an assertion: !shouldEmitCleanup (at least not yet, pendingCleanups() may still have to run, and reset `shouldEmitCleanup`. + genLoadTo(expr, returnType, LoadDestination.Return) + case nextCleanup :: rest => + genLoad(expr, returnType) + lineNumber(r) + val saveReturnValue = (returnType != UNIT) + if (saveReturnValue) { + // regarding return value, the protocol is: in place of a `return-stmt`, a sequence of `adapt, store, jump` are inserted. + if (earlyReturnVar == null) { + earlyReturnVar = locals.makeLocal(returnType, "earlyReturnVar", expr.tpe, expr.span) + } + locals.store(earlyReturnVar) + } + bc goTo nextCleanup + shouldEmitCleanup = true + } + } else { + // return from labeled + assert(fromSym.is(LabelFlag), fromSym) + assert(!fromSym.is(Method), fromSym) + + /* TODO At the moment, we disregard cleanups, because by construction we don't have return-from-labels + * that cross cleanup boundaries. However, in theory such crossings are valid, so we should take care + * of them. 
+ */ + val (exprExpectedType, exprDest) = findJumpDest(fromSym) + genLoadTo(expr, exprExpectedType, exprDest) + } + } // end of genReturn() + + def genWhileDo(tree: WhileDo): LoadDestination = tree match{ + case WhileDo(cond, body) => + + val isInfinite = cond == tpd.EmptyTree + + val loop = new asm.Label + markProgramPoint(loop) + + if isInfinite then + val dest = LoadDestination.Jump(loop) + genLoadTo(body, UNIT, dest) + dest + else + body match + case Literal(value) if value.tag == UnitTag => + // this is the shape of do..while loops + val exitLoop = new asm.Label + genCond(cond, loop, exitLoop, targetIfNoJump = exitLoop) + markProgramPoint(exitLoop) + case _ => + val success = new asm.Label + val failure = new asm.Label + genCond(cond, success, failure, targetIfNoJump = success) + markProgramPoint(success) + genLoadTo(body, UNIT, LoadDestination.Jump(loop)) + markProgramPoint(failure) + end match + LoadDestination.FallThrough + } + + def genTypeApply(t: TypeApply): BType = (t: @unchecked) match { + case TypeApply(fun@DesugaredSelect(obj, _), targs) => + + val sym = fun.symbol + val cast = + if (sym == defn.Any_isInstanceOf) false + else if (sym == defn.Any_asInstanceOf) true + else abort(s"Unexpected type application $fun[sym: ${sym.showFullName}] in: $t") + val l = tpeTK(obj) + val r = tpeTK(targs.head) + genLoadQualifier(fun) + + // TODO @lry make pattern match + if (l.isPrimitive && r.isPrimitive) + genConversion(l, r, cast) + else if (l.isPrimitive) { + bc drop l + if (cast) { + mnode.visitTypeInsn(asm.Opcodes.NEW, jlClassCastExceptionRef.internalName) + bc dup ObjectRef + emit(asm.Opcodes.ATHROW) + } else { + bc boolconst false + } + } + else if (r.isPrimitive && cast) { + abort(s"Erasure should have added an unboxing operation to prevent this cast. 
Tree: $t") + } + else if (r.isPrimitive) { + bc isInstance boxedClassOfPrimitive(r.asPrimitiveBType) + } + else { + assert(r.isRef, r) // ensure that it's not a method + genCast(r.asRefBType, cast) + } + + if (cast) r else BOOL + } // end of genTypeApply() + + + private def mkArrayConstructorCall(arr: ArrayBType, app: Apply, args: List[Tree]) = { + val dims = arr.dimension + var elemKind = arr.elementType + val argsSize = args.length + if (argsSize > dims) { + report.error(em"too many arguments for array constructor: found ${args.length} but array has only $dims dimension(s)", ctx.source.atSpan(app.span)) + } + if (argsSize < dims) { + /* In one step: + * elemKind = new BType(BType.ARRAY, arr.off + argsSize, arr.len - argsSize) + * however the above does not enter a TypeName for each nested arrays in chrs. + */ + for (i <- args.length until dims) elemKind = ArrayBType(elemKind) + } + genLoadArguments(args, List.fill(args.size)(INT)) + (argsSize /*: @switch*/) match { + case 1 => bc newarray elemKind + case _ => + val descr = ("[" * argsSize) + elemKind.descriptor // denotes the same as: arrayN(elemKind, argsSize).descriptor + mnode.visitMultiANewArrayInsn(descr, argsSize) + } + } + + + private def genApply(app: Apply, expectedType: BType): BType = { + var generatedType = expectedType + lineNumber(app) + app match { + case Apply(_, args) if app.symbol eq defn.newArrayMethod => + val List(elemClaz, Literal(c: Constant), ArrayValue(_, dims)) = args: @unchecked + + generatedType = toTypeKind(c.typeValue) + mkArrayConstructorCall(generatedType.asArrayBType, app, dims) + case Apply(t :TypeApply, _) => + generatedType = + if (t.symbol ne defn.Object_synchronized) genTypeApply(t) + else genSynchronized(app, expectedType) + + case Apply(fun @ DesugaredSelect(Super(superQual, _), _), args) => + // 'super' call: Note: since constructors are supposed to + // return an instance of what they construct, we have to take + // special care. 
On JVM they are 'void', and Scala forbids (syntactically) + // to call super constructors explicitly and/or use their 'returned' value. + // therefore, we can ignore this fact, and generate code that leaves nothing + // on the stack (contrary to what the type in the AST says). + + // scala/bug#10290: qual can be `this.$outer()` (not just `this`), so we call genLoad (not just ALOAD_0) + genLoad(superQual) + genLoadArguments(args, paramTKs(app)) + generatedType = genCallMethod(fun.symbol, InvokeStyle.Super, app.span) + + // 'new' constructor call: Note: since constructors are + // thought to return an instance of what they construct, + // we have to 'simulate' it by DUPlicating the freshly created + // instance (on JVM, methods return VOID). + case Apply(fun @ DesugaredSelect(New(tpt), nme.CONSTRUCTOR), args) => + val ctor = fun.symbol + assert(ctor.isClassConstructor, s"'new' call to non-constructor: ${ctor.name}") + + generatedType = toTypeKind(tpt.tpe) + assert(generatedType.isRef, s"Non reference type cannot be instantiated: $generatedType") + + generatedType match { + case arr: ArrayBType => + mkArrayConstructorCall(arr, app, args) + + case rt: ClassBType => + assert(classBTypeFromSymbol(ctor.owner) == rt, s"Symbol ${ctor.owner.showFullName} is different from $rt") + mnode.visitTypeInsn(asm.Opcodes.NEW, rt.internalName) + bc dup generatedType + genLoadArguments(args, paramTKs(app)) + genCallMethod(ctor, InvokeStyle.Special, app.span) + + case _ => + abort(s"Cannot instantiate $tpt of kind: $generatedType") + } + + case Apply(fun, List(expr)) if Erasure.Boxing.isBox(fun.symbol) && fun.symbol.denot.owner != defn.UnitModuleClass => + val nativeKind = tpeTK(expr) + genLoad(expr, nativeKind) + val MethodNameAndType(mname, methodType) = asmBoxTo(nativeKind) + bc.invokestatic(srBoxesRuntimeRef.internalName, mname, methodType.descriptor, itf = false) + generatedType = boxResultType(fun.symbol) // was toTypeKind(fun.symbol.tpe.resultType) + + case Apply(fun, List(expr)) 
if Erasure.Boxing.isUnbox(fun.symbol) && fun.symbol.denot.owner != defn.UnitModuleClass => + genLoad(expr) + val boxType = unboxResultType(fun.symbol) // was toTypeKind(fun.symbol.owner.linkedClassOfClass.tpe) + generatedType = boxType + val MethodNameAndType(mname, methodType) = asmUnboxTo(boxType) + bc.invokestatic(srBoxesRuntimeRef.internalName, mname, methodType.descriptor, itf = false) + + case app @ Apply(fun, args) => + val sym = fun.symbol + + if (isPrimitive(fun)) { // primitive method call + generatedType = genPrimitiveOp(app, expectedType) + } else { // normal method call + val invokeStyle = + if (sym.isStaticMember) InvokeStyle.Static + else if (sym.is(Private) || sym.isClassConstructor) InvokeStyle.Special + else if (app.hasAttachment(BCodeHelpers.UseInvokeSpecial)) InvokeStyle.Special + else InvokeStyle.Virtual + + if (invokeStyle.hasInstance) genLoadQualifier(fun) + genLoadArguments(args, paramTKs(app)) + + val DesugaredSelect(qual, name) = fun: @unchecked // fun is a Select, also checked in genLoadQualifier + val isArrayClone = name == nme.clone_ && qual.tpe.widen.isInstanceOf[JavaArrayType] + if (isArrayClone) { + // Special-case Array.clone, introduced in 36ef60e. The goal is to generate this call + // as "[I.clone" instead of "java/lang/Object.clone". This is consistent with javac. + // Arrays have a public method `clone` (jls 10.7). 
+ // + // The JVMS is not explicit about this, but that receiver type can be an array type + // descriptor (instead of a class internal name): + // invokevirtual #2; //Method "[I".clone:()Ljava/lang/Object + // + // Note that using `Object.clone()` would work as well, but only because the JVM + // relaxes protected access specifically if the receiver is an array: + // http://hg.openjdk.java.net/jdk8/jdk8/hotspot/file/87ee5ee27509/src/share/vm/interpreter/linkResolver.cpp#l439 + // Example: `class C { override def clone(): Object = "hi" }` + // Emitting `def f(c: C) = c.clone()` as `Object.clone()` gives a VerifyError. + val target: String = tpeTK(qual).asRefBType.classOrArrayType + val methodBType = asmMethodType(sym) + bc.invokevirtual(target, sym.javaSimpleName, methodBType.descriptor) + generatedType = methodBType.returnType + } else { + val receiverClass = if (!invokeStyle.isVirtual) null else { + // receiverClass is used in the bytecode to as the method receiver. using sym.owner + // may lead to IllegalAccessErrors, see 9954eaf / aladdin bug 455. + val qualSym = qual.tpe.typeSymbol + if (qualSym == defn.ArrayClass) { + // For invocations like `Array(1).hashCode` or `.wait()`, use Object as receiver + // in the bytecode. Using the array descriptor (like we do for clone above) seems + // to work as well, but it seems safer not to change this. Javac also uses Object. + // Note that array apply/update/length are handled by isPrimitive (above). 
+ assert(sym.owner == defn.ObjectClass, s"unexpected array call: $app") + defn.ObjectClass + } else qualSym + } + generatedType = genCallMethod(sym, invokeStyle, app.span, receiverClass) + } + } + } + + generatedType + } // end of genApply() + + private def genArrayValue(av: tpd.JavaSeqLiteral): BType = { + val ArrayValue(tpt, elems) = av: @unchecked + + lineNumber(av) + genArray(elems, tpt) + } + + private def genArray(elems: List[Tree], elemType: Type): BType = { + val elmKind = toTypeKind(elemType) + val generatedType = ArrayBType(elmKind) + + bc iconst elems.length + bc newarray elmKind + + var i = 0 + var rest = elems + while (!rest.isEmpty) { + bc dup generatedType + bc iconst i + genLoad(rest.head, elmKind) + bc astore elmKind + rest = rest.tail + i = i + 1 + } + + generatedType + } + + /* A Match node contains one or more case clauses, each case clause lists one or more + * Int/String values to use as keys, and a code block. The exception is the "default" case + * clause which doesn't list any key (there is exactly one of these per match). + */ + private def genMatchTo(tree: Match, expectedType: BType, dest: LoadDestination): BType = tree match { + case Match(selector, cases) => + lineNumber(tree) + + val (generatedType, postMatch, postMatchDest) = + if dest == LoadDestination.FallThrough then + val postMatch = new asm.Label + (tpeTK(tree), postMatch, LoadDestination.Jump(postMatch)) + else + (expectedType, null, dest) + + // Only two possible selector types exist in `Match` trees at this point: Int and String + if (tpeTK(selector) == INT) { + + /* On a first pass over the case clauses, we flatten the keys and their + * targets (the latter represented with asm.Labels). That representation + * allows JCodeMethodV to emit a lookupswitch or a tableswitch. + * + * On a second pass, we emit the switch blocks, one for each different target. 
+ */ + + var flatKeys: List[Int] = Nil + var targets: List[asm.Label] = Nil + var default: asm.Label = null + var switchBlocks: List[(asm.Label, Tree)] = Nil + + genLoad(selector, INT) + + // collect switch blocks and their keys, but don't emit yet any switch-block. + for (caze @ CaseDef(pat, guard, body) <- cases) { + assert(guard == tpd.EmptyTree, guard) + val switchBlockPoint = new asm.Label + switchBlocks ::= (switchBlockPoint, body) + pat match { + case Literal(value) => + flatKeys ::= value.intValue + targets ::= switchBlockPoint + case Ident(nme.WILDCARD) => + assert(default == null, s"multiple default targets in a Match node, at ${tree.span}") + default = switchBlockPoint + case Alternative(alts) => + alts foreach { + case Literal(value) => + flatKeys ::= value.intValue + targets ::= switchBlockPoint + case _ => + abort(s"Invalid alternative in alternative pattern in Match node: $tree at: ${tree.span}") + } + case _ => + abort(s"Invalid pattern in Match node: $tree at: ${tree.span}") + } + } + + bc.emitSWITCH(mkArrayReverse(flatKeys), mkArrayL(targets.reverse), default, MIN_SWITCH_DENSITY) + + // emit switch-blocks. + for (sb <- switchBlocks.reverse) { + val (caseLabel, caseBody) = sb + markProgramPoint(caseLabel) + genLoadTo(caseBody, generatedType, postMatchDest) + } + } else { + + /* Since the JVM doesn't have a way to switch on a string, we switch + * on the `hashCode` of the string then do an `equals` check (with a + * possible second set of jumps if blocks can be reach from multiple + * string alternatives). + * + * This mirrors the way that Java compiles `switch` on Strings. 
+ */ + + var default: asm.Label = null + var indirectBlocks: List[(asm.Label, Tree)] = Nil + + + // Cases grouped by their hashCode + val casesByHash = SortedMap.empty[Int, List[(String, Either[asm.Label, Tree])]] + var caseFallback: Tree = null + + for (caze @ CaseDef(pat, guard, body) <- cases) { + assert(guard == tpd.EmptyTree, guard) + pat match { + case Literal(value) => + val strValue = value.stringValue + casesByHash.updateWith(strValue.##) { existingCasesOpt => + val newCase = (strValue, Right(body)) + Some(newCase :: existingCasesOpt.getOrElse(Nil)) + } + case Ident(nme.WILDCARD) => + assert(default == null, s"multiple default targets in a Match node, at ${tree.span}") + default = new asm.Label + indirectBlocks ::= (default, body) + case Alternative(alts) => + // We need an extra basic block since multiple strings can lead to this code + val indirectCaseGroupLabel = new asm.Label + indirectBlocks ::= (indirectCaseGroupLabel, body) + alts foreach { + case Literal(value) => + val strValue = value.stringValue + casesByHash.updateWith(strValue.##) { existingCasesOpt => + val newCase = (strValue, Left(indirectCaseGroupLabel)) + Some(newCase :: existingCasesOpt.getOrElse(Nil)) + } + case _ => + abort(s"Invalid alternative in alternative pattern in Match node: $tree at: ${tree.span}") + } + + case _ => + abort(s"Invalid pattern in Match node: $tree at: ${tree.span}") + } + } + + // Organize the hashCode options into switch cases + var flatKeys: List[Int] = Nil + var targets: List[asm.Label] = Nil + var hashBlocks: List[(asm.Label, List[(String, Either[asm.Label, Tree])])] = Nil + for ((hashValue, hashCases) <- casesByHash) { + val switchBlockPoint = new asm.Label + hashBlocks ::= (switchBlockPoint, hashCases) + flatKeys ::= hashValue + targets ::= switchBlockPoint + } + + // Push the hashCode of the string (or `0` it is `null`) onto the stack and switch on it + genLoadIfTo( + If( + tree.selector.select(defn.Any_==).appliedTo(nullLiteral), + Literal(Constant(0)), 
+ tree.selector.select(defn.Any_hashCode).appliedToNone + ), + INT, + LoadDestination.FallThrough + ) + bc.emitSWITCH(mkArrayReverse(flatKeys), mkArrayL(targets.reverse), default, MIN_SWITCH_DENSITY) + + // emit blocks for each hash case + for ((hashLabel, caseAlternatives) <- hashBlocks.reverse) { + markProgramPoint(hashLabel) + for ((caseString, indirectLblOrBody) <- caseAlternatives) { + val comparison = if (caseString == null) defn.Any_== else defn.Any_equals + val condp = Literal(Constant(caseString)).select(defn.Any_==).appliedTo(tree.selector) + val keepGoing = new asm.Label + indirectLblOrBody match { + case Left(jump) => + genCond(condp, jump, keepGoing, targetIfNoJump = keepGoing) + + case Right(caseBody) => + val thisCaseMatches = new asm.Label + genCond(condp, thisCaseMatches, keepGoing, targetIfNoJump = thisCaseMatches) + markProgramPoint(thisCaseMatches) + genLoadTo(caseBody, generatedType, postMatchDest) + } + markProgramPoint(keepGoing) + } + bc goTo default + } + + // emit blocks for common patterns + for ((caseLabel, caseBody) <- indirectBlocks.reverse) { + markProgramPoint(caseLabel) + genLoadTo(caseBody, generatedType, postMatchDest) + } + } + + if postMatch != null then + markProgramPoint(postMatch) + generatedType + } + + def genBlockTo(tree: Block, expectedType: BType, dest: LoadDestination): Unit = tree match { + case Block(stats, expr) => + + val savedScope = varsInScope + varsInScope = Nil + stats foreach genStat + genLoadTo(expr, expectedType, dest) + emitLocalVarScopes() + varsInScope = savedScope + } + + /** Add entries to the `LocalVariableTable` JVM attribute for all the vars in + * `varsInScope`, ending at the current program point. 
+ */ + def emitLocalVarScopes(): Unit = + if (emitVars) { + val end = currProgramPoint() + for ((sym, start) <- varsInScope.reverse) { + emitLocalVarScope(sym, start, end) + } + } + end emitLocalVarScopes + + def adapt(from: BType, to: BType): Unit = { + if (!from.conformsTo(to)) { + to match { + case UNIT => bc drop from + case _ => bc.emitT2T(from, to) + } + } else if (from.isNothingType) { + /* There are two possibilities for from.isNothingType: emitting a "throw e" expressions and + * loading a (phantom) value of type Nothing. + * + * The Nothing type in Scala's type system does not exist in the JVM. In bytecode, Nothing + * is mapped to scala.runtime.Nothing$. To the JVM, a call to Predef.??? looks like it would + * return an object of type Nothing$. We need to do something with that phantom object on + * the stack. "Phantom" because it never exists: such methods always throw, but the JVM does + * not know that. + * + * Note: The two verifiers (old: type inference, new: type checking) have different + * requirements. Very briefly: + * + * Old (http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.10.2.1): at + * each program point, no matter what branches were taken to get there + * - Stack is same size and has same typed values + * - Local and stack values need to have consistent types + * - In practice, the old verifier seems to ignore unreachable code and accept any + * instructions after an ATHROW. For example, there can be another ATHROW (without + * loading another throwable first). + * + * New (http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.10.1) + * - Requires consistent stack map frames. GenBCode generates stack frames if -target:jvm-1.6 + * or higher. + * - In practice: the ASM library computes stack map frames for us (ClassWriter). Emitting + * correct frames after an ATHROW is probably complex, so ASM uses the following strategy: + * - Every time when generating an ATHROW, a new basic block is started. 
+ * - During classfile writing, such basic blocks are found to be dead: no branches go there + * - Eliminating dead code would probably require complex shifts in the output byte buffer + * - But there's an easy solution: replace all code in the dead block with with + * `nop; nop; ... nop; athrow`, making sure the bytecode size stays the same + * - The corresponding stack frame can be easily generated: on entering a dead the block, + * the frame requires a single Throwable on the stack. + * - Since there are no branches to the dead block, the frame requirements are never violated. + * + * To summarize the above: it does matter what we emit after an ATHROW. + * + * NOW: if we end up here because we emitted a load of a (phantom) value of type Nothing$, + * there was no ATHROW emitted. So, we have to make the verifier happy and do something + * with that value. Since Nothing$ extends Throwable, the easiest is to just emit an ATHROW. + * + * If we ended up here because we generated a "throw e" expression, we know the last + * emitted instruction was an ATHROW. As explained above, it is OK to emit a second ATHROW, + * the verifiers will be happy. + */ + if (lastInsn.getOpcode != asm.Opcodes.ATHROW) + emit(asm.Opcodes.ATHROW) + } else if (from.isNullType) { + /* After loading an expression of type `scala.runtime.Null$`, introduce POP; ACONST_NULL. + * This is required to pass the verifier: in Scala's type system, Null conforms to any + * reference type. In bytecode, the type Null is represented by scala.runtime.Null$, which + * is not a subtype of all reference types. Example: + * + * def nl: Null = null // in bytecode, nl has return type scala.runtime.Null$ + * val a: String = nl // OK for Scala but not for the JVM, scala.runtime.Null$ does not conform to String + * + * In order to fix the above problem, the value returned by nl is dropped and ACONST_NULL is + * inserted instead - after all, an expression of type scala.runtime.Null$ can only be null. 
+ */ + if (lastInsn.getOpcode != asm.Opcodes.ACONST_NULL) { + bc drop from + emit(asm.Opcodes.ACONST_NULL) + } + } + else (from, to) match { + case (BYTE, LONG) | (SHORT, LONG) | (CHAR, LONG) | (INT, LONG) => bc.emitT2T(INT, LONG) + case _ => () + } + } + + /* Emit code to Load the qualifier of `tree` on top of the stack. */ + def genLoadQualifier(tree: Tree): Unit = { + lineNumber(tree) + tree match { + case DesugaredSelect(qualifier, _) => genLoad(qualifier) + case t: Ident => // dotty specific + cachedDesugarIdent(t) match { + case Some(sel) => genLoadQualifier(sel) + case None => + assert(t.symbol.owner == this.claszSymbol) + } + case _ => abort(s"Unknown qualifier $tree") + } + } + + def genLoadArguments(args: List[Tree], btpes: List[BType]): Unit = + args match + case arg :: args1 => + btpes match + case btpe :: btpes1 => + genLoad(arg, btpe) + genLoadArguments(args1, btpes1) + case _ => + case _ => + + def genLoadModule(tree: Tree): BType = { + val module = ( + if (!tree.symbol.is(PackageClass)) tree.symbol + else tree.symbol.info.member(nme.PACKAGE).symbol match { + case NoSymbol => abort(s"SI-5604: Cannot use package as value: $tree") + case s => abort(s"SI-5604: found package class where package object expected: $tree") + } + ) + lineNumber(tree) + genLoadModule(module) + symInfoTK(module) + } + + def genLoadModule(module: Symbol): Unit = { + def inStaticMethod = methSymbol != null && methSymbol.isStaticMember + if (claszSymbol == module.moduleClass && jMethodName != "readResolve" && !inStaticMethod) { + mnode.visitVarInsn(asm.Opcodes.ALOAD, 0) + } else { + val mbt = symInfoTK(module).asClassBType + mnode.visitFieldInsn( + asm.Opcodes.GETSTATIC, + mbt.internalName /* + "$" */ , + str.MODULE_INSTANCE_FIELD, + mbt.descriptor // for nostalgics: toTypeKind(module.tpe).descriptor + ) + } + } + + def genConversion(from: BType, to: BType, cast: Boolean): Unit = { + if (cast) { bc.emitT2T(from, to) } + else { + bc drop from + bc boolconst (from == to) + } + } + + 
def genCast(to: RefBType, cast: Boolean): Unit = { + if (cast) { bc checkCast to } + else { bc isInstance to } + } + + /* Is the given symbol a primitive operation? */ + def isPrimitive(fun: Tree): Boolean = { + primitives.isPrimitive(fun) + } + + /* Generate coercion denoted by "code" */ + def genCoercion(code: Int): Unit = { + import ScalaPrimitivesOps._ + (code: @switch) match { + case B2B | S2S | C2C | I2I | L2L | F2F | D2D => () + case _ => + val from = coercionFrom(code) + val to = coercionTo(code) + bc.emitT2T(from, to) + } + } + + /* Generate string concatenation + * + * On JDK 8: create and append using `StringBuilder` + * On JDK 9+: use `invokedynamic` with `StringConcatFactory` + */ + def genStringConcat(tree: Tree): BType = { + lineNumber(tree) + liftStringConcat(tree) match { + // Optimization for expressions of the form "" + x + case List(Literal(Constant("")), arg) => + genLoad(arg, ObjectRef) + genCallMethod(defn.String_valueOf_Object, InvokeStyle.Static) + + case concatenations => + val concatArguments = concatenations.view + .filter { + case Literal(Constant("")) => false // empty strings are no-ops in concatenation + case _ => true + } + .map { + case Apply(boxOp, value :: Nil) if Erasure.Boxing.isBox(boxOp.symbol) && boxOp.symbol.denot.owner != defn.UnitModuleClass => + // Eliminate boxing of primitive values. Boxing is introduced by erasure because + // there's only a single synthetic `+` method "added" to the string class. 
+ value + case other => other + } + .toList + + // `StringConcatFactory` only got added in JDK 9, so use `StringBuilder` for lower + if (classfileVersion < asm.Opcodes.V9) { + + // Estimate capacity needed for the string builder + val approxBuilderSize = concatArguments.view.map { + case Literal(Constant(s: String)) => s.length + case Literal(c @ Constant(_)) if c.isNonUnitAnyVal => String.valueOf(c).length + case _ => 0 + }.sum + bc.genNewStringBuilder(approxBuilderSize) + + for (elem <- concatArguments) { + val elemType = tpeTK(elem) + genLoad(elem, elemType) + bc.genStringBuilderAppend(elemType) + } + bc.genStringBuilderEnd + } else { + + /* `StringConcatFactory#makeConcatWithConstants` accepts max 200 argument slots. If + * the string concatenation is longer (unlikely), we spill into multiple calls + */ + val MaxIndySlots = 200 + val TagArg = '\u0001' // indicates a hole (in the recipe string) for an argument + val TagConst = '\u0002' // indicates a hole (in the recipe string) for a constant + + val recipe = new StringBuilder() + val argTypes = Seq.newBuilder[asm.Type] + val constVals = Seq.newBuilder[String] + var totalArgSlots = 0 + var countConcats = 1 // ie. 
1 + how many times we spilled + + for (elem <- concatArguments) { + val tpe = tpeTK(elem) + val elemSlots = tpe.size + + // Unlikely spill case + if (totalArgSlots + elemSlots >= MaxIndySlots) { + bc.genIndyStringConcat(recipe.toString, argTypes.result(), constVals.result()) + countConcats += 1 + totalArgSlots = 0 + recipe.setLength(0) + argTypes.clear() + constVals.clear() + } + + elem match { + case Literal(Constant(s: String)) => + if (s.contains(TagArg) || s.contains(TagConst)) { + totalArgSlots += elemSlots + recipe.append(TagConst) + constVals += s + } else { + recipe.append(s) + } + + case other => + totalArgSlots += elemSlots + recipe.append(TagArg) + val tpe = tpeTK(elem) + argTypes += tpe.toASMType + genLoad(elem, tpe) + } + } + bc.genIndyStringConcat(recipe.toString, argTypes.result(), constVals.result()) + + // If we spilled, generate one final concat + if (countConcats > 1) { + bc.genIndyStringConcat( + TagArg.toString * countConcats, + Seq.fill(countConcats)(StringRef.toASMType), + Seq.empty + ) + } + } + } + StringRef + } + + /** + * Generate a method invocation. If `specificReceiver != null`, it is used as receiver in the + * invocation instruction, otherwise `method.owner`. A specific receiver class is needed to + * prevent an IllegalAccessError, (aladdin bug 455). + */ + def genCallMethod(method: Symbol, style: InvokeStyle, pos: Span = NoSpan, specificReceiver: Symbol = null): BType = { + val methodOwner = method.owner + + // the class used in the invocation's method descriptor in the classfile + val receiverClass = { + if (specificReceiver != null) + assert(style.isVirtual || specificReceiver == methodOwner, s"specificReceiver can only be specified for virtual calls. 
$method - $specificReceiver") + + val useSpecificReceiver = specificReceiver != null && !defn.isBottomClass(specificReceiver) && !method.isScalaStatic + val receiver = if (useSpecificReceiver) specificReceiver else methodOwner + + // workaround for a JVM bug: https://bugs.openjdk.java.net/browse/JDK-8154587 + // when an interface method overrides a member of Object (note that all interfaces implicitly + // have superclass Object), the receiver needs to be the interface declaring the override (and + // not a sub-interface that inherits it). example: + // trait T { override def clone(): Object = "" } + // trait U extends T + // class C extends U + // class D { def f(u: U) = u.clone() } + // The invocation `u.clone()` needs `T` as a receiver: + // - using Object is illegal, as Object.clone is protected + // - using U results in a `NoSuchMethodError: U.clone. This is the JVM bug. + // Note that a mixin forwarder is generated, so the correct method is executed in the end: + // class C { override def clone(): Object = super[T].clone() } + val isTraitMethodOverridingObjectMember = { + receiver != methodOwner && // fast path - the boolean is used to pick either of these two, if they are the same it does not matter + style.isVirtual && + isEmittedInterface(receiver) && + defn.ObjectType.decl(method.name).symbol.exists && { // fast path - compute overrideChain on the next line only if necessary + val syms = method.allOverriddenSymbols.toList + !syms.isEmpty && syms.last.owner == defn.ObjectClass + } + } + if (isTraitMethodOverridingObjectMember) methodOwner else receiver + } + + receiverClass.info // ensure types the type is up to date; erasure may add lateINTERFACE to traits + val receiverName = internalName(receiverClass) + + val jname = method.javaSimpleName + val bmType = asmMethodType(method) + val mdescr = bmType.descriptor + + val isInterface = isEmittedInterface(receiverClass) + import InvokeStyle._ + if (style == Super) { + if (isInterface && 
!method.is(JavaDefined)) { + val args = new Array[BType](bmType.argumentTypes.length + 1) + val ownerBType = toTypeKind(method.owner.info) + bmType.argumentTypes.copyToArray(args, 1) + val staticDesc = MethodBType(ownerBType :: bmType.argumentTypes, bmType.returnType).descriptor + val staticName = traitSuperAccessorName(method) + bc.invokestatic(receiverName, staticName, staticDesc, isInterface) + } else { + bc.invokespecial(receiverName, jname, mdescr, isInterface) + } + } else { + val opc = style match { + case Static => Opcodes.INVOKESTATIC + case Special => Opcodes.INVOKESPECIAL + case Virtual => if (isInterface) Opcodes.INVOKEINTERFACE else Opcodes.INVOKEVIRTUAL + } + bc.emitInvoke(opc, receiverName, jname, mdescr, isInterface) + } + + bmType.returnType + } // end of genCallMethod() + + /* Generate the scala ## method. */ + def genScalaHash(tree: Tree): BType = { + genLoad(tree, ObjectRef) + genCallMethod(NoSymbol, InvokeStyle.Static) // used to dispatch ## on primitives to ScalaRuntime.hash. Should be implemented by a miniphase + } + + /* + * Returns a list of trees that each should be concatenated, from left to right. + * It turns a chained call like "a".+("b").+("c") into a list of arguments. + */ + def liftStringConcat(tree: Tree): List[Tree] = tree match { + case tree @ Apply(fun @ DesugaredSelect(larg, method), rarg) => + if (isPrimitive(fun) && + primitives.getPrimitive(tree, larg.tpe) == ScalaPrimitivesOps.CONCAT) + liftStringConcat(larg) ::: rarg + else + tree :: Nil + case _ => + tree :: Nil + } + + /* Emit code to compare the two top-most stack values using the 'op' operator. 
*/ + private def genCJUMP(success: asm.Label, failure: asm.Label, op: TestOp, tk: BType, targetIfNoJump: asm.Label, negated: Boolean = false): Unit = { + if (targetIfNoJump == success) genCJUMP(failure, success, op.negate(), tk, targetIfNoJump, negated = !negated) + else { + if (tk.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT + bc.emitIF_ICMP(op, success) + } else if (tk.isRef) { // REFERENCE(_) | ARRAY(_) + bc.emitIF_ACMP(op, success) + } else { + import Primitives._ + def useCmpG = if (negated) op == GT || op == GE else op == LT || op == LE + (tk: @unchecked) match { + case LONG => emit(asm.Opcodes.LCMP) + case FLOAT => emit(if (useCmpG) asm.Opcodes.FCMPG else asm.Opcodes.FCMPL) + case DOUBLE => emit(if (useCmpG) asm.Opcodes.DCMPG else asm.Opcodes.DCMPL) + } + bc.emitIF(op, success) + } + if (targetIfNoJump != failure) bc goTo failure + } + } + + /* Emits code to compare (and consume) stack-top and zero using the 'op' operator */ + private def genCZJUMP(success: asm.Label, failure: asm.Label, op: TestOp, tk: BType, targetIfNoJump: asm.Label, negated: Boolean = false): Unit = { + import Primitives._ + if (targetIfNoJump == success) genCZJUMP(failure, success, op.negate(), tk, targetIfNoJump, negated = !negated) + else { + if (tk.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT + bc.emitIF(op, success) + } else if (tk.isRef) { // REFERENCE(_) | ARRAY(_) + (op: @unchecked) match { // references are only compared with EQ and NE + case EQ => bc emitIFNULL success + case NE => bc emitIFNONNULL success + } + } else { + def useCmpG = if (negated) op == GT || op == GE else op == LT || op == LE + (tk: @unchecked) match { + case LONG => + emit(asm.Opcodes.LCONST_0) + emit(asm.Opcodes.LCMP) + case FLOAT => + emit(asm.Opcodes.FCONST_0) + emit(if (useCmpG) asm.Opcodes.FCMPG else asm.Opcodes.FCMPL) + case DOUBLE => + emit(asm.Opcodes.DCONST_0) + emit(if (useCmpG) asm.Opcodes.DCMPG else asm.Opcodes.DCMPL) + } + bc.emitIF(op, success) + } + if (targetIfNoJump != 
failure) bc goTo failure + } + } + + def testOpForPrimitive(primitiveCode: Int) = (primitiveCode: @switch) match { + case ScalaPrimitivesOps.ID => Primitives.EQ + case ScalaPrimitivesOps.NI => Primitives.NE + case ScalaPrimitivesOps.EQ => Primitives.EQ + case ScalaPrimitivesOps.NE => Primitives.NE + case ScalaPrimitivesOps.LT => Primitives.LT + case ScalaPrimitivesOps.LE => Primitives.LE + case ScalaPrimitivesOps.GT => Primitives.GT + case ScalaPrimitivesOps.GE => Primitives.GE + } + + /* + * Generate code for conditional expressions. + * The jump targets success/failure of the test are `then-target` and `else-target` resp. + */ + private def genCond(tree: Tree, success: asm.Label, failure: asm.Label, targetIfNoJump: asm.Label): Unit = { + + def genComparisonOp(l: Tree, r: Tree, code: Int): Unit = { + val op = testOpForPrimitive(code) + def isNull(t: Tree): Boolean = t match { + case Literal(Constant(null)) => true + case _ => false + } + def ifOneIsNull(l: Tree, r: Tree): Tree = if (isNull(l)) r else if (isNull(r)) l else null + val nonNullSide = if (ScalaPrimitivesOps.isReferenceEqualityOp(code)) ifOneIsNull(l, r) else null + if (nonNullSide != null) { + // special-case reference (in)equality test for null (null eq x, x eq null) + genLoad(nonNullSide, ObjectRef) + genCZJUMP(success, failure, op, ObjectRef, targetIfNoJump) + } else { + val tk = tpeTK(l).maxType(tpeTK(r)) + genLoad(l, tk) + genLoad(r, tk) + genCJUMP(success, failure, op, tk, targetIfNoJump) + } + } + + def loadAndTestBoolean() = { + genLoad(tree, BOOL) + genCZJUMP(success, failure, Primitives.NE, BOOL, targetIfNoJump) + } + + lineNumber(tree) + tree match { + + case tree @ Apply(fun, args) if primitives.isPrimitive(fun.symbol) => + import ScalaPrimitivesOps.{ ZNOT, ZAND, ZOR, EQ } + + // lhs and rhs of test + lazy val DesugaredSelect(lhs, _) = fun: @unchecked + val rhs = if (args.isEmpty) tpd.EmptyTree else args.head // args.isEmpty only for ZNOT + + def genZandOrZor(and: Boolean): Unit = { + // 
reaching "keepGoing" indicates the rhs should be evaluated too (ie not short-circuited). + val keepGoing = new asm.Label + + if (and) genCond(lhs, keepGoing, failure, targetIfNoJump = keepGoing) + else genCond(lhs, success, keepGoing, targetIfNoJump = keepGoing) + + markProgramPoint(keepGoing) + genCond(rhs, success, failure, targetIfNoJump) + } + + primitives.getPrimitive(fun.symbol) match { + case ZNOT => genCond(lhs, failure, success, targetIfNoJump) + case ZAND => genZandOrZor(and = true) + case ZOR => genZandOrZor(and = false) + case code => + if (ScalaPrimitivesOps.isUniversalEqualityOp(code) && tpeTK(lhs).isClass) { + // rewrite `==` to null tests and `equals`. not needed for arrays (`equals` is reference equality). + if (code == EQ) genEqEqPrimitive(lhs, rhs, success, failure, targetIfNoJump) + else genEqEqPrimitive(lhs, rhs, failure, success, targetIfNoJump) + } else if (ScalaPrimitivesOps.isComparisonOp(code)) { + genComparisonOp(lhs, rhs, code) + } else + loadAndTestBoolean() + } + + case Block(stats, expr) => + /* Push the decision further down the `expr`. + * This is particularly effective for the shape of do..while loops. + */ + val savedScope = varsInScope + varsInScope = Nil + stats foreach genStat + genCond(expr, success, failure, targetIfNoJump) + emitLocalVarScopes() + varsInScope = savedScope + + case If(condp, thenp, elsep) => + val innerSuccess = new asm.Label + val innerFailure = new asm.Label + genCond(condp, innerSuccess, innerFailure, targetIfNoJump = innerSuccess) + markProgramPoint(innerSuccess) + genCond(thenp, success, failure, targetIfNoJump = innerFailure) + markProgramPoint(innerFailure) + genCond(elsep, success, failure, targetIfNoJump) + + case _ => loadAndTestBoolean() + } + + } // end of genCond() + + /* + * Generate the "==" code for object references. 
It is equivalent of + * if (l eq null) r eq null else l.equals(r); + * + * @param l left-hand-side of the '==' + * @param r right-hand-side of the '==' + */ + def genEqEqPrimitive(l: Tree, r: Tree, success: asm.Label, failure: asm.Label, targetIfNoJump: asm.Label): Unit = { + + /* True if the equality comparison is between values that require the use of the rich equality + * comparator (scala.runtime.Comparator.equals). This is the case when either side of the + * comparison might have a run-time type subtype of java.lang.Number or java.lang.Character. + * When it is statically known that both sides are equal and subtypes of Number of Character, + * not using the rich equality is possible (their own equals method will do ok.) + */ + val mustUseAnyComparator: Boolean = { + val areSameFinals = l.tpe.typeSymbol.is(Final) && r.tpe.typeSymbol.is(Final) && (l.tpe =:= r.tpe) + // todo: remove + def isMaybeBoxed(sym: Symbol): Boolean = { + (sym == defn.ObjectClass) || + (sym == defn.JavaSerializableClass) || + (sym == defn.ComparableClass) || + (sym derivesFrom defn.BoxedNumberClass) || + (sym derivesFrom defn.BoxedCharClass) || + (sym derivesFrom defn.BoxedBooleanClass) + } + !areSameFinals && isMaybeBoxed(l.tpe.typeSymbol) && isMaybeBoxed(r.tpe.typeSymbol) + } + def isNull(t: Tree): Boolean = t match { + case Literal(Constant(null)) => true + case _ => false + } + def isNonNullExpr(t: Tree): Boolean = t.isInstanceOf[Literal] || ((t.symbol ne null) && t.symbol.is(Module)) + + if (mustUseAnyComparator) { + val equalsMethod: Symbol = { + if (l.tpe <:< defn.BoxedNumberClass.info) { + if (r.tpe <:< defn.BoxedNumberClass.info) defn.BoxesRunTimeModule.requiredMethod(nme.equalsNumNum) + else if (r.tpe <:< defn.BoxedCharClass.info) NoSymbol // ctx.requiredMethod(BoxesRunTimeTypeRef, nme.equalsNumChar) // this method is private + else defn.BoxesRunTimeModule.requiredMethod(nme.equalsNumObject) + } else defn.BoxesRunTimeModule_externalEquals + } + + genLoad(l, ObjectRef) + 
genLoad(r, ObjectRef) + genCallMethod(equalsMethod, InvokeStyle.Static) + genCZJUMP(success, failure, Primitives.NE, BOOL, targetIfNoJump) + } + else { + if (isNull(l)) { + // null == expr -> expr eq null + genLoad(r, ObjectRef) + genCZJUMP(success, failure, Primitives.EQ, ObjectRef, targetIfNoJump) + } else if (isNull(r)) { + // expr == null -> expr eq null + genLoad(l, ObjectRef) + genCZJUMP(success, failure, Primitives.EQ, ObjectRef, targetIfNoJump) + } else if (isNonNullExpr(l)) { + // SI-7852 Avoid null check if L is statically non-null. + genLoad(l, ObjectRef) + genLoad(r, ObjectRef) + genCallMethod(defn.Any_equals, InvokeStyle.Virtual) + genCZJUMP(success, failure, Primitives.NE, BOOL, targetIfNoJump) + } else { + // l == r -> if (l eq null) r eq null else l.equals(r) + val eqEqTempLocal = locals.makeLocal(ObjectRef, nme.EQEQ_LOCAL_VAR.mangledString, defn.ObjectType, r.span) + val lNull = new asm.Label + val lNonNull = new asm.Label + + genLoad(l, ObjectRef) + genLoad(r, ObjectRef) + locals.store(eqEqTempLocal) + bc dup ObjectRef + genCZJUMP(lNull, lNonNull, Primitives.EQ, ObjectRef, targetIfNoJump = lNull) + + markProgramPoint(lNull) + bc drop ObjectRef + locals.load(eqEqTempLocal) + genCZJUMP(success, failure, Primitives.EQ, ObjectRef, targetIfNoJump = lNonNull) + + markProgramPoint(lNonNull) + locals.load(eqEqTempLocal) + genCallMethod(defn.Any_equals, InvokeStyle.Virtual) + genCZJUMP(success, failure, Primitives.NE, BOOL, targetIfNoJump) + } + } + } + + + def genSynchronized(tree: Apply, expectedType: BType): BType + def genLoadTry(tree: Try): BType + + def genInvokeDynamicLambda(ctor: Symbol, lambdaTarget: Symbol, environmentSize: Int, functionalInterface: Symbol): BType = { + import java.lang.invoke.LambdaMetafactory.{FLAG_BRIDGES, FLAG_SERIALIZABLE} + + report.debuglog(s"Using invokedynamic rather than `new ${ctor.owner}`") + val generatedType = classBTypeFromSymbol(functionalInterface) + // Lambdas should be serializable if they implement a SAM that 
extends Serializable or if they + // implement a scala.Function* class. + val isSerializable = functionalInterface.isSerializable || defn.isFunctionClass(functionalInterface) + val isInterface = isEmittedInterface(lambdaTarget.owner) + val invokeStyle = + if (lambdaTarget.isStaticMember) asm.Opcodes.H_INVOKESTATIC + else if (lambdaTarget.is(Private) || lambdaTarget.isClassConstructor) asm.Opcodes.H_INVOKESPECIAL + else if (isInterface) asm.Opcodes.H_INVOKEINTERFACE + else asm.Opcodes.H_INVOKEVIRTUAL + + val targetHandle = + new asm.Handle(invokeStyle, + classBTypeFromSymbol(lambdaTarget.owner).internalName, + lambdaTarget.javaSimpleName, + asmMethodType(lambdaTarget).descriptor, + /* itf = */ isInterface) + + val (a,b) = lambdaTarget.info.firstParamTypes.splitAt(environmentSize) + var (capturedParamsTypes, lambdaParamTypes) = (a,b) + + if (invokeStyle != asm.Opcodes.H_INVOKESTATIC) capturedParamsTypes = lambdaTarget.owner.info :: capturedParamsTypes + + // Requires https://github.com/scala/scala-java8-compat on the runtime classpath + val returnUnit = lambdaTarget.info.resultType.typeSymbol == defn.UnitClass + val functionalInterfaceDesc: String = generatedType.descriptor + val desc = capturedParamsTypes.map(tpe => toTypeKind(tpe)).mkString(("("), "", ")") + functionalInterfaceDesc + // TODO specialization + val instantiatedMethodType = new MethodBType(lambdaParamTypes.map(p => toTypeKind(p)), toTypeKind(lambdaTarget.info.resultType)).toASMType + + val samMethod = atPhase(erasurePhase) { + val samMethods = toDenot(functionalInterface).info.possibleSamMethods.toList + samMethods match { + case x :: Nil => x.symbol + case Nil => abort(s"${functionalInterface.show} is not a functional interface. It doesn't have abstract methods") + case xs => abort(s"${functionalInterface.show} is not a functional interface. 
" + + s"It has the following abstract methods: ${xs.map(_.name).mkString(", ")}") + } + } + + val methodName = samMethod.javaSimpleName + val samMethodType = asmMethodType(samMethod).toASMType + // scala/bug#10334: make sure that a lambda object for `T => U` has a method `apply(T)U`, not only the `(Object)Object` + // version. Using the lambda a structural type `{def apply(t: T): U}` causes a reflective lookup for this method. + val needsGenericBridge = samMethodType != instantiatedMethodType + val bridgeMethods = atPhase(erasurePhase){ + samMethod.allOverriddenSymbols.toList + } + val overriddenMethodTypes = bridgeMethods.map(b => asmMethodType(b).toASMType) + + // any methods which `samMethod` overrides need bridges made for them + // this is done automatically during erasure for classes we generate, but LMF needs to have them explicitly mentioned + // so we have to compute them at this relatively late point. + val bridgeTypes = ( + if (needsGenericBridge) + instantiatedMethodType +: overriddenMethodTypes + else + overriddenMethodTypes + ).distinct.filterNot(_ == samMethodType) + + val needsBridges = bridgeTypes.nonEmpty + + def flagIf(b: Boolean, flag: Int): Int = if (b) flag else 0 + val flags = flagIf(isSerializable, FLAG_SERIALIZABLE) | flagIf(needsBridges, FLAG_BRIDGES) + + val bsmArgs0 = Seq(samMethodType, targetHandle, instantiatedMethodType) + val bsmArgs1 = if (flags != 0) Seq(Int.box(flags)) else Seq.empty + val bsmArgs2 = if needsBridges then bridgeTypes.length +: bridgeTypes else Seq.empty + + val bsmArgs = bsmArgs0 ++ bsmArgs1 ++ bsmArgs2 + + val metafactory = + if (flags != 0) + jliLambdaMetaFactoryAltMetafactoryHandle // altMetafactory required to be able to pass the flags and additional arguments if needed + else + jliLambdaMetaFactoryMetafactoryHandle + + bc.jmethod.visitInvokeDynamicInsn(methodName, desc, metafactory, bsmArgs: _*) + + generatedType + } + } + + /** Does this symbol actually correspond to an interface that will be emitted? 
+ * In the backend, this should be preferred over `isInterface` because it + * also returns true for the symbols of the fake companion objects we + * create for Java-defined classes as well as for Java annotations + * which we represent as classes. + */ + private def isEmittedInterface(sym: Symbol): Boolean = sym.isInterface || + sym.is(JavaDefined) && (toDenot(sym).isAnnotation || sym.is(ModuleClass) && (sym.companionClass.is(PureInterface)) || sym.companionClass.is(Trait)) + + +} diff --git a/tests/pos-with-compiler-cc/backend/jvm/BCodeHelpers.scala b/tests/pos-with-compiler-cc/backend/jvm/BCodeHelpers.scala new file mode 100644 index 000000000000..6f83af540bea --- /dev/null +++ b/tests/pos-with-compiler-cc/backend/jvm/BCodeHelpers.scala @@ -0,0 +1,960 @@ +package dotty.tools +package backend +package jvm + +import scala.language.unsafeNulls + +import scala.annotation.threadUnsafe +import scala.tools.asm +import scala.tools.asm.AnnotationVisitor +import scala.tools.asm.ClassWriter +import scala.collection.mutable + +import dotty.tools.dotc.CompilationUnit +import dotty.tools.dotc.ast.tpd +import dotty.tools.dotc.ast.Trees +import dotty.tools.dotc.core.Annotations._ +import dotty.tools.dotc.core.Constants._ +import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Phases._ +import dotty.tools.dotc.core.Decorators._ +import dotty.tools.dotc.core.Flags._ +import dotty.tools.dotc.core.Names.Name +import dotty.tools.dotc.core.NameKinds.ExpandedName +import dotty.tools.dotc.core.Signature +import dotty.tools.dotc.core.StdNames._ +import dotty.tools.dotc.core.NameKinds +import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.core.Types +import dotty.tools.dotc.core.Types._ +import dotty.tools.dotc.core.TypeErasure +import dotty.tools.dotc.transform.GenericSignatures +import dotty.tools.dotc.transform.ElimErasedValueType +import dotty.tools.io.AbstractFile +import dotty.tools.dotc.report + +import 
dotty.tools.backend.jvm.DottyBackendInterface.symExtensions + +/* + * Traits encapsulating functionality to convert Scala AST Trees into ASM ClassNodes. + * + * @author Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded + * @version 1.0 + * + */ +trait BCodeHelpers extends BCodeIdiomatic with BytecodeWriters { + // for some reason singleton types aren't allowed in constructor calls. will need several casts in code to enforce + + //import global._ + //import bTypes._ + //import coreBTypes._ + import bTypes._ + import tpd._ + import coreBTypes._ + import int.{_, given} + import DottyBackendInterface._ + + def ScalaATTRName: String = "Scala" + def ScalaSignatureATTRName: String = "ScalaSig" + + @threadUnsafe lazy val AnnotationRetentionAttr: ClassSymbol = requiredClass("java.lang.annotation.Retention") + @threadUnsafe lazy val AnnotationRetentionSourceAttr: TermSymbol = requiredClass("java.lang.annotation.RetentionPolicy").linkedClass.requiredValue("SOURCE") + @threadUnsafe lazy val AnnotationRetentionClassAttr: TermSymbol = requiredClass("java.lang.annotation.RetentionPolicy").linkedClass.requiredValue("CLASS") + @threadUnsafe lazy val AnnotationRetentionRuntimeAttr: TermSymbol = requiredClass("java.lang.annotation.RetentionPolicy").linkedClass.requiredValue("RUNTIME") + + val bCodeAsmCommon: BCodeAsmCommon[int.type] = new BCodeAsmCommon(int) + + /* + * must-single-thread + */ + def getFileForClassfile(base: AbstractFile, clsName: String, suffix: String): AbstractFile = { + getFile(base, clsName, suffix) + } + + /* + * must-single-thread + */ + def getOutFolder(csym: Symbol, cName: String): AbstractFile = { + try { + outputDirectory + } catch { + case ex: Throwable => + report.error(em"Couldn't create file for class $cName\n${ex.getMessage}", ctx.source.atSpan(csym.span)) + null + } + } + + final def traitSuperAccessorName(sym: Symbol): String = { + val nameString = sym.javaSimpleName.toString + if (sym.name == nme.TRAIT_CONSTRUCTOR) nameString 
+ else nameString + "$" + } + + // ----------------------------------------------------------------------------------------- + // finding the least upper bound in agreement with the bytecode verifier (given two internal names handed by ASM) + // Background: + // http://gallium.inria.fr/~xleroy/publi/bytecode-verification-JAR.pdf + // http://comments.gmane.org/gmane.comp.java.vm.languages/2293 + // https://issues.scala-lang.org/browse/SI-3872 + // ----------------------------------------------------------------------------------------- + + /* An `asm.ClassWriter` that uses `jvmWiseLUB()` + * The internal name of the least common ancestor of the types given by inameA and inameB. + * It's what ASM needs to know in order to compute stack map frames, http://asm.ow2.org/doc/developer-guide.html#controlflow + */ + final class CClassWriter(flags: Int) extends asm.ClassWriter(flags) { + + /** + * This method is thread-safe: it depends only on the BTypes component, which does not depend + * on global. TODO @lry move to a different place where no global is in scope, on bTypes. + */ + override def getCommonSuperClass(inameA: String, inameB: String): String = { + val a = classBTypeFromInternalName(inameA) + val b = classBTypeFromInternalName(inameB) + val lub = a.jvmWiseLUB(b) + val lubName = lub.internalName + assert(lubName != "scala/Any") + lubName // ASM caches the answer during the lifetime of a ClassWriter. We outlive that. Not sure whether caching on our side would improve things. + } + } + + /* + * must-single-thread + */ + def initBytecodeWriter(): BytecodeWriter = { + (None: Option[AbstractFile] /*getSingleOutput*/) match { // todo: implement + case Some(f) if f.hasExtension("jar") => + new DirectToJarfileWriter(f.file) + case _ => + factoryNonJarBytecodeWriter() + } + } + + /* + * Populates the InnerClasses JVM attribute with `refedInnerClasses`. See also the doc on inner + * classes in BTypes.scala. 
+ * + * `refedInnerClasses` may contain duplicates, need not contain the enclosing inner classes of + * each inner class it lists (those are looked up and included). + * + * This method serializes in the InnerClasses JVM attribute in an appropriate order, + * not necessarily that given by `refedInnerClasses`. + * + * can-multi-thread + */ + final def addInnerClasses(jclass: asm.ClassVisitor, declaredInnerClasses: List[ClassBType], refedInnerClasses: List[ClassBType]): Unit = { + // sorting ensures nested classes are listed after their enclosing class thus satisfying the Eclipse Java compiler + val allNestedClasses = new mutable.TreeSet[ClassBType]()(Ordering.by(_.internalName)) + allNestedClasses ++= declaredInnerClasses + refedInnerClasses.foreach(allNestedClasses ++= _.enclosingNestedClassesChain) + for nestedClass <- allNestedClasses + do { + // Extract the innerClassEntry - we know it exists, enclosingNestedClassesChain only returns nested classes. + val Some(e) = nestedClass.innerClassAttributeEntry: @unchecked + jclass.visitInnerClass(e.name, e.outerName, e.innerName, e.flags) + } + } + + /* + * can-multi-thread + */ + def createJAttribute(name: String, b: Array[Byte], offset: Int, len: Int): asm.Attribute = { + new asm.Attribute(name) { + override def write(classWriter: ClassWriter, code: Array[Byte], + codeLength: Int, maxStack: Int, maxLocals: Int): asm.ByteVector = { + val byteVector = new asm.ByteVector(len) + byteVector.putByteArray(b, offset, len) + byteVector + } + } + } + + /* + * Custom attribute (JVMS 4.7.1) "ScalaSig" used as marker only + * i.e., the pickle is contained in a custom annotation, see: + * (1) `addAnnotations()`, + * (2) SID # 10 (draft) - Storage of pickled Scala signatures in class files, http://www.scala-lang.org/sid/10 + * (3) SID # 5 - Internals of Scala Annotations, http://www.scala-lang.org/sid/5 + * That annotation in turn is not related to the "java-generic-signature" (JVMS 4.7.9) + * other than both ending up encoded as 
attributes (JVMS 4.7) + * (with the caveat that the "ScalaSig" attribute is associated to some classes, + * while the "Signature" attribute can be associated to classes, methods, and fields.) + * + */ + trait BCPickles { + + import dotty.tools.dotc.core.unpickleScala2.{ PickleFormat, PickleBuffer } + + val versionPickle = { + val vp = new PickleBuffer(new Array[Byte](16), -1, 0) + assert(vp.writeIndex == 0, vp) + vp writeNat PickleFormat.MajorVersion + vp writeNat PickleFormat.MinorVersion + vp writeNat 0 + vp + } + + /* + * can-multi-thread + */ + def pickleMarkerLocal = { + createJAttribute(ScalaSignatureATTRName, versionPickle.bytes, 0, versionPickle.writeIndex) + } + + /* + * can-multi-thread + */ + def pickleMarkerForeign = { + createJAttribute(ScalaATTRName, new Array[Byte](0), 0, 0) + } + } // end of trait BCPickles + + trait BCInnerClassGen extends caps.Pure { + + def debugLevel = 3 // 0 -> no debug info; 1-> filename; 2-> lines; 3-> varnames + + final val emitSource = debugLevel >= 1 + final val emitLines = debugLevel >= 2 + final val emitVars = debugLevel >= 3 + + /** + * The class internal name for a given class symbol. + */ + final def internalName(sym: Symbol): String = { + // For each java class, the scala compiler creates a class and a module (thus a module class). + // If the `sym` is a java module class, we use the java class instead. This ensures that the + // ClassBType is created from the main class (instead of the module class). + // The two symbols have the same name, so the resulting internalName is the same. 
+ val classSym = if (sym.is(JavaDefined) && sym.is(ModuleClass)) sym.linkedClass else sym + getClassBType(classSym).internalName + } + + private def assertClassNotArray(sym: Symbol): Unit = { + assert(sym.isClass, sym) + assert(sym != defn.ArrayClass || compilingArray, sym) + } + + private def assertClassNotArrayNotPrimitive(sym: Symbol): Unit = { + assertClassNotArray(sym) + assert(!primitiveTypeMap.contains(sym) || isCompilingPrimitive, sym) + } + + /** + * The ClassBType for a class symbol. + * + * The class symbol scala.Nothing is mapped to the class scala.runtime.Nothing$. Similarly, + * scala.Null is mapped to scala.runtime.Null$. This is because there exist no class files + * for the Nothing / Null. If used for example as a parameter type, we use the runtime classes + * in the classfile method signature. + * + * Note that the referenced class symbol may be an implementation class. For example when + * compiling a mixed-in method that forwards to the static method in the implementation class, + * the class descriptor of the receiver (the implementation class) is obtained by creating the + * ClassBType. + */ + final def getClassBType(sym: Symbol): ClassBType = { + assertClassNotArrayNotPrimitive(sym) + + if (sym == defn.NothingClass) srNothingRef + else if (sym == defn.NullClass) srNullRef + else classBTypeFromSymbol(sym) + } + + /* + * must-single-thread + */ + final def asmMethodType(msym: Symbol): MethodBType = { + assert(msym.is(Method), s"not a method-symbol: $msym") + val resT: BType = + if (msym.isClassConstructor || msym.isConstructor) UNIT + else toTypeKind(msym.info.resultType) + MethodBType(msym.info.firstParamTypes map toTypeKind, resT) + } + + /** + * The jvm descriptor of a type. + */ + final def typeDescriptor(t: Type): String = { toTypeKind(t).descriptor } + + /** + * The jvm descriptor for a symbol. 
+ */ + final def symDescriptor(sym: Symbol): String = getClassBType(sym).descriptor + + final def toTypeKind(tp: Type): BType = typeToTypeKind(tp)(BCodeHelpers.this)(this) + + } // end of trait BCInnerClassGen + + trait BCAnnotGen extends BCInnerClassGen { + + /* + * must-single-thread + */ + def emitAnnotations(cw: asm.ClassVisitor, annotations: List[Annotation]): Unit = + for(annot <- annotations; if shouldEmitAnnotation(annot)) { + val typ = annot.tree.tpe + val assocs = assocsFromApply(annot.tree) + val av = cw.visitAnnotation(typeDescriptor(typ), isRuntimeVisible(annot)) + emitAssocs(av, assocs, BCodeHelpers.this)(this) + } + + /* + * must-single-thread + */ + def emitAnnotations(mw: asm.MethodVisitor, annotations: List[Annotation]): Unit = + for(annot <- annotations; if shouldEmitAnnotation(annot)) { + val typ = annot.tree.tpe + val assocs = assocsFromApply(annot.tree) + val av = mw.visitAnnotation(typeDescriptor(typ), isRuntimeVisible(annot)) + emitAssocs(av, assocs, BCodeHelpers.this)(this) + } + + /* + * must-single-thread + */ + def emitAnnotations(fw: asm.FieldVisitor, annotations: List[Annotation]): Unit = + for(annot <- annotations; if shouldEmitAnnotation(annot)) { + val typ = annot.tree.tpe + val assocs = assocsFromApply(annot.tree) + val av = fw.visitAnnotation(typeDescriptor(typ), isRuntimeVisible(annot)) + emitAssocs(av, assocs, BCodeHelpers.this)(this) + } + + /* + * must-single-thread + */ + def emitParamNames(jmethod: asm.MethodVisitor, params: List[Symbol]) = + for param <- params do + var access = asm.Opcodes.ACC_FINAL + if param.is(Artifact) then access |= asm.Opcodes.ACC_SYNTHETIC + jmethod.visitParameter(param.name.mangledString, access) + + /* + * must-single-thread + */ + def emitParamAnnotations(jmethod: asm.MethodVisitor, pannotss: List[List[Annotation]]): Unit = + val annotationss = pannotss map (_ filter shouldEmitAnnotation) + if (annotationss forall (_.isEmpty)) return + for ((annots, idx) <- annotationss.zipWithIndex; + annot <- 
annots) { + val typ = annot.tree.tpe + val assocs = assocsFromApply(annot.tree) + val pannVisitor: asm.AnnotationVisitor = jmethod.visitParameterAnnotation(idx, typeDescriptor(typ.asInstanceOf[Type]), isRuntimeVisible(annot)) + emitAssocs(pannVisitor, assocs, BCodeHelpers.this)(this) + } + + + private def shouldEmitAnnotation(annot: Annotation): Boolean = { + annot.symbol.is(JavaDefined) && + retentionPolicyOf(annot) != AnnotationRetentionSourceAttr + } + + private def emitAssocs(av: asm.AnnotationVisitor, assocs: List[(Name, Object)], bcodeStore: BCodeHelpers) + (innerClasesStore: bcodeStore.BCInnerClassGen) = { + for ((name, value) <- assocs) + emitArgument(av, name.mangledString, value.asInstanceOf[Tree], bcodeStore)(innerClasesStore) + av.visitEnd() + } + + private def emitArgument(av: AnnotationVisitor, + name: String, + arg: Tree, bcodeStore: BCodeHelpers)(innerClasesStore: bcodeStore.BCInnerClassGen): Unit = { + val narg = normalizeArgument(arg) + // Transformation phases are not run on annotation trees, so we need to run + // `constToLiteral` at this point. 
+ val t = atPhase(erasurePhase)(constToLiteral(narg)) + t match { + case Literal(const @ Constant(_)) => + const.tag match { + case BooleanTag | ByteTag | ShortTag | CharTag | IntTag | LongTag | FloatTag | DoubleTag => av.visit(name, const.value) + case StringTag => + assert(const.value != null, const) // TODO this invariant isn't documented in `case class Constant` + av.visit(name, const.stringValue) // `stringValue` special-cases null, but that execution path isn't exercised for a const with StringTag + case ClazzTag => av.visit(name, typeToTypeKind(TypeErasure.erasure(const.typeValue))(bcodeStore)(innerClasesStore).toASMType) + } + case Ident(nme.WILDCARD) => + // An underscore argument indicates that we want to use the default value for this parameter, so do not emit anything + case t: tpd.RefTree if t.symbol.owner.linkedClass.isAllOf(JavaEnumTrait) => + val edesc = innerClasesStore.typeDescriptor(t.tpe) // the class descriptor of the enumeration class. + val evalue = t.symbol.javaSimpleName // value the actual enumeration value. 
+ av.visitEnum(name, edesc, evalue) + case t: SeqLiteral => + val arrAnnotV: AnnotationVisitor = av.visitArray(name) + for (arg <- t.elems) { emitArgument(arrAnnotV, null, arg, bcodeStore)(innerClasesStore) } + arrAnnotV.visitEnd() + + case Apply(fun, args) if fun.symbol == defn.ArrayClass.primaryConstructor || + toDenot(fun.symbol).owner == defn.ArrayClass.linkedClass && fun.symbol.name == nme.apply => + val arrAnnotV: AnnotationVisitor = av.visitArray(name) + + var actualArgs = if (fun.tpe.isImplicitMethod) { + // generic array method, need to get implicit argument out of the way + fun.asInstanceOf[Apply].args + } else args + + val flatArgs = actualArgs.flatMap { arg => + normalizeArgument(arg) match { + case t: tpd.SeqLiteral => t.elems + case e => List(e) + } + } + for(arg <- flatArgs) { + emitArgument(arrAnnotV, null, arg, bcodeStore)(innerClasesStore) + } + arrAnnotV.visitEnd() + /* + case sb @ ScalaSigBytes(bytes) => + // see http://www.scala-lang.org/sid/10 (Storage of pickled Scala signatures in class files) + // also JVMS Sec. 4.7.16.1 The element_value structure and JVMS Sec. 4.4.7 The CONSTANT_Utf8_info Structure. + if (sb.fitsInOneString) { + av.visit(name, BCodeAsmCommon.strEncode(sb)) + } else { + val arrAnnotV: asm.AnnotationVisitor = av.visitArray(name) + for(arg <- BCodeAsmCommon.arrEncode(sb)) { arrAnnotV.visit(name, arg) } + arrAnnotV.visitEnd() + } // for the lazy val in ScalaSigBytes to be GC'ed, the invoker of emitAnnotations() should hold the ScalaSigBytes in a method-local var that doesn't escape. 
+ */ + case t @ Apply(constr, args) if t.tpe.classSymbol.is(JavaAnnotation) => + val typ = t.tpe.classSymbol.denot.info + val assocs = assocsFromApply(t) + val desc = innerClasesStore.typeDescriptor(typ) // the class descriptor of the nested annotation class + val nestedVisitor = av.visitAnnotation(name, desc) + emitAssocs(nestedVisitor, assocs, bcodeStore)(innerClasesStore) + + case t => + report.error(em"Annotation argument is not a constant", t.sourcePos) + } + } + + private def normalizeArgument(arg: Tree): Tree = arg match { + case Trees.NamedArg(_, arg1) => normalizeArgument(arg1) + case Trees.Typed(arg1, _) => normalizeArgument(arg1) + case _ => arg + } + + private def isRuntimeVisible(annot: Annotation): Boolean = + if (toDenot(annot.tree.tpe.typeSymbol).hasAnnotation(AnnotationRetentionAttr)) + retentionPolicyOf(annot) == AnnotationRetentionRuntimeAttr + else { + // SI-8926: if the annotation class symbol doesn't have a @RetentionPolicy annotation, the + // annotation is emitted with visibility `RUNTIME` + // dotty bug: #389 + true + } + + private def retentionPolicyOf(annot: Annotation): Symbol = + annot.tree.tpe.typeSymbol.getAnnotation(AnnotationRetentionAttr). + flatMap(_.argument(0).map(_.tpe.termSymbol)).getOrElse(AnnotationRetentionClassAttr) + + private def assocsFromApply(tree: Tree): List[(Name, Tree)] = { + tree match { + case Block(_, expr) => assocsFromApply(expr) + case Apply(fun, args) => + fun.tpe.widen match { + case MethodType(names) => + (names zip args).filter { + case (_, t: tpd.Ident) if (t.tpe.normalizedPrefix eq NoPrefix) => false + case _ => true + } + } + } + } + } // end of trait BCAnnotGen + + trait BCJGenSigGen { + import int.given + + def getCurrentCUnit(): CompilationUnit + + /** + * Generates the generic signature for `sym` before erasure. + * + * @param sym The symbol for which to generate a signature. + * @param owner The owner of `sym`. 
+ * @return The generic signature of `sym` before erasure, as specified in the Java Virtual + * Machine Specification, §4.3.4, or `null` if `sym` doesn't need a generic signature. + * @see https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.3.4 + */ + def getGenericSignature(sym: Symbol, owner: Symbol): String = { + atPhase(erasurePhase) { + val memberTpe = + if (sym.is(Method)) sym.denot.info + else owner.denot.thisType.memberInfo(sym) + getGenericSignatureHelper(sym, owner, memberTpe).orNull + } + } + + } // end of trait BCJGenSigGen + + trait BCForwardersGen extends BCAnnotGen with BCJGenSigGen { + + /* Add a forwarder for method m. Used only from addForwarders(). + * + * must-single-thread + */ + private def addForwarder(jclass: asm.ClassVisitor, module: Symbol, m: Symbol, isSynthetic: Boolean): Unit = { + val moduleName = internalName(module) + val methodInfo = module.thisType.memberInfo(m) + val paramJavaTypes: List[BType] = methodInfo.firstParamTypes map toTypeKind + // val paramNames = 0 until paramJavaTypes.length map ("x_" + _) + + /* Forwarders must not be marked final, + * as the JVM will not allow redefinition of a final static method, + * and we don't know what classes might be subclassing the companion class. See SI-4827. + */ + // TODO: evaluate the other flags we might be dropping on the floor here. + val flags = GenBCodeOps.PublicStatic | ( + if (m.is(JavaVarargs)) asm.Opcodes.ACC_VARARGS else 0 + ) | ( + if (isSynthetic) asm.Opcodes.ACC_SYNTHETIC else 0 + ) + + // TODO needed? 
for(ann <- m.annotations) { ann.symbol.initialize } + val jgensig = getStaticForwarderGenericSignature(m, module) + val (throws, others) = m.annotations.partition(_.symbol eq defn.ThrowsAnnot) + val thrownExceptions: List[String] = getExceptions(throws) + + val jReturnType = toTypeKind(methodInfo.resultType) + val mdesc = MethodBType(paramJavaTypes, jReturnType).descriptor + val mirrorMethodName = m.javaSimpleName + val mirrorMethod: asm.MethodVisitor = jclass.visitMethod( + flags, + mirrorMethodName, + mdesc, + jgensig, + mkArrayS(thrownExceptions) + ) + + emitAnnotations(mirrorMethod, others) + val params: List[Symbol] = Nil // backend uses this to emit annotations on parameter lists of forwarders + // to static methods of companion class + // Old assumption: in Dotty this link does not exists: there is no way to get from method type + // to inner symbols of DefDef + // TODO: now we have paramSymss and could use it here. + emitParamAnnotations(mirrorMethod, params.map(_.annotations)) + + mirrorMethod.visitCode() + + mirrorMethod.visitFieldInsn(asm.Opcodes.GETSTATIC, moduleName, str.MODULE_INSTANCE_FIELD, symDescriptor(module)) + + var index = 0 + for(jparamType <- paramJavaTypes) { + mirrorMethod.visitVarInsn(jparamType.typedOpcode(asm.Opcodes.ILOAD), index) + assert(!jparamType.isInstanceOf[MethodBType], jparamType) + index += jparamType.size + } + + mirrorMethod.visitMethodInsn(asm.Opcodes.INVOKEVIRTUAL, moduleName, mirrorMethodName, asmMethodType(m).descriptor, false) + mirrorMethod.visitInsn(jReturnType.typedOpcode(asm.Opcodes.IRETURN)) + + mirrorMethod.visitMaxs(0, 0) // just to follow protocol, dummy arguments + mirrorMethod.visitEnd() + + } + + /* Add forwarders for all methods defined in `module` that don't conflict + * with methods in the companion class of `module`. A conflict arises when + * a method with the same name is defined both in a class and its companion object: + * method signature is not taken into account. 
+ * + * must-single-thread + */ + def addForwarders(jclass: asm.ClassVisitor, jclassName: String, moduleClass: Symbol): Unit = { + assert(moduleClass.is(ModuleClass), moduleClass) + report.debuglog(s"Dumping mirror class for object: $moduleClass") + + val linkedClass = moduleClass.companionClass + lazy val conflictingNames: Set[Name] = { + (linkedClass.info.allMembers.collect { case d if d.name.isTermName => d.name }).toSet + } + report.debuglog(s"Potentially conflicting names for forwarders: $conflictingNames") + + for (m0 <- sortedMembersBasedOnFlags(moduleClass.info, required = Method, excluded = ExcludedForwarder)) { + val m = if (m0.is(Bridge)) m0.nextOverriddenSymbol else m0 + if (m == NoSymbol) + report.log(s"$m0 is a bridge method that overrides nothing, something went wrong in a previous phase.") + else if (m.isType || m.is(Deferred) || (m.owner eq defn.ObjectClass) || m.isConstructor || m.name.is(ExpandedName)) + report.debuglog(s"No forwarder for '$m' from $jclassName to '$moduleClass'") + else if (conflictingNames(m.name)) + report.log(s"No forwarder for $m due to conflict with ${linkedClass.info.member(m.name)}") + else if (m.accessBoundary(defn.RootClass) ne defn.RootClass) + report.log(s"No forwarder for non-public member $m") + else { + report.log(s"Adding static forwarder for '$m' from $jclassName to '$moduleClass'") + // It would be simpler to not generate forwarders for these methods, + // but that wouldn't be binary-compatible with Scala 3.0.0, so instead + // we generate ACC_SYNTHETIC forwarders so Java compilers ignore them. + val isSynthetic = + m0.name.is(NameKinds.SyntheticSetterName) || + // Only hide bridges generated at Erasure, mixin forwarders are also + // marked as bridge but shouldn't be hidden since they don't have a + // non-bridge overload. 
+ m0.is(Bridge) && m0.initial.validFor.firstPhaseId == erasurePhase.next.id + addForwarder(jclass, moduleClass, m, isSynthetic) + } + } + } + + /** The members of this type that have all of `required` flags but none of `excluded` flags set. + * The members are sorted by name and signature to guarantee a stable ordering. + */ + private def sortedMembersBasedOnFlags(tp: Type, required: Flag, excluded: FlagSet): List[Symbol] = { + // The output of `memberNames` is a Set, sort it to guarantee a stable ordering. + val names = tp.memberNames(takeAllFilter).toSeq.sorted + val buffer = mutable.ListBuffer[Symbol]() + names.foreach { name => + buffer ++= tp.memberBasedOnFlags(name, required, excluded) + .alternatives.sortBy(_.signature)(Signature.lexicographicOrdering).map(_.symbol) + } + buffer.toList + } + + /* + * Quoting from JVMS 4.7.5 The Exceptions Attribute + * "The Exceptions attribute indicates which checked exceptions a method may throw. + * There may be at most one Exceptions attribute in each method_info structure." + * + * The contents of that attribute are determined by the `String[] exceptions` argument to ASM's ClassVisitor.visitMethod() + * This method returns such list of internal names. + * + * must-single-thread + */ + def getExceptions(excs: List[Annotation]): List[String] = { + for (case ThrownException(exc) <- excs.distinct) + yield internalName(TypeErasure.erasure(exc).classSymbol) + } + } // end of trait BCForwardersGen + + trait BCClassGen extends BCInnerClassGen { + + // Used as threshold above which a tableswitch bytecode instruction is preferred over a lookupswitch. + // There's a space tradeoff between these multi-branch instructions (details in the JVM spec). + // The particular value in use for `MIN_SWITCH_DENSITY` reflects a heuristic. 
+ val MIN_SWITCH_DENSITY = 0.7 + + /* + * Add public static final field serialVersionUID with value `id` + * + * can-multi-thread + */ + def addSerialVUID(id: Long, jclass: asm.ClassVisitor): Unit = { + // add static serialVersionUID field if `clasz` annotated with `@SerialVersionUID(uid: Long)` + jclass.visitField( + GenBCodeOps.PrivateStaticFinal, + "serialVersionUID", + "J", + null, // no java-generic-signature + java.lang.Long.valueOf(id) + ).visitEnd() + } + } // end of trait BCClassGen + + /* functionality for building plain and mirror classes */ + abstract class JCommonBuilder + extends BCInnerClassGen + with BCAnnotGen + with BCForwardersGen + with BCPickles { } + + /* builder of mirror classes */ + class JMirrorBuilder extends JCommonBuilder { + + private var cunit: CompilationUnit = _ + def getCurrentCUnit(): CompilationUnit = cunit; + + /* Generate a mirror class for a top-level module. A mirror class is a class + * containing only static methods that forward to the corresponding method + * on the MODULE instance of the given Scala object. It will only be + * generated if there is no companion class: if there is, an attempt will + * instead be made to add the forwarder methods to the companion class. 
+ * + * must-single-thread + */ + def genMirrorClass(moduleClass: Symbol, cunit: CompilationUnit): asm.tree.ClassNode = { + assert(moduleClass.is(ModuleClass)) + assert(moduleClass.companionClass == NoSymbol, moduleClass) + this.cunit = cunit + val bType = mirrorClassBTypeFromSymbol(moduleClass) + val moduleName = internalName(moduleClass) // + "$" + val mirrorName = bType.internalName + + val mirrorClass = new asm.tree.ClassNode + mirrorClass.visit( + classfileVersion, + bType.info.flags, + mirrorName, + null /* no java-generic-signature */, + ObjectRef.internalName, + EMPTY_STRING_ARRAY + ) + + if (emitSource) { + mirrorClass.visitSource("" + cunit.source.file.name, + null /* SourceDebugExtension */) + } + + val ssa = None // getAnnotPickle(mirrorName, if (moduleClass.is(Module)) moduleClass.companionClass else moduleClass.companionModule) + mirrorClass.visitAttribute(if (ssa.isDefined) pickleMarkerLocal else pickleMarkerForeign) + emitAnnotations(mirrorClass, moduleClass.annotations ++ ssa) + + addForwarders(mirrorClass, mirrorName, moduleClass) + mirrorClass.visitEnd() + + moduleClass.name // this side-effect is necessary, really. + + mirrorClass + } + + } // end of class JMirrorBuilder + + trait JAndroidBuilder { + self: BCInnerClassGen => + + /* From the reference documentation of the Android SDK: + * The `Parcelable` interface identifies classes whose instances can be written to and restored from a `Parcel`. + * Classes implementing the `Parcelable` interface must also have a static field called `CREATOR`, + * which is an object implementing the `Parcelable.Creator` interface. 
+ */ + val androidFieldName = "CREATOR".toTermName + + lazy val AndroidParcelableInterface : Symbol = NoSymbol // getClassIfDefined("android.os.Parcelable") + lazy val AndroidCreatorClass : Symbol = NoSymbol // getClassIfDefined("android.os.Parcelable$Creator") + + /* + * must-single-thread + */ + def isAndroidParcelableClass(sym: Symbol) = + (AndroidParcelableInterface != NoSymbol) && + (sym.info.parents.map(_.typeSymbol) contains AndroidParcelableInterface) + + /* + * must-single-thread + */ + def legacyAddCreatorCode(clinit: asm.MethodVisitor, cnode: asm.tree.ClassNode, thisName: String): Unit = { + val androidCreatorType = getClassBType(AndroidCreatorClass) + val tdesc_creator = androidCreatorType.descriptor + + cnode.visitField( + GenBCodeOps.PublicStaticFinal, + "CREATOR", + tdesc_creator, + null, // no java-generic-signature + null // no initial value + ).visitEnd() + + val moduleName = (thisName + "$") + + // GETSTATIC `moduleName`.MODULE$ : `moduleName`; + clinit.visitFieldInsn( + asm.Opcodes.GETSTATIC, + moduleName, + str.MODULE_INSTANCE_FIELD, + "L" + moduleName + ";" + ) + + // INVOKEVIRTUAL `moduleName`.CREATOR() : android.os.Parcelable$Creator; + val bt = MethodBType(Nil, androidCreatorType) + clinit.visitMethodInsn( + asm.Opcodes.INVOKEVIRTUAL, + moduleName, + "CREATOR", + bt.descriptor, + false + ) + + // PUTSTATIC `thisName`.CREATOR; + clinit.visitFieldInsn( + asm.Opcodes.PUTSTATIC, + thisName, + "CREATOR", + tdesc_creator + ) + } + + } // end of trait JAndroidBuilder + + /** + * This method returns the BType for a type reference, for example a parameter type. + * + * If the result is a ClassBType for a nested class, it is added to the innerClassBufferASM. + * + * If `t` references a class, toTypeKind ensures that the class is not an implementation class. + * See also comment on getClassBTypeAndRegisterInnerClass, which is invoked for implementation + * classes. 
+ */ + private def typeToTypeKind(tp: Type)(ct: BCodeHelpers)(storage: ct.BCInnerClassGen): ct.bTypes.BType = { + import ct.bTypes._ + val defn = ctx.definitions + import coreBTypes._ + import Types._ + /** + * Primitive types are represented as TypeRefs to the class symbol of, for example, scala.Int. + * The `primitiveTypeMap` maps those class symbols to the corresponding PrimitiveBType. + */ + def primitiveOrClassToBType(sym: Symbol): BType = { + assert(sym.isClass, sym) + assert(sym != defn.ArrayClass || compilingArray, sym) + primitiveTypeMap.getOrElse(sym, storage.getClassBType(sym)).asInstanceOf[BType] + } + + /** + * When compiling Array.scala, the type parameter T is not erased and shows up in method + * signatures, e.g. `def apply(i: Int): T`. A TyperRef to T is replaced by ObjectReference. + */ + def nonClassTypeRefToBType(sym: Symbol): ClassBType = { + assert(sym.isType && compilingArray, sym) + ObjectRef.asInstanceOf[ct.bTypes.ClassBType] + } + + tp.widenDealias match { + case JavaArrayType(el) =>ArrayBType(typeToTypeKind(el)(ct)(storage)) // Array type such as Array[Int] (kept by erasure) + case t: TypeRef => + t.info match { + + case _ => + if (!t.symbol.isClass) nonClassTypeRefToBType(t.symbol) // See comment on nonClassTypeRefToBType + else primitiveOrClassToBType(t.symbol) // Common reference to a type such as scala.Int or java.lang.String + } + case Types.ClassInfo(_, sym, _, _, _) => primitiveOrClassToBType(sym) // We get here, for example, for genLoadModule, which invokes toTypeKind(moduleClassSymbol.info) + + /* AnnotatedType should (probably) be eliminated by erasure. However we know it happens for + * meta-annotated annotations (@(ann @getter) val x = 0), so we don't emit a warning. + * The type in the AnnotationInfo is an AnnotatedTpe. Tested in jvm/annotations.scala. + */ + case a @ AnnotatedType(t, _) => + report.debuglog(s"typeKind of annotated type $a") + typeToTypeKind(t)(ct)(storage) + + /* The cases below should probably never occur. 
They are kept for now to avoid introducing + * new compiler crashes, but we added a warning. The compiler / library bootstrap and the + * test suite don't produce any warning. + */ + + case tp => + report.warning( + s"an unexpected type representation reached the compiler backend while compiling ${ctx.compilationUnit}: $tp. " + + "If possible, please file a bug on https://github.com/lampepfl/dotty/issues") + + tp match { + case tp: ThisType if tp.cls == defn.ArrayClass => ObjectRef.asInstanceOf[ct.bTypes.ClassBType] // was introduced in 9b17332f11 to fix SI-999, but this code is not reached in its test, or any other test + case tp: ThisType => storage.getClassBType(tp.cls) + // case t: SingletonType => primitiveOrClassToBType(t.classSymbol) + case t: SingletonType => typeToTypeKind(t.underlying)(ct)(storage) + case t: RefinedType => typeToTypeKind(t.parent)(ct)(storage) //parents.map(_.toTypeKind(ct)(storage).asClassBType).reduceLeft((a, b) => a.jvmWiseLUB(b)) + } + } + } + + private def getGenericSignatureHelper(sym: Symbol, owner: Symbol, memberTpe: Type)(using Context): Option[String] = { + if (needsGenericSignature(sym)) { + val erasedTypeSym = TypeErasure.fullErasure(sym.denot.info).typeSymbol + if (erasedTypeSym.isPrimitiveValueClass) { + // Suppress signatures for symbols whose types erase in the end to primitive + // value types. This is needed to fix #7416. 
+ None + } else { + val jsOpt = GenericSignatures.javaSig(sym, memberTpe) + if (ctx.settings.XverifySignatures.value) { + jsOpt.foreach(verifySignature(sym, _)) + } + + jsOpt + } + } else { + None + } + } + + private def verifySignature(sym: Symbol, sig: String)(using Context): Unit = { + import scala.tools.asm.util.CheckClassAdapter + def wrap(body: => Unit): Unit = { + try body + catch { + case ex: Throwable => + report.error( + em"""|compiler bug: created invalid generic signature for $sym in ${sym.denot.owner.showFullName} + |signature: $sig + |if this is reproducible, please report bug at https://github.com/lampepfl/dotty/issues + """, sym.sourcePos) + throw ex + } + } + + wrap { + if (sym.is(Method)) { + CheckClassAdapter.checkMethodSignature(sig) + } + else if (sym.isTerm) { + CheckClassAdapter.checkFieldSignature(sig) + } + else { + CheckClassAdapter.checkClassSignature(sig) + } + } + } + + // @M don't generate java generics sigs for (members of) implementation + // classes, as they are monomorphic (TODO: ok?) + private final def needsGenericSignature(sym: Symbol): Boolean = !( + // pp: this condition used to include sym.hasexpandedname, but this leads + // to the total loss of generic information if a private member is + // accessed from a closure: both the field and the accessor were generated + // without it. This is particularly bad because the availability of + // generic information could disappear as a consequence of a seemingly + // unrelated change. + ctx.base.settings.YnoGenericSig.value + || sym.is(Artifact) + || sym.isAllOf(LiftedMethod) + || sym.is(Bridge) + ) + + private def getStaticForwarderGenericSignature(sym: Symbol, moduleClass: Symbol): String = { + // scala/bug#3452 Static forwarder generation uses the same erased signature as the method if forwards to. + // By rights, it should use the signature as-seen-from the module class, and add suitable + // primitive and value-class boxing/unboxing. 
+ // But for now, just like we did in mixin, we just avoid writing a wrong generic signature + // (one that doesn't erase to the actual signature). See run/t3452b for a test case. + + val memberTpe = atPhase(erasurePhase) { moduleClass.denot.thisType.memberInfo(sym) } + val erasedMemberType = ElimErasedValueType.elimEVT(TypeErasure.transformInfo(sym, memberTpe)) + if (erasedMemberType =:= sym.denot.info) + getGenericSignatureHelper(sym, moduleClass, memberTpe).orNull + else null + } + + def abort(msg: String): Nothing = { + report.error(msg) + throw new RuntimeException(msg) + } + + private def compilingArray(using Context) = + ctx.compilationUnit.source.file.name == "Array.scala" +} + +object BCodeHelpers { + + class InvokeStyle(val style: Int) extends AnyVal { + import InvokeStyle._ + def isVirtual: Boolean = this == Virtual + def isStatic : Boolean = this == Static + def isSpecial: Boolean = this == Special + def isSuper : Boolean = this == Super + + def hasInstance = this != Static + } + + object InvokeStyle { + val Virtual = new InvokeStyle(0) // InvokeVirtual or InvokeInterface + val Static = new InvokeStyle(1) // InvokeStatic + val Special = new InvokeStyle(2) // InvokeSpecial (private methods, constructors) + val Super = new InvokeStyle(3) // InvokeSpecial (super calls) + } + + /** An attachment on Apply nodes indicating that it should be compiled with + * `invokespecial` instead of `invokevirtual`. This is used for static + * forwarders. + * See BCodeSkelBuilder.makeStaticForwarder for more details. 
+ */ + val UseInvokeSpecial = new dotc.util.Property.Key[Unit] + +} diff --git a/tests/pos-with-compiler-cc/backend/jvm/BCodeIdiomatic.scala b/tests/pos-with-compiler-cc/backend/jvm/BCodeIdiomatic.scala new file mode 100644 index 000000000000..5eb8d7a52aa2 --- /dev/null +++ b/tests/pos-with-compiler-cc/backend/jvm/BCodeIdiomatic.scala @@ -0,0 +1,726 @@ +package dotty.tools +package backend +package jvm + +import scala.language.unsafeNulls + +import scala.tools.asm +import scala.annotation.switch +import Primitives.{NE, EQ, TestOp, ArithmeticOp} +import scala.tools.asm.tree.MethodInsnNode +import dotty.tools.dotc.report + +/* + * A high-level facade to the ASM API for bytecode generation. + * + * @author Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded + * @version 1.0 + * + */ +trait BCodeIdiomatic extends caps.Pure { + val int: DottyBackendInterface + final lazy val bTypes = new BTypesFromSymbols[int.type](int) + + import int.{_, given} + import bTypes._ + import coreBTypes._ + + + + lazy val target = + val releaseValue = Option(ctx.settings.javaOutputVersion.value).filter(_.nonEmpty) + val targetValue = Option(ctx.settings.XuncheckedJavaOutputVersion.value).filter(_.nonEmpty) + val defaultTarget = "8" + (releaseValue, targetValue) match + case (Some(release), None) => release + case (None, Some(target)) => target + case (Some(release), Some(_)) => + report.warning(s"The value of ${ctx.settings.XuncheckedJavaOutputVersion.name} was overridden by ${ctx.settings.javaOutputVersion.name}") + release + case (None, None) => "8" // least supported version by default + + + // Keep synchronized with `minTargetVersion` and `maxTargetVersion` in ScalaSettings + lazy val classfileVersion: Int = target match { + case "8" => asm.Opcodes.V1_8 + case "9" => asm.Opcodes.V9 + case "10" => asm.Opcodes.V10 + case "11" => asm.Opcodes.V11 + case "12" => asm.Opcodes.V12 + case "13" => asm.Opcodes.V13 + case "14" => asm.Opcodes.V14 + case "15" => asm.Opcodes.V15/* 
+ case "16" => asm.Opcodes.V16 + case "17" => asm.Opcodes.V17 + case "18" => asm.Opcodes.V18 + case "19" => asm.Opcodes.V19 + case "20" => asm.Opcodes.V20 + case "21" => asm.Opcodes.V21*/ + } + + lazy val majorVersion: Int = (classfileVersion & 0xFF) + lazy val emitStackMapFrame = (majorVersion >= 50) + + val extraProc: Int = + import GenBCodeOps.addFlagIf + asm.ClassWriter.COMPUTE_MAXS + .addFlagIf(emitStackMapFrame, asm.ClassWriter.COMPUTE_FRAMES) + + lazy val JavaStringBuilderClassName = jlStringBuilderRef.internalName + + val CLASS_CONSTRUCTOR_NAME = "" + val INSTANCE_CONSTRUCTOR_NAME = "" + + val EMPTY_STRING_ARRAY = Array.empty[String] + val EMPTY_INT_ARRAY = Array.empty[Int] + val EMPTY_LABEL_ARRAY = Array.empty[asm.Label] + val EMPTY_BTYPE_ARRAY = Array.empty[BType] + + /* can-multi-thread */ + final def mkArrayB(xs: List[BType]): Array[BType] = { + if (xs.isEmpty) { return EMPTY_BTYPE_ARRAY } + val a = new Array[BType](xs.size); xs.copyToArray(a); a + } + /* can-multi-thread */ + final def mkArrayS(xs: List[String]): Array[String] = { + if (xs.isEmpty) { return EMPTY_STRING_ARRAY } + val a = new Array[String](xs.size); xs.copyToArray(a); a + } + /* can-multi-thread */ + final def mkArrayL(xs: List[asm.Label]): Array[asm.Label] = { + if (xs.isEmpty) { return EMPTY_LABEL_ARRAY } + val a = new Array[asm.Label](xs.size); xs.copyToArray(a); a + } + + /* + * can-multi-thread + */ + final def mkArrayReverse(xs: List[String]): Array[String] = { + val len = xs.size + if (len == 0) { return EMPTY_STRING_ARRAY } + val a = new Array[String](len) + var i = len - 1 + var rest = xs + while (!rest.isEmpty) { + a(i) = rest.head + rest = rest.tail + i -= 1 + } + a + } + + /* + * can-multi-thread + */ + final def mkArrayReverse(xs: List[Int]): Array[Int] = { + val len = xs.size + if (len == 0) { return EMPTY_INT_ARRAY } + val a = new Array[Int](len) + var i = len - 1 + var rest = xs + while (!rest.isEmpty) { + a(i) = rest.head + rest = rest.tail + i -= 1 + } + a + } + + /* 
Just a namespace for utilities that encapsulate MethodVisitor idioms. + * In the ASM world, org.objectweb.asm.commons.InstructionAdapter plays a similar role, + * but the methods here allow choosing when to transition from ICode to ASM types + * (including not at all, e.g. for performance). + */ + abstract class JCodeMethodN { + + def jmethod: asm.tree.MethodNode + + import asm.Opcodes; + + final def emit(opc: Int): Unit = { jmethod.visitInsn(opc) } + + /* + * can-multi-thread + */ + final def genPrimitiveArithmetic(op: ArithmeticOp, kind: BType): Unit = { + + import Primitives.{ ADD, SUB, MUL, DIV, REM, NOT } + + op match { + + case ADD => add(kind) + case SUB => sub(kind) + case MUL => mul(kind) + case DIV => div(kind) + case REM => rem(kind) + + case NOT => + if (kind.isIntSizedType) { + emit(Opcodes.ICONST_M1) + emit(Opcodes.IXOR) + } else if (kind == LONG) { + jmethod.visitLdcInsn(java.lang.Long.valueOf(-1)) + jmethod.visitInsn(Opcodes.LXOR) + } else { + abort(s"Impossible to negate an $kind") + } + + case _ => + abort(s"Unknown arithmetic primitive $op") + } + + } // end of method genPrimitiveArithmetic() + + /* + * can-multi-thread + */ + final def genPrimitiveLogical(op: /* LogicalOp */ Int, kind: BType): Unit = { + + import ScalaPrimitivesOps.{ AND, OR, XOR } + + ((op, kind): @unchecked) match { + case (AND, LONG) => emit(Opcodes.LAND) + case (AND, INT) => emit(Opcodes.IAND) + case (AND, _) => + emit(Opcodes.IAND) + if (kind != BOOL) { emitT2T(INT, kind) } + + case (OR, LONG) => emit(Opcodes.LOR) + case (OR, INT) => emit(Opcodes.IOR) + case (OR, _) => + emit(Opcodes.IOR) + if (kind != BOOL) { emitT2T(INT, kind) } + + case (XOR, LONG) => emit(Opcodes.LXOR) + case (XOR, INT) => emit(Opcodes.IXOR) + case (XOR, _) => + emit(Opcodes.IXOR) + if (kind != BOOL) { emitT2T(INT, kind) } + } + + } // end of method genPrimitiveLogical() + + /* + * can-multi-thread + */ + final def genPrimitiveShift(op: /* ShiftOp */ Int, kind: BType): Unit = { + + import 
ScalaPrimitivesOps.{ LSL, ASR, LSR } + + ((op, kind): @unchecked) match { + case (LSL, LONG) => emit(Opcodes.LSHL) + case (LSL, INT) => emit(Opcodes.ISHL) + case (LSL, _) => + emit(Opcodes.ISHL) + emitT2T(INT, kind) + + case (ASR, LONG) => emit(Opcodes.LSHR) + case (ASR, INT) => emit(Opcodes.ISHR) + case (ASR, _) => + emit(Opcodes.ISHR) + emitT2T(INT, kind) + + case (LSR, LONG) => emit(Opcodes.LUSHR) + case (LSR, INT) => emit(Opcodes.IUSHR) + case (LSR, _) => + emit(Opcodes.IUSHR) + emitT2T(INT, kind) + } + + } // end of method genPrimitiveShift() + + /* Creates a new `StringBuilder` instance with the requested capacity + * + * can-multi-thread + */ + final def genNewStringBuilder(size: Int): Unit = { + jmethod.visitTypeInsn(Opcodes.NEW, JavaStringBuilderClassName) + jmethod.visitInsn(Opcodes.DUP) + jmethod.visitLdcInsn(Integer.valueOf(size)) + invokespecial( + JavaStringBuilderClassName, + INSTANCE_CONSTRUCTOR_NAME, + "(I)V", + itf = false + ) + } + + /* Issue a call to `StringBuilder#append` for the right element type + * + * can-multi-thread + */ + final def genStringBuilderAppend(elemType: BType): Unit = { + val paramType = elemType match { + case ct: ClassBType if ct.isSubtypeOf(StringRef) => StringRef + case ct: ClassBType if ct.isSubtypeOf(jlStringBufferRef) => jlStringBufferRef + case ct: ClassBType if ct.isSubtypeOf(jlCharSequenceRef) => jlCharSequenceRef + // Don't match for `ArrayBType(CHAR)`, even though StringBuilder has such an overload: + // `"a" + Array('b')` should NOT be "ab", but "a[C@...". 
+ case _: RefBType => ObjectRef + // jlStringBuilder does not have overloads for byte and short, but we can just use the int version + case BYTE | SHORT => INT + case pt: PrimitiveBType => pt + } + val bt = MethodBType(List(paramType), jlStringBuilderRef) + invokevirtual(JavaStringBuilderClassName, "append", bt.descriptor) + } + + /* Extract the built `String` from the `StringBuilder` + * + * can-multi-thread + */ + final def genStringBuilderEnd: Unit = { + invokevirtual(JavaStringBuilderClassName, "toString", genStringBuilderEndDesc) + } + // Use ClassBType refs instead of plain string literal to make sure that needed ClassBTypes are initialized and reachable + private lazy val genStringBuilderEndDesc = MethodBType(Nil, StringRef).descriptor + + /* Concatenate top N arguments on the stack with `StringConcatFactory#makeConcatWithConstants` + * (only works for JDK 9+) + * + * can-multi-thread + */ + final def genIndyStringConcat( + recipe: String, + argTypes: Seq[asm.Type], + constants: Seq[String] + ): Unit = { + jmethod.visitInvokeDynamicInsn( + "makeConcatWithConstants", + asm.Type.getMethodDescriptor(StringRef.toASMType, argTypes:_*), + coreBTypes.jliStringConcatFactoryMakeConcatWithConstantsHandle, + (recipe +: constants):_* + ) + } + + /* + * Emits one or more conversion instructions based on the types given as arguments. + * + * @param from The type of the value to be converted into another type. + * @param to The type the value will be converted into. 
+ * + * can-multi-thread + */ + final def emitT2T(from: BType, to: BType): Unit = { + + assert( + from.isNonVoidPrimitiveType && to.isNonVoidPrimitiveType, + s"Cannot emit primitive conversion from $from to $to" + ) + + def pickOne(opcs: Array[Int]): Unit = { // TODO index on to.sort + val chosen = (to: @unchecked) match { + case BYTE => opcs(0) + case SHORT => opcs(1) + case CHAR => opcs(2) + case INT => opcs(3) + case LONG => opcs(4) + case FLOAT => opcs(5) + case DOUBLE => opcs(6) + } + if (chosen != -1) { emit(chosen) } + } + + if (from == to) { return } + // the only conversion involving BOOL that is allowed is (BOOL -> BOOL) + assert(from != BOOL && to != BOOL, s"inconvertible types : $from -> $to") + + // We're done with BOOL already + from match { + + // using `asm.Type.SHORT` instead of `BType.SHORT` because otherwise "warning: could not emit switch for @switch annotated match" + + case BYTE => pickOne(JCodeMethodN.fromByteT2T) + case SHORT => pickOne(JCodeMethodN.fromShortT2T) + case CHAR => pickOne(JCodeMethodN.fromCharT2T) + case INT => pickOne(JCodeMethodN.fromIntT2T) + + case FLOAT => + import asm.Opcodes.{ F2L, F2D, F2I } + to match { + case LONG => emit(F2L) + case DOUBLE => emit(F2D) + case _ => emit(F2I); emitT2T(INT, to) + } + + case LONG => + import asm.Opcodes.{ L2F, L2D, L2I } + to match { + case FLOAT => emit(L2F) + case DOUBLE => emit(L2D) + case _ => emit(L2I); emitT2T(INT, to) + } + + case DOUBLE => + import asm.Opcodes.{ D2L, D2F, D2I } + to match { + case FLOAT => emit(D2F) + case LONG => emit(D2L) + case _ => emit(D2I); emitT2T(INT, to) + } + } + } // end of emitT2T() + + // can-multi-thread + final def boolconst(b: Boolean): Unit = { iconst(if (b) 1 else 0) } + + // can-multi-thread + final def iconst(cst: Int): Unit = { + if (cst >= -1 && cst <= 5) { + emit(Opcodes.ICONST_0 + cst) + } else if (cst >= java.lang.Byte.MIN_VALUE && cst <= java.lang.Byte.MAX_VALUE) { + jmethod.visitIntInsn(Opcodes.BIPUSH, cst) + } else if (cst >= 
java.lang.Short.MIN_VALUE && cst <= java.lang.Short.MAX_VALUE) { + jmethod.visitIntInsn(Opcodes.SIPUSH, cst) + } else { + jmethod.visitLdcInsn(Integer.valueOf(cst)) + } + } + + // can-multi-thread + final def lconst(cst: Long): Unit = { + if (cst == 0L || cst == 1L) { + emit(Opcodes.LCONST_0 + cst.asInstanceOf[Int]) + } else { + jmethod.visitLdcInsn(java.lang.Long.valueOf(cst)) + } + } + + // can-multi-thread + final def fconst(cst: Float): Unit = { + val bits: Int = java.lang.Float.floatToIntBits(cst) + if (bits == 0L || bits == 0x3f800000 || bits == 0x40000000) { // 0..2 + emit(Opcodes.FCONST_0 + cst.asInstanceOf[Int]) + } else { + jmethod.visitLdcInsn(java.lang.Float.valueOf(cst)) + } + } + + // can-multi-thread + final def dconst(cst: Double): Unit = { + val bits: Long = java.lang.Double.doubleToLongBits(cst) + if (bits == 0L || bits == 0x3ff0000000000000L) { // +0.0d and 1.0d + emit(Opcodes.DCONST_0 + cst.asInstanceOf[Int]) + } else { + jmethod.visitLdcInsn(java.lang.Double.valueOf(cst)) + } + } + + // can-multi-thread + final def newarray(elem: BType): Unit = { + elem match { + case c: RefBType => + /* phantom type at play in `Array(null)`, SI-1513. On the other hand, Array(()) has element type `scala.runtime.BoxedUnit` which isObject. 
*/ + jmethod.visitTypeInsn(Opcodes.ANEWARRAY, c.classOrArrayType) + case _ => + assert(elem.isNonVoidPrimitiveType) + val rand = { + // using `asm.Type.SHORT` instead of `BType.SHORT` because otherwise "warning: could not emit switch for @switch annotated match" + elem match { + case BOOL => Opcodes.T_BOOLEAN + case BYTE => Opcodes.T_BYTE + case SHORT => Opcodes.T_SHORT + case CHAR => Opcodes.T_CHAR + case INT => Opcodes.T_INT + case LONG => Opcodes.T_LONG + case FLOAT => Opcodes.T_FLOAT + case DOUBLE => Opcodes.T_DOUBLE + } + } + jmethod.visitIntInsn(Opcodes.NEWARRAY, rand) + } + } + + + final def load( idx: Int, tk: BType): Unit = { emitVarInsn(Opcodes.ILOAD, idx, tk) } // can-multi-thread + final def store(idx: Int, tk: BType): Unit = { emitVarInsn(Opcodes.ISTORE, idx, tk) } // can-multi-thread + final def iinc( idx: Int, increment: Int): Unit = jmethod.visitIincInsn(idx, increment) // can-multi-thread + + final def aload( tk: BType): Unit = { emitTypeBased(JCodeMethodN.aloadOpcodes, tk) } // can-multi-thread + final def astore(tk: BType): Unit = { emitTypeBased(JCodeMethodN.astoreOpcodes, tk) } // can-multi-thread + + final def neg(tk: BType): Unit = { emitPrimitive(JCodeMethodN.negOpcodes, tk) } // can-multi-thread + final def add(tk: BType): Unit = { emitPrimitive(JCodeMethodN.addOpcodes, tk) } // can-multi-thread + final def sub(tk: BType): Unit = { emitPrimitive(JCodeMethodN.subOpcodes, tk) } // can-multi-thread + final def mul(tk: BType): Unit = { emitPrimitive(JCodeMethodN.mulOpcodes, tk) } // can-multi-thread + final def div(tk: BType): Unit = { emitPrimitive(JCodeMethodN.divOpcodes, tk) } // can-multi-thread + final def rem(tk: BType): Unit = { emitPrimitive(JCodeMethodN.remOpcodes, tk) } // can-multi-thread + + // can-multi-thread + final def invokespecial(owner: String, name: String, desc: String, itf: Boolean): Unit = { + emitInvoke(Opcodes.INVOKESPECIAL, owner, name, desc, itf) + } + // can-multi-thread + final def invokestatic(owner: String, name: 
String, desc: String, itf: Boolean): Unit = { + emitInvoke(Opcodes.INVOKESTATIC, owner, name, desc, itf) + } + // can-multi-thread + final def invokeinterface(owner: String, name: String, desc: String): Unit = { + emitInvoke(Opcodes.INVOKEINTERFACE, owner, name, desc, itf = true) + } + // can-multi-thread + final def invokevirtual(owner: String, name: String, desc: String): Unit = { + emitInvoke(Opcodes.INVOKEVIRTUAL, owner, name, desc, itf = false) + } + + def emitInvoke(opcode: Int, owner: String, name: String, desc: String, itf: Boolean): Unit = { + val node = new MethodInsnNode(opcode, owner, name, desc, itf) + jmethod.instructions.add(node) + } + + + // can-multi-thread + final def goTo(label: asm.Label): Unit = { jmethod.visitJumpInsn(Opcodes.GOTO, label) } + // can-multi-thread + final def emitIF(cond: TestOp, label: asm.Label): Unit = { jmethod.visitJumpInsn(cond.opcodeIF(), label) } + // can-multi-thread + final def emitIF_ICMP(cond: TestOp, label: asm.Label): Unit = { jmethod.visitJumpInsn(cond.opcodeIFICMP(), label) } + // can-multi-thread + final def emitIF_ACMP(cond: TestOp, label: asm.Label): Unit = { + assert((cond == EQ) || (cond == NE), cond) + val opc = (if (cond == EQ) Opcodes.IF_ACMPEQ else Opcodes.IF_ACMPNE) + jmethod.visitJumpInsn(opc, label) + } + // can-multi-thread + final def emitIFNONNULL(label: asm.Label): Unit = { jmethod.visitJumpInsn(Opcodes.IFNONNULL, label) } + // can-multi-thread + final def emitIFNULL (label: asm.Label): Unit = { jmethod.visitJumpInsn(Opcodes.IFNULL, label) } + + // can-multi-thread + final def emitRETURN(tk: BType): Unit = { + if (tk == UNIT) { emit(Opcodes.RETURN) } + else { emitTypeBased(JCodeMethodN.returnOpcodes, tk) } + } + + /* Emits one of tableswitch or lookoupswitch. 
+ * + * can-multi-thread + */ + final def emitSWITCH(keys: Array[Int], branches: Array[asm.Label], defaultBranch: asm.Label, minDensity: Double): Unit = { + assert(keys.length == branches.length) + + // For empty keys, it makes sense emitting LOOKUPSWITCH with defaultBranch only. + // Similar to what javac emits for a switch statement consisting only of a default case. + if (keys.length == 0) { + jmethod.visitLookupSwitchInsn(defaultBranch, keys, branches) + return + } + + // sort `keys` by increasing key, keeping `branches` in sync. TODO FIXME use quicksort + var i = 1 + while (i < keys.length) { + var j = 1 + while (j <= keys.length - i) { + if (keys(j) < keys(j - 1)) { + val tmp = keys(j) + keys(j) = keys(j - 1) + keys(j - 1) = tmp + val tmpL = branches(j) + branches(j) = branches(j - 1) + branches(j - 1) = tmpL + } + j += 1 + } + i += 1 + } + + // check for duplicate keys to avoid "VerifyError: unsorted lookupswitch" (SI-6011) + i = 1 + while (i < keys.length) { + if (keys(i-1) == keys(i)) { + abort("duplicate keys in SWITCH, can't pick arbitrarily one of them to evict, see SI-6011.") + } + i += 1 + } + + val keyMin = keys(0) + val keyMax = keys(keys.length - 1) + + val isDenseEnough: Boolean = { + /* Calculate in long to guard against overflow. TODO what overflow? */ + val keyRangeD: Double = (keyMax.asInstanceOf[Long] - keyMin + 1).asInstanceOf[Double] + val klenD: Double = keys.length + val kdensity: Double = (klenD / keyRangeD) + + kdensity >= minDensity + } + + if (isDenseEnough) { + // use a table in which holes are filled with defaultBranch. 
+ val keyRange = (keyMax - keyMin + 1) + val newBranches = new Array[asm.Label](keyRange) + var oldPos = 0 + var i = 0 + while (i < keyRange) { + val key = keyMin + i; + if (keys(oldPos) == key) { + newBranches(i) = branches(oldPos) + oldPos += 1 + } else { + newBranches(i) = defaultBranch + } + i += 1 + } + assert(oldPos == keys.length, "emitSWITCH") + jmethod.visitTableSwitchInsn(keyMin, keyMax, defaultBranch, newBranches: _*) + } else { + jmethod.visitLookupSwitchInsn(defaultBranch, keys, branches) + } + } + + // internal helpers -- not part of the public API of `jcode` + // don't make private otherwise inlining will suffer + + // can-multi-thread + final def emitVarInsn(opc: Int, idx: Int, tk: BType): Unit = { + assert((opc == Opcodes.ILOAD) || (opc == Opcodes.ISTORE), opc) + jmethod.visitVarInsn(tk.typedOpcode(opc), idx) + } + + // ---------------- array load and store ---------------- + + // can-multi-thread + final def emitTypeBased(opcs: Array[Int], tk: BType): Unit = { + assert(tk != UNIT, tk) + val opc = { + if (tk.isRef) { opcs(0) } + else if (tk.isIntSizedType) { + (tk: @unchecked) match { + case BOOL | BYTE => opcs(1) + case SHORT => opcs(2) + case CHAR => opcs(3) + case INT => opcs(4) + } + } else { + (tk: @unchecked) match { + case LONG => opcs(5) + case FLOAT => opcs(6) + case DOUBLE => opcs(7) + } + } + } + emit(opc) + } + + // ---------------- primitive operations ---------------- + + // can-multi-thread + final def emitPrimitive(opcs: Array[Int], tk: BType): Unit = { + val opc = { + // using `asm.Type.SHORT` instead of `BType.SHORT` because otherwise "warning: could not emit switch for @switch annotated match" + tk match { + case LONG => opcs(1) + case FLOAT => opcs(2) + case DOUBLE => opcs(3) + case _ => opcs(0) + } + } + emit(opc) + } + + // can-multi-thread + final def drop(tk: BType): Unit = { emit(if (tk.isWideType) Opcodes.POP2 else Opcodes.POP) } + + // can-multi-thread + final def dup(tk: BType): Unit = { emit(if (tk.isWideType) 
Opcodes.DUP2 else Opcodes.DUP) } + + // ---------------- type checks and casts ---------------- + + // can-multi-thread + final def isInstance(tk: RefBType): Unit = { + jmethod.visitTypeInsn(Opcodes.INSTANCEOF, tk.classOrArrayType) + } + + // can-multi-thread + final def checkCast(tk: RefBType): Unit = { + // TODO ICode also requires: but that's too much, right? assert(!isBoxedType(tk), "checkcast on boxed type: " + tk) + jmethod.visitTypeInsn(Opcodes.CHECKCAST, tk.classOrArrayType) + } + + def abort(msg: String): Nothing = { + report.error(msg) + throw new RuntimeException(msg) + } + + } // end of class JCodeMethodN + + /* Constant-valued val-members of JCodeMethodN at the companion object, so as to avoid re-initializing them multiple times. */ + object JCodeMethodN { + + import asm.Opcodes._ + + // ---------------- conversions ---------------- + + val fromByteT2T = { Array( -1, -1, I2C, -1, I2L, I2F, I2D) } // do nothing for (BYTE -> SHORT) and for (BYTE -> INT) + val fromCharT2T = { Array(I2B, I2S, -1, -1, I2L, I2F, I2D) } // for (CHAR -> INT) do nothing + val fromShortT2T = { Array(I2B, -1, I2C, -1, I2L, I2F, I2D) } // for (SHORT -> INT) do nothing + val fromIntT2T = { Array(I2B, I2S, I2C, -1, I2L, I2F, I2D) } + + // ---------------- array load and store ---------------- + + val aloadOpcodes = { Array(AALOAD, BALOAD, SALOAD, CALOAD, IALOAD, LALOAD, FALOAD, DALOAD) } + val astoreOpcodes = { Array(AASTORE, BASTORE, SASTORE, CASTORE, IASTORE, LASTORE, FASTORE, DASTORE) } + val returnOpcodes = { Array(ARETURN, IRETURN, IRETURN, IRETURN, IRETURN, LRETURN, FRETURN, DRETURN) } + + // ---------------- primitive operations ---------------- + + val negOpcodes: Array[Int] = { Array(INEG, LNEG, FNEG, DNEG) } + val addOpcodes: Array[Int] = { Array(IADD, LADD, FADD, DADD) } + val subOpcodes: Array[Int] = { Array(ISUB, LSUB, FSUB, DSUB) } + val mulOpcodes: Array[Int] = { Array(IMUL, LMUL, FMUL, DMUL) } + val divOpcodes: Array[Int] = { Array(IDIV, LDIV, FDIV, DDIV) } + val 
remOpcodes: Array[Int] = { Array(IREM, LREM, FREM, DREM) } + + } // end of object JCodeMethodN + + // ---------------- adapted from scalaPrimitives ---------------- + + /* Given `code` reports the src TypeKind of the coercion indicated by `code`. + * To find the dst TypeKind, `ScalaPrimitivesOps.generatedKind(code)` can be used. + * + * can-multi-thread + */ + final def coercionFrom(code: Int): BType = { + import ScalaPrimitivesOps._ + (code: @switch) match { + case B2B | B2C | B2S | B2I | B2L | B2F | B2D => BYTE + case S2B | S2S | S2C | S2I | S2L | S2F | S2D => SHORT + case C2B | C2S | C2C | C2I | C2L | C2F | C2D => CHAR + case I2B | I2S | I2C | I2I | I2L | I2F | I2D => INT + case L2B | L2S | L2C | L2I | L2L | L2F | L2D => LONG + case F2B | F2S | F2C | F2I | F2L | F2F | F2D => FLOAT + case D2B | D2S | D2C | D2I | D2L | D2F | D2D => DOUBLE + } + } + + /* If code is a coercion primitive, the result type. + * + * can-multi-thread + */ + final def coercionTo(code: Int): BType = { + import ScalaPrimitivesOps._ + (code: @switch) match { + case B2B | C2B | S2B | I2B | L2B | F2B | D2B => BYTE + case B2C | C2C | S2C | I2C | L2C | F2C | D2C => CHAR + case B2S | C2S | S2S | I2S | L2S | F2S | D2S => SHORT + case B2I | C2I | S2I | I2I | L2I | F2I | D2I => INT + case B2L | C2L | S2L | I2L | L2L | F2L | D2L => LONG + case B2F | C2F | S2F | I2F | L2F | F2F | D2F => FLOAT + case B2D | C2D | S2D | I2D | L2D | F2D | D2D => DOUBLE + } + } + + implicit class InsnIterMethodNode(mnode: asm.tree.MethodNode) { + @`inline` final def foreachInsn(f: (asm.tree.AbstractInsnNode) => Unit): Unit = { mnode.instructions.foreachInsn(f) } + } + + implicit class InsnIterInsnList(lst: asm.tree.InsnList) { + + @`inline` final def foreachInsn(f: (asm.tree.AbstractInsnNode) => Unit): Unit = { + val insnIter = lst.iterator() + while (insnIter.hasNext) { + f(insnIter.next()) + } + } + } +} diff --git a/tests/pos-with-compiler-cc/backend/jvm/BCodeSkelBuilder.scala 
b/tests/pos-with-compiler-cc/backend/jvm/BCodeSkelBuilder.scala new file mode 100644 index 000000000000..1d8a9c579cb9 --- /dev/null +++ b/tests/pos-with-compiler-cc/backend/jvm/BCodeSkelBuilder.scala @@ -0,0 +1,908 @@ +package dotty.tools +package backend +package jvm + +import scala.language.unsafeNulls + +import scala.annotation.tailrec + +import scala.collection.{ mutable, immutable } + +import scala.tools.asm +import dotty.tools.dotc.ast.tpd +import dotty.tools.dotc.ast.TreeTypeMap +import dotty.tools.dotc.CompilationUnit +import dotty.tools.dotc.core.Decorators._ +import dotty.tools.dotc.core.Flags._ +import dotty.tools.dotc.core.StdNames._ +import dotty.tools.dotc.core.NameKinds._ +import dotty.tools.dotc.core.Names.TermName +import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.core.Types._ +import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.util.Spans._ +import dotty.tools.dotc.report +import dotty.tools.dotc.transform.SymUtils._ + +/* + * + * @author Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/ + * @version 1.0 + * + */ +trait BCodeSkelBuilder extends BCodeHelpers { + import int.{_, given} + import DottyBackendInterface.{symExtensions, _} + import tpd._ + import bTypes._ + import coreBTypes._ + import bCodeAsmCommon._ + + lazy val NativeAttr: Symbol = requiredClass[scala.native] + + /** The destination of a value generated by `genLoadTo`. */ + enum LoadDestination: + /** The value is put on the stack, and control flows through to the next opcode. */ + case FallThrough + /** The value is put on the stack, and control flow is transferred to the given `label`. */ + case Jump(label: asm.Label) + /** The value is RETURN'ed from the enclosing method. */ + case Return + /** The value is ATHROW'n. 
*/ + case Throw + end LoadDestination + + /* + * There's a dedicated PlainClassBuilder for each CompilationUnit, + * which simplifies the initialization of per-class data structures in `genPlainClass()` which in turn delegates to `initJClass()` + * + * The entry-point to emitting bytecode instructions is `genDefDef()` where the per-method data structures are initialized, + * including `resetMethodBookkeeping()` and `initJMethod()`. + * Once that's been done, and assuming the method being visited isn't abstract, `emitNormalMethodBody()` populates + * the ASM MethodNode instance with ASM AbstractInsnNodes. + * + * Given that CleanUp delivers trees that produce values on the stack, + * the entry-point to all-things instruction-emit is `genLoad()`. + * There, an operation taking N arguments results in recursively emitting instructions to lead each of them, + * followed by emitting instructions to process those arguments (to be found at run-time on the operand-stack). + * + * In a few cases the above recipe deserves more details, as provided in the documentation for: + * - `genLoadTry()` + * - `genSynchronized() + * - `jumpDest` , `cleanups` , `labelDefsAtOrUnder` + */ + abstract class PlainSkelBuilder(cunit: CompilationUnit) + extends BCClassGen + with BCAnnotGen + with BCInnerClassGen + with JAndroidBuilder + with BCForwardersGen + with BCPickles + with BCJGenSigGen { + + // Strangely I can't find this in the asm code 255, but reserving 1 for "this" + inline val MaximumJvmParameters = 254 + + // current class + var cnode: ClassNode1 = null + var thisName: String = null // the internal name of the class being emitted + + var claszSymbol: Symbol = null + var isCZParcelable = false + var isCZStaticModule = false + + /* ---------------- idiomatic way to ask questions to typer ---------------- */ + + def paramTKs(app: Apply, take: Int = -1): List[BType] = app match { + case Apply(fun, _) => + val funSym = fun.symbol + (funSym.info.firstParamTypes map toTypeKind) // this 
tracks mentioned inner classes (in innerClassBufferASM) + } + + def symInfoTK(sym: Symbol): BType = { + toTypeKind(sym.info) // this tracks mentioned inner classes (in innerClassBufferASM) + } + + def tpeTK(tree: Tree): BType = { toTypeKind(tree.tpe) } + + override def getCurrentCUnit(): CompilationUnit = { cunit } + + /* ---------------- helper utils for generating classes and fields ---------------- */ + + def genPlainClass(cd0: TypeDef) = cd0 match { + case TypeDef(_, impl: Template) => + assert(cnode == null, "GenBCode detected nested methods.") + + claszSymbol = cd0.symbol + isCZParcelable = isAndroidParcelableClass(claszSymbol) + isCZStaticModule = claszSymbol.isStaticModuleClass + thisName = internalName(claszSymbol) + + cnode = new ClassNode1() + + initJClass(cnode) + + val cd = if (isCZStaticModule) { + // Move statements from the primary constructor following the superclass constructor call to + // a newly synthesised tree representing the "", which also assigns the MODULE$ field. + // Because the assigments to both the module instance fields, and the fields of the module itself + // are in the , these fields can be static + final. + + // Should we do this transformation earlier, say in Constructors? Or would that just cause + // pain for scala-{js, native}? + // + // @sjrd (https://github.com/lampepfl/dotty/pull/9181#discussion_r457458205): + // moving that before the back-end would make things significantly more complicated for + // Scala.js and Native. Both have a first-class concept of ModuleClass, and encode the + // singleton pattern of MODULE$ in a completely different way. In the Scala.js IR, there + // even isn't anything that corresponds to MODULE$ per se. + // + // So if you move this before the back-end, then Scala.js and Scala Native will have to + // reverse all the effects of this transformation, which would be counter-productive. 
+ + + // TODO: remove `!f.name.is(LazyBitMapName)` once we change lazy val encoding + // https://github.com/lampepfl/dotty/issues/7140 + // + // Lazy val encoding assumes bitmap fields are non-static + // + // See `tests/run/given-var.scala` + // + + // !!! Part of this logic is duplicated in JSCodeGen.genCompilationUnit + claszSymbol.info.decls.foreach { f => + if f.isField && !f.name.is(LazyBitMapName) then + f.setFlag(JavaStatic) + } + + val (clinits, body) = impl.body.partition(stat => stat.isInstanceOf[DefDef] && stat.symbol.isStaticConstructor) + + val (uptoSuperStats, remainingConstrStats) = splitAtSuper(impl.constr.rhs.asInstanceOf[Block].stats) + val clInitSymbol: TermSymbol = + if (clinits.nonEmpty) clinits.head.symbol.asTerm + else newSymbol( + claszSymbol, + nme.STATIC_CONSTRUCTOR, + JavaStatic | Method, + MethodType(Nil)(_ => Nil, _ => defn.UnitType), + privateWithin = NoSymbol, + coord = claszSymbol.coord + ) + + val moduleField = newSymbol( + claszSymbol, + str.MODULE_INSTANCE_FIELD.toTermName, + JavaStatic | Final, + claszSymbol.typeRef, + privateWithin = NoSymbol, + coord = claszSymbol.coord + ).entered + + val thisMap = new TreeMap { + override def transform(tree: Tree)(using Context) = { + val tp = tree.tpe.substThis(claszSymbol.asClass, claszSymbol.sourceModule.termRef) + tree.withType(tp) match { + case tree: This if tree.symbol == claszSymbol => + ref(claszSymbol.sourceModule) + case tree => + super.transform(tree) + } + } + } + + def rewire(stat: Tree) = thisMap.transform(stat).changeOwner(claszSymbol.primaryConstructor, clInitSymbol) + + val callConstructor = New(claszSymbol.typeRef).select(claszSymbol.primaryConstructor).appliedToTermArgs(Nil) + val assignModuleField = Assign(ref(moduleField), callConstructor) + val remainingConstrStatsSubst = remainingConstrStats.map(rewire) + val clinit = clinits match { + case (ddef: DefDef) :: _ => + cpy.DefDef(ddef)(rhs = Block(ddef.rhs :: assignModuleField :: remainingConstrStatsSubst, unitLiteral)) + 
case _ => + DefDef(clInitSymbol, Block(assignModuleField :: remainingConstrStatsSubst, unitLiteral)) + } + + val constr2 = { + val rhs = Block(uptoSuperStats, impl.constr.rhs.asInstanceOf[Block].expr) + cpy.DefDef(impl.constr)(rhs = rhs) + } + + val impl2 = cpy.Template(impl)(constr = constr2, body = clinit :: body) + cpy.TypeDef(cd0)(rhs = impl2) + } else cd0 + + val hasStaticCtor = isCZStaticModule || cd.symbol.info.decls.exists(_.isStaticConstructor) + if (!hasStaticCtor && isCZParcelable) fabricateStaticInitAndroid() + + val optSerial: Option[Long] = + claszSymbol.getAnnotation(defn.SerialVersionUIDAnnot).flatMap { annot => + if (claszSymbol.is(Trait)) { + report.warning("@SerialVersionUID does nothing on a trait", annot.tree.sourcePos) + None + } else { + val vuid = annot.argumentConstant(0).map(_.longValue) + if (vuid.isEmpty) + report.error("The argument passed to @SerialVersionUID must be a constant", + annot.argument(0).getOrElse(annot.tree).sourcePos) + vuid + } + } + if (optSerial.isDefined) { addSerialVUID(optSerial.get, cnode)} + + addClassFields() + gen(cd.rhs) + + if (AsmUtils.traceClassEnabled && cnode.name.contains(AsmUtils.traceClassPattern)) + AsmUtils.traceClass(cnode) + + cnode.innerClasses + assert(cd.symbol == claszSymbol, "Someone messed up BCodePhase.claszSymbol during genPlainClass().") + + } // end of method genPlainClass() + + /* + * must-single-thread + */ + private def initJClass(jclass: asm.ClassVisitor): Unit = { + + val ps = claszSymbol.info.parents + val superClass: String = if (ps.isEmpty) ObjectRef.internalName else internalName(ps.head.typeSymbol) + val interfaceNames0 = classBTypeFromSymbol(claszSymbol).info.interfaces.map(_.internalName) + /* To avoid deadlocks when combining objects, lambdas and multi-threading, + * lambdas in objects are compiled to instance methods of the module class + * instead of static methods (see tests/run/deadlock.scala and + * https://github.com/scala/scala-dev/issues/195 for details). 
+ * This has worked well for us so far but this is problematic for + * serialization: serializing a lambda requires serializing all the values + * it captures, if this lambda is in an object, this means serializing the + * enclosing object, which fails if the object does not extend + * Serializable. + * Because serializing objects is basically free since #5775, it seems like + * the simplest solution is to simply make all objects Serializable, this + * certainly seems preferable to deadlocks. + * This cannot be done earlier because Scala.js would not like it (#9596). + */ + val interfaceNames = + if (claszSymbol.is(ModuleClass) && !interfaceNames0.contains("java/io/Serializable")) + interfaceNames0 :+ "java/io/Serializable" + else + interfaceNames0 + + val flags = javaFlags(claszSymbol) + + val thisSignature = getGenericSignature(claszSymbol, claszSymbol.owner) + cnode.visit(classfileVersion, flags, + thisName, thisSignature, + superClass, interfaceNames.toArray) + + if (emitSource) { + cnode.visitSource(cunit.source.file.name, null /* SourceDebugExtension */) + } + + enclosingMethodAttribute(claszSymbol, internalName, asmMethodType(_).descriptor) match { + case Some(EnclosingMethodEntry(className, methodName, methodDescriptor)) => + cnode.visitOuterClass(className, methodName, methodDescriptor) + case _ => () + } + + val ssa = None // TODO: inlined form `getAnnotPickle(thisName, claszSymbol)`. Should something be done on Dotty? 
+ cnode.visitAttribute(if (ssa.isDefined) pickleMarkerLocal else pickleMarkerForeign) + emitAnnotations(cnode, claszSymbol.annotations ++ ssa) + + if (!isCZStaticModule && !isCZParcelable) { + val skipStaticForwarders = (claszSymbol.is(Module) || ctx.settings.XnoForwarders.value) + if (!skipStaticForwarders) { + val lmoc = claszSymbol.companionModule + // add static forwarders if there are no name conflicts; see bugs #363 and #1735 + if (lmoc != NoSymbol) { + // it must be a top level class (name contains no $s) + val isCandidateForForwarders = (lmoc.is(Module)) && lmoc.isStatic + if (isCandidateForForwarders) { + report.log(s"Adding static forwarders from '$claszSymbol' to implementations in '$lmoc'") + addForwarders(cnode, thisName, lmoc.moduleClass) + } + } + } + + } + + // the invoker is responsible for adding a class-static constructor. + + } // end of method initJClass + + /* + * must-single-thread + */ + private def fabricateStaticInitAndroid(): Unit = { + + val clinit: asm.MethodVisitor = cnode.visitMethod( + GenBCodeOps.PublicStatic, // TODO confirm whether we really don't want ACC_SYNTHETIC nor ACC_DEPRECATED + CLASS_CONSTRUCTOR_NAME, + "()V", + null, // no java-generic-signature + null // no throwable exceptions + ) + clinit.visitCode() + + legacyAddCreatorCode(clinit, cnode, thisName) + + clinit.visitInsn(asm.Opcodes.RETURN) + clinit.visitMaxs(0, 0) // just to follow protocol, dummy arguments + clinit.visitEnd() + } + + def addClassFields(): Unit = { + /* Non-method term members are fields, except for module members. Module + * members can only happen on .NET (no flatten) for inner traits. There, + * a module symbol is generated (transformInfo in mixin) which is used + * as owner for the members of the implementation class (so that the + * backend emits them as static). + * No code is needed for this module symbol. 
+ */ + for (f <- claszSymbol.info.decls.filter(p => p.isTerm && !p.is(Method))) { + val javagensig = getGenericSignature(f, claszSymbol) + val flags = javaFieldFlags(f) + + assert(!f.isStaticMember || !claszSymbol.isInterface || !f.is(Mutable), + s"interface $claszSymbol cannot have non-final static field $f") + + val jfield = new asm.tree.FieldNode( + flags, + f.javaSimpleName, + symInfoTK(f).descriptor, + javagensig, + null // no initial value + ) + cnode.fields.add(jfield) + emitAnnotations(jfield, f.annotations) + } + + } // end of method addClassFields() + + // current method + var mnode: MethodNode1 = null + var jMethodName: String = null + var isMethSymStaticCtor = false + var returnType: BType = null + var methSymbol: Symbol = null + // used by genLoadTry() and genSynchronized() + var earlyReturnVar: Symbol = null + var shouldEmitCleanup = false + // line numbers + var lastEmittedLineNr = -1 + + object bc extends JCodeMethodN { + override def jmethod = PlainSkelBuilder.this.mnode + } + + /* ---------------- Part 1 of program points, ie Labels in the ASM world ---------------- */ + + /* + * A jump is represented as a Return node whose `from` symbol denotes a Labeled's Bind node, the target of the jump. + * The `jumpDest` map is used to find the `LoadDestination` at the end of the `Labeled` block, as well as the + * corresponding expected type. The `LoadDestination` can never be `FallThrough` here. 
+ */ + var jumpDest: immutable.Map[ /* Labeled */ Symbol, (BType, LoadDestination) ] = null + def registerJumpDest(labelSym: Symbol, expectedType: BType, dest: LoadDestination): Unit = { + assert(labelSym.is(Label), s"trying to register a jump-dest for a non-label symbol, at: ${labelSym.span}") + assert(dest != LoadDestination.FallThrough, s"trying to register a FallThrough dest for label, at: ${labelSym.span}") + assert(!jumpDest.contains(labelSym), s"trying to register a second jump-dest for label, at: ${labelSym.span}") + jumpDest += (labelSym -> (expectedType, dest)) + } + def findJumpDest(labelSym: Symbol): (BType, LoadDestination) = { + assert(labelSym.is(Label), s"trying to map a non-label symbol to an asm.Label, at: ${labelSym.span}") + jumpDest.getOrElse(labelSym, { + abort(s"unknown label symbol, for label at: ${labelSym.span}") + }) + } + + /* + * A program point may be lexically nested (at some depth) + * (a) in the try-clause of a try-with-finally expression + * (b) in a synchronized block. + * Each of the constructs above establishes a "cleanup block" to execute upon + * both normal-exit, early-return, and abrupt-termination of the instructions it encloses. + * + * The `cleanups` LIFO queue represents the nesting of active (for the current program point) + * pending cleanups. For each such cleanup an asm.Label indicates the start of its cleanup-block. + * At any given time during traversal of the method body, + * the head of `cleanups` denotes the cleanup-block for the closest enclosing try-with-finally or synchronized-expression. + * + * `cleanups` is used: + * + * (1) upon visiting a Return statement. + * In case of pending cleanups, we can't just emit a RETURN instruction, but must instead: + * - store the result (if any) in `earlyReturnVar`, and + * - jump to the next pending cleanup. 
+ * See `genReturn()` + * + * (2) upon emitting a try-with-finally or a synchronized-expr, + * In these cases, the targets of the above jumps are emitted, + * provided an early exit was actually encountered somewhere in the protected clauses. + * See `genLoadTry()` and `genSynchronized()` + * + * The code thus emitted for jumps and targets covers the early-return case. + * The case of abrupt (ie exceptional) termination is covered by exception handlers + * emitted for that purpose as described in `genLoadTry()` and `genSynchronized()`. + */ + var cleanups: List[asm.Label] = Nil + def registerCleanup(finCleanup: asm.Label): Unit = { + if (finCleanup != null) { cleanups = finCleanup :: cleanups } + } + def unregisterCleanup(finCleanup: asm.Label): Unit = { + if (finCleanup != null) { + assert(cleanups.head eq finCleanup, + s"Bad nesting of cleanup operations: $cleanups trying to unregister: $finCleanup") + cleanups = cleanups.tail + } + } + + /* ---------------- local variables and params ---------------- */ + + case class Local(tk: BType, name: String, idx: Int, isSynth: Boolean) + + /* + * Bookkeeping for method-local vars and method-params. + * + * TODO: use fewer slots. local variable slots are never re-used in separate blocks. + * In the following example, x and y could use the same slot. + * def foo() = { + * { val x = 1 } + * { val y = "a" } + * } + */ + object locals { + + private val slots = mutable.AnyRefMap.empty[Symbol, Local] // (local-or-param-sym -> Local(BType, name, idx, isSynth)) + + private var nxtIdx = -1 // next available index for local-var + + def reset(isStaticMethod: Boolean): Unit = { + slots.clear() + nxtIdx = if (isStaticMethod) 0 else 1 + } + + def contains(locSym: Symbol): Boolean = { slots.contains(locSym) } + + def apply(locSym: Symbol): Local = { slots.apply(locSym) } + + /* Make a fresh local variable, ensuring a unique name. + * The invoker must make sure inner classes are tracked for the sym's tpe. 
+ */ + def makeLocal(tk: BType, name: String, tpe: Type, pos: Span): Symbol = { + + val locSym = newSymbol(methSymbol, name.toTermName, Synthetic, tpe, NoSymbol, pos) + makeLocal(locSym, tk) + locSym + } + + def makeLocal(locSym: Symbol): Local = { + makeLocal(locSym, symInfoTK(locSym)) + } + + def getOrMakeLocal(locSym: Symbol): Local = { + // `getOrElse` below has the same effect as `getOrElseUpdate` because `makeLocal()` adds an entry to the `locals` map. + slots.getOrElse(locSym, makeLocal(locSym)) + } + + def reuseLocal(sym: Symbol, loc: Local): Unit = + val existing = slots.put(sym, loc) + if (existing.isDefined) + report.error("attempt to create duplicate local var.", ctx.source.atSpan(sym.span)) + + def reuseThisSlot(sym: Symbol): Unit = + reuseLocal(sym, Local(symInfoTK(sym), sym.javaSimpleName, 0, sym.is(Synthetic))) + + private def makeLocal(sym: Symbol, tk: BType): Local = { + assert(nxtIdx != -1, "not a valid start index") + val loc = Local(tk, sym.javaSimpleName, nxtIdx, sym.is(Synthetic)) + val existing = slots.put(sym, loc) + if (existing.isDefined) + report.error("attempt to create duplicate local var.", ctx.source.atSpan(sym.span)) + assert(tk.size > 0, "makeLocal called for a symbol whose type is Unit.") + nxtIdx += tk.size + loc + } + + // not to be confused with `fieldStore` and `fieldLoad` which also take a symbol but a field-symbol. + def store(locSym: Symbol): Unit = { + val Local(tk, _, idx, _) = slots(locSym) + bc.store(idx, tk) + } + + def load(locSym: Symbol): Unit = { + val Local(tk, _, idx, _) = slots(locSym) + bc.load(idx, tk) + } + + } + + /* ---------------- Part 2 of program points, ie Labels in the ASM world ---------------- */ + + // bookkeeping the scopes of non-synthetic local vars, to emit debug info (`emitVars`). + var varsInScope: List[(Symbol, asm.Label)] = null // (local-var-sym -> start-of-scope) + + // helpers around program-points. 
+ def lastInsn: asm.tree.AbstractInsnNode = mnode.instructions.getLast + def currProgramPoint(): asm.Label = { + lastInsn match { + case labnode: asm.tree.LabelNode => labnode.getLabel + case _ => + val pp = new asm.Label + mnode visitLabel pp + pp + } + } + def markProgramPoint(lbl: asm.Label): Unit = { + val skip = (lbl == null) || isAtProgramPoint(lbl) + if (!skip) { mnode visitLabel lbl } + } + def isAtProgramPoint(lbl: asm.Label): Boolean = { + def getNonLineNumberNode(a: asm.tree.AbstractInsnNode): asm.tree.AbstractInsnNode = a match { + case a: asm.tree.LineNumberNode => getNonLineNumberNode(a.getPrevious) // line numbers aren't part of code itself + case _ => a + } + (getNonLineNumberNode(lastInsn) match { + case labnode: asm.tree.LabelNode => (labnode.getLabel == lbl); + case _ => false } ) + } + def lineNumber(tree: Tree): Unit = { + if (!emitLines || !tree.span.exists) return; + val nr = ctx.source.offsetToLine(tree.span.point) + 1 + if (nr != lastEmittedLineNr) { + lastEmittedLineNr = nr + lastInsn match { + case lnn: asm.tree.LineNumberNode => + // overwrite previous landmark as no instructions have been emitted for it + lnn.line = nr + case _ => + mnode.visitLineNumber(nr, currProgramPoint()) + } + } + } + + // on entering a method + def resetMethodBookkeeping(dd: DefDef) = { + val rhs = dd.rhs + locals.reset(isStaticMethod = methSymbol.isStaticMember) + jumpDest = immutable.Map.empty + + // check previous invocation of genDefDef exited as many varsInScope as it entered. + assert(varsInScope == null, "Unbalanced entering/exiting of GenBCode's genBlock().") + // check previous invocation of genDefDef unregistered as many cleanups as it registered. 
+ assert(cleanups == Nil, "Previous invocation of genDefDef didn't unregister as many cleanups as it registered.") + earlyReturnVar = null + shouldEmitCleanup = false + + lastEmittedLineNr = -1 + } + + /* ---------------- top-down traversal invoking ASM Tree API along the way ---------------- */ + + def gen(tree: Tree): Unit = { + tree match { + case tpd.EmptyTree => () + + case ValDef(name, tpt, rhs) => () // fields are added in `genPlainClass()`, via `addClassFields()` + + case dd: DefDef => + /* First generate a static forwarder if this is a non-private trait + * trait method. This is required for super calls to this method, which + * go through the static forwarder in order to work around limitations + * of the JVM. + * + * For the $init$ method, we must not leave it as a default method, but + * instead we must put the whole body in the static method. If we leave + * it as a default method, Java classes cannot extend Scala classes that + * extend several Scala traits, since they then inherit unrelated default + * $init$ methods. See #8599. scalac does the same thing. + * + * In theory, this would go in a separate MiniPhase, but it would have to + * sit in a MegaPhase of its own between GenSJSIR and GenBCode, so the cost + * is not worth it. We directly do it in this back-end instead, which also + * kind of makes sense because it is JVM-specific. 
+ */ + val sym = dd.symbol + val needsStaticImplMethod = + claszSymbol.isInterface && !dd.rhs.isEmpty && !sym.isPrivate && !sym.isStaticMember + if needsStaticImplMethod then + if sym.name == nme.TRAIT_CONSTRUCTOR then + genTraitConstructorDefDef(dd) + else + genStaticForwarderForDefDef(dd) + genDefDef(dd) + else + genDefDef(dd) + + case tree: Template => + val body = + if (tree.constr.rhs.isEmpty) tree.body + else tree.constr :: tree.body + body foreach gen + + case _ => abort(s"Illegal tree in gen: $tree") + } + } + + /* + * must-single-thread + */ + def initJMethod(flags: Int, params: List[Symbol]): Unit = { + + val jgensig = getGenericSignature(methSymbol, claszSymbol) + val (excs, others) = methSymbol.annotations.partition(_.symbol eq defn.ThrowsAnnot) + val thrownExceptions: List[String] = getExceptions(excs) + + val bytecodeName = + if (isMethSymStaticCtor) CLASS_CONSTRUCTOR_NAME + else jMethodName + + val mdesc = asmMethodType(methSymbol).descriptor + mnode = cnode.visitMethod( + flags, + bytecodeName, + mdesc, + jgensig, + mkArrayS(thrownExceptions) + ).asInstanceOf[MethodNode1] + + // TODO param names: (m.params map (p => javaName(p.sym))) + + emitAnnotations(mnode, others) + emitParamNames(mnode, params) + emitParamAnnotations(mnode, params.map(_.annotations)) + + } // end of method initJMethod + + private def genTraitConstructorDefDef(dd: DefDef): Unit = + val statifiedDef = makeStatifiedDefDef(dd) + genDefDef(statifiedDef) + + /** Creates a copy of the given DefDef that is static and where an explicit + * self parameter represents the original `this` value. 
+ * + * Example: from + * {{{ + * trait Enclosing { + * def foo(x: Int): String = this.toString() + x + * } + * }}} + * the statified version of `foo` would be + * {{{ + * static def foo($self: Enclosing, x: Int): String = $self.toString() + x + * }}} + */ + private def makeStatifiedDefDef(dd: DefDef): DefDef = + val origSym = dd.symbol.asTerm + val newSym = makeStatifiedDefSymbol(origSym, origSym.name) + tpd.DefDef(newSym, { paramRefss => + val selfParamRef :: regularParamRefs = paramRefss.head: @unchecked + val enclosingClass = origSym.owner.asClass + new TreeTypeMap( + typeMap = _.substThis(enclosingClass, selfParamRef.symbol.termRef) + .subst(dd.termParamss.head.map(_.symbol), regularParamRefs.map(_.symbol.termRef)), + treeMap = { + case tree: This if tree.symbol == enclosingClass => selfParamRef + case tree => tree + }, + oldOwners = origSym :: Nil, + newOwners = newSym :: Nil + ).transform(dd.rhs) + }) + + private def genStaticForwarderForDefDef(dd: DefDef): Unit = + val forwarderDef = makeStaticForwarder(dd) + genDefDef(forwarderDef) + + /* Generates a synthetic static forwarder for a trait method. + * For a method such as + * def foo(...args: Ts): R + * in trait X, we generate the following method: + * static def foo$($this: X, ...args: Ts): R = + * invokespecial $this.X::foo(...args) + * We force an invokespecial with the attachment UseInvokeSpecial. It is + * necessary to make sure that the call will not follow overrides of foo() + * in subtraits and subclasses, since the whole point of this forward is to + * encode super calls. 
+ */ + private def makeStaticForwarder(dd: DefDef): DefDef = + val origSym = dd.symbol.asTerm + val name = traitSuperAccessorName(origSym).toTermName + val sym = makeStatifiedDefSymbol(origSym, name) + tpd.DefDef(sym, { paramss => + val params = paramss.head + tpd.Apply(params.head.select(origSym), params.tail) + .withAttachment(BCodeHelpers.UseInvokeSpecial, ()) + }) + + private def makeStatifiedDefSymbol(origSym: TermSymbol, name: TermName): TermSymbol = + val info = origSym.info match + case mt: MethodType => + MethodType(nme.SELF :: mt.paramNames, origSym.owner.typeRef :: mt.paramInfos, mt.resType) + origSym.copy( + name = name.toTermName, + flags = Method | JavaStatic, + info = info + ).asTerm + + def genDefDef(dd: DefDef): Unit = { + val rhs = dd.rhs + val vparamss = dd.termParamss + // the only method whose implementation is not emitted: getClass() + if (dd.symbol eq defn.Any_getClass) { return } + assert(mnode == null, "GenBCode detected nested method.") + + methSymbol = dd.symbol + jMethodName = methSymbol.javaSimpleName + returnType = asmMethodType(dd.symbol).returnType + isMethSymStaticCtor = methSymbol.isStaticConstructor + + resetMethodBookkeeping(dd) + + // add method-local vars for params + + assert(vparamss.isEmpty || vparamss.tail.isEmpty, s"Malformed parameter list: $vparamss") + val params = if (vparamss.isEmpty) Nil else vparamss.head + for (p <- params) { locals.makeLocal(p.symbol) } + // debug assert((params.map(p => locals(p.symbol).tk)) == asmMethodType(methSymbol).getArgumentTypes.toList, "debug") + + if (params.size > MaximumJvmParameters) { + // SI-7324 + report.error(em"Platform restriction: a parameter list's length cannot exceed $MaximumJvmParameters.", ctx.source.atSpan(methSymbol.span)) + return + } + + val isNative = methSymbol.hasAnnotation(NativeAttr) + val isAbstractMethod = (methSymbol.is(Deferred) || (methSymbol.owner.isInterface && ((methSymbol.is(Deferred)) || methSymbol.isClassConstructor))) + val flags = + import 
GenBCodeOps.addFlagIf + javaFlags(methSymbol) + .addFlagIf(isAbstractMethod, asm.Opcodes.ACC_ABSTRACT) + .addFlagIf(false /*methSymbol.isStrictFP*/, asm.Opcodes.ACC_STRICT) + .addFlagIf(isNative, asm.Opcodes.ACC_NATIVE) // native methods of objects are generated in mirror classes + + // TODO needed? for(ann <- m.symbol.annotations) { ann.symbol.initialize } + val paramSyms = params.map(_.symbol) + initJMethod(flags, paramSyms) + + + if (!isAbstractMethod && !isNative) { + // #14773 Reuse locals slots for tailrec-generated mutable vars + val trimmedRhs: Tree = + @tailrec def loop(stats: List[Tree]): List[Tree] = + stats match + case (tree @ ValDef(TailLocalName(_, _), _, _)) :: rest if tree.symbol.isAllOf(Mutable | Synthetic) => + tree.rhs match + case This(_) => + locals.reuseThisSlot(tree.symbol) + loop(rest) + case rhs: Ident if paramSyms.contains(rhs.symbol) => + locals.reuseLocal(tree.symbol, locals(rhs.symbol)) + loop(rest) + case _ => + stats + case _ => + stats + end loop + + rhs match + case Block(stats, expr) => + val trimmedStats = loop(stats) + if trimmedStats eq stats then + rhs + else + Block(trimmedStats, expr) + case _ => + rhs + end trimmedRhs + + def emitNormalMethodBody(): Unit = { + val veryFirstProgramPoint = currProgramPoint() + + if trimmedRhs == tpd.EmptyTree then + report.error( + em"Concrete method has no definition: $dd${ + if (ctx.settings.Ydebug.value) "(found: " + methSymbol.owner.info.decls.toList.mkString(", ") + ")" + else ""}", + ctx.source.atSpan(NoSpan) + ) + else + genLoadTo(trimmedRhs, returnType, LoadDestination.Return) + + if (emitVars) { + // add entries to LocalVariableTable JVM attribute + val onePastLastProgramPoint = currProgramPoint() + val hasStaticBitSet = ((flags & asm.Opcodes.ACC_STATIC) != 0) + if (!hasStaticBitSet) { + mnode.visitLocalVariable( + "this", + "L" + thisName + ";", + null, + veryFirstProgramPoint, + onePastLastProgramPoint, + 0 + ) + } + for (p <- params) { emitLocalVarScope(p.symbol, 
veryFirstProgramPoint, onePastLastProgramPoint, force = true) } + } + + if (isMethSymStaticCtor) { appendToStaticCtor(dd) } + } // end of emitNormalMethodBody() + + lineNumber(rhs) + emitNormalMethodBody() + + // Note we don't invoke visitMax, thus there are no FrameNode among mnode.instructions. + // The only non-instruction nodes to be found are LabelNode and LineNumberNode. + } + + if (AsmUtils.traceMethodEnabled && mnode.name.contains(AsmUtils.traceMethodPattern)) + AsmUtils.traceMethod(mnode) + + mnode = null + } // end of method genDefDef() + + /* + * must-single-thread + * + * TODO document, explain interplay with `fabricateStaticInitAndroid()` + */ + private def appendToStaticCtor(dd: DefDef): Unit = { + + def insertBefore( + location: asm.tree.AbstractInsnNode, + i0: asm.tree.AbstractInsnNode, + i1: asm.tree.AbstractInsnNode): Unit = { + if (i0 != null) { + mnode.instructions.insertBefore(location, i0.clone(null)) + mnode.instructions.insertBefore(location, i1.clone(null)) + } + } + + // collect all return instructions + var rets: List[asm.tree.AbstractInsnNode] = Nil + mnode foreachInsn { i => if (i.getOpcode() == asm.Opcodes.RETURN) { rets ::= i } } + if (rets.isEmpty) { return } + + var insnParcA: asm.tree.AbstractInsnNode = null + var insnParcB: asm.tree.AbstractInsnNode = null + // android creator code + if (isCZParcelable) { + // add a static field ("CREATOR") to this class to cache android.os.Parcelable$Creator + val andrFieldDescr = classBTypeFromSymbol(AndroidCreatorClass).descriptor + cnode.visitField( + asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_FINAL, + "CREATOR", + andrFieldDescr, + null, + null + ) + // INVOKESTATIC CREATOR(): android.os.Parcelable$Creator; -- TODO where does this Android method come from? 
+ val callee = claszSymbol.companionModule.info.member(androidFieldName).symbol + val jowner = internalName(callee.owner) + val jname = callee.javaSimpleName + val jtype = asmMethodType(callee).descriptor + insnParcA = new asm.tree.MethodInsnNode(asm.Opcodes.INVOKESTATIC, jowner, jname, jtype, false) + // PUTSTATIC `thisName`.CREATOR; + insnParcB = new asm.tree.FieldInsnNode(asm.Opcodes.PUTSTATIC, thisName, "CREATOR", andrFieldDescr) + } + + // insert a few instructions for initialization before each return instruction + for(r <- rets) { + insertBefore(r, insnParcA, insnParcB) + } + + } + + def emitLocalVarScope(sym: Symbol, start: asm.Label, end: asm.Label, force: Boolean = false): Unit = { + val Local(tk, name, idx, isSynth) = locals(sym) + if (force || !isSynth) { + mnode.visitLocalVariable(name, tk.descriptor, null, start, end, idx) + } + } + + def genLoadTo(tree: Tree, expectedType: BType, dest: LoadDestination): Unit + + } // end of class PlainSkelBuilder + +} diff --git a/tests/pos-with-compiler-cc/backend/jvm/BCodeSyncAndTry.scala b/tests/pos-with-compiler-cc/backend/jvm/BCodeSyncAndTry.scala new file mode 100644 index 000000000000..b5ed27511e7e --- /dev/null +++ b/tests/pos-with-compiler-cc/backend/jvm/BCodeSyncAndTry.scala @@ -0,0 +1,426 @@ +package dotty.tools +package backend +package jvm + +import scala.language.unsafeNulls + +import scala.collection.immutable +import scala.tools.asm + +import dotty.tools.dotc.CompilationUnit +import dotty.tools.dotc.core.StdNames.nme +import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.ast.tpd + +/* + * + * @author Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/ + * @version 1.0 + * + */ +trait BCodeSyncAndTry extends BCodeBodyBuilder { + import int.given + import tpd._ + import bTypes._ + import coreBTypes._ + /* + * Functionality to lower `synchronized` and `try` expressions. 
+ */ + abstract class SyncAndTryBuilder(cunit: CompilationUnit) extends PlainBodyBuilder(cunit) { + + def genSynchronized(tree: Apply, expectedType: BType): BType = (tree: @unchecked) match { + case Apply(TypeApply(fun, _), args) => + val monitor = locals.makeLocal(ObjectRef, "monitor", defn.ObjectType, tree.span) + val monCleanup = new asm.Label + + // if the synchronized block returns a result, store it in a local variable. + // Just leaving it on the stack is not valid in MSIL (stack is cleaned when leaving try-blocks). + val hasResult = (expectedType != UNIT) + val monitorResult: Symbol = if (hasResult) locals.makeLocal(tpeTK(args.head), "monitorResult", defn.ObjectType, tree.span) else null + + /* ------ (1) pushing and entering the monitor, also keeping a reference to it in a local var. ------ */ + genLoadQualifier(fun) + bc dup ObjectRef + locals.store(monitor) + emit(asm.Opcodes.MONITORENTER) + + /* ------ (2) Synchronized block. + * Reached by fall-through from (1). + * Protected by: + * (2.a) the EH-version of the monitor-exit, and + * (2.b) whatever protects the whole synchronized expression. + * ------ + */ + val startProtected = currProgramPoint() + registerCleanup(monCleanup) + genLoad(args.head, expectedType /* toTypeKind(tree.tpe.resultType) */) + unregisterCleanup(monCleanup) + if (hasResult) { locals.store(monitorResult) } + nopIfNeeded(startProtected) + val endProtected = currProgramPoint() + + /* ------ (3) monitor-exit after normal, non-early-return, termination of (2). + * Reached by fall-through from (2). + * Protected by whatever protects the whole synchronized expression. + * ------ + */ + locals.load(monitor) + emit(asm.Opcodes.MONITOREXIT) + if (hasResult) { locals.load(monitorResult) } + val postHandler = new asm.Label + bc goTo postHandler + + /* ------ (4) exception-handler version of monitor-exit code. + * Reached upon abrupt termination of (2). + * Protected by whatever protects the whole synchronized expression. 
+ * null => "any" exception in bytecode, like we emit for finally. + * Important not to use j/l/Throwable which dooms the method to a life of interpretation! (SD-233) + * ------ + */ + protect(startProtected, endProtected, currProgramPoint(), null) + locals.load(monitor) + emit(asm.Opcodes.MONITOREXIT) + emit(asm.Opcodes.ATHROW) + + /* ------ (5) cleanup version of monitor-exit code. + * Reached upon early-return from (2). + * Protected by whatever protects the whole synchronized expression. + * ------ + */ + if (shouldEmitCleanup) { + markProgramPoint(monCleanup) + locals.load(monitor) + emit(asm.Opcodes.MONITOREXIT) + pendingCleanups() + } + + /* ------ (6) normal exit of the synchronized expression. + * Reached after normal, non-early-return, termination of (3). + * Protected by whatever protects the whole synchronized expression. + * ------ + */ + mnode visitLabel postHandler + + lineNumber(tree) + + expectedType + } + + /* + * Detects whether no instructions have been emitted since label `lbl` and if so emits a NOP. + * Useful to avoid emitting an empty try-block being protected by exception handlers, + * which results in "java.lang.ClassFormatError: Illegal exception table range". See SI-6102. + */ + def nopIfNeeded(lbl: asm.Label): Unit = { + val noInstructionEmitted = isAtProgramPoint(lbl) + if (noInstructionEmitted) { emit(asm.Opcodes.NOP) } + } + + /* + * Emitting try-catch is easy, emitting try-catch-finally not quite so. + * A finally-block (which always has type Unit, thus leaving the operand stack unchanged) + * affects control-transfer from protected regions, as follows: + * + * (a) `return` statement: + * + * First, the value to return (if any) is evaluated. + * Afterwards, all enclosing finally-blocks are run, from innermost to outermost. + * Only then is the return value (if any) returned. 
+ * + * Some terminology: + * (a.1) Executing a return statement that is protected + * by one or more finally-blocks is called "early return" + * (a.2) the chain of code sections (a code section for each enclosing finally-block) + * to run upon early returns is called "cleanup chain" + * + * As an additional spin, consider a return statement in a finally-block. + * In this case, the value to return depends on how control arrived at that statement: + * in case it arrived via a previous return, the previous return enjoys priority: + * the value to return is given by that statement. + * + * (b) A finally-block protects both the try-clause and the catch-clauses. + * + * Sidenote: + * A try-clause may contain an empty block. On CLR, a finally-block has special semantics + * regarding Abort interruptions; but on the JVM it's safe to elide an exception-handler + * that protects an "empty" range ("empty" as in "containing NOPs only", + * see `asm.optimiz.DanglingExcHandlers` and SI-6720). + * + * This means a finally-block indicates instructions that can be reached: + * (b.1) Upon normal (non-early-returning) completion of the try-clause or a catch-clause + * In this case, the next-program-point is that following the try-catch-finally expression. + * (b.2) Upon early-return initiated in the try-clause or a catch-clause + * In this case, the next-program-point is the enclosing cleanup section (if any), otherwise return. + * (b.3) Upon abrupt termination (due to unhandled exception) of the try-clause or a catch-clause + * In this case, the unhandled exception must be re-thrown after running the finally-block. + * + * (c) finally-blocks are implicit to `synchronized` (a finally-block is added to just release the lock) + * that's why `genSynchronized()` too emits cleanup-sections. + * + * A number of code patterns can be emitted to realize the intended semantics. 
+ * + * A popular alternative (GenICode, javac) consists in duplicating the cleanup-chain at each early-return position. + * The principle at work being that once control is transferred to a cleanup-section, + * control will always stay within the cleanup-chain. + * That is, barring an exception being thrown in a cleanup-section, in which case the enclosing try-block + * (reached via abrupt termination) takes over. + * + * The observations above hint at another code layout, less verbose, for the cleanup-chain. + * + * The code layout that GenBCode emits takes into account that once a cleanup section has been reached, + * jumping to the next cleanup-section (and so on, until the outermost one) realizes the correct semantics. + * + * There is still code duplication in that two cleanup-chains are needed (but this is unavoidable, anyway): + * one for normal control flow and another chain consisting of exception handlers. + * The in-line comments below refer to them as + * - "early-return-cleanups" and + * - "exception-handler-version-of-finally-block" respectively. + * + */ + def genLoadTry(tree: Try): BType = tree match { + case Try(block, catches, finalizer) => + val kind = tpeTK(tree) + + val caseHandlers: List[EHClause] = + for (CaseDef(pat, _, caseBody) <- catches) yield { + pat match { + case Typed(Ident(nme.WILDCARD), tpt) => NamelessEH(tpeTK(tpt).asClassBType, caseBody) + case Ident(nme.WILDCARD) => NamelessEH(jlThrowableRef, caseBody) + case Bind(_, _) => BoundEH (pat.symbol, caseBody) + } + } + + // ------ (0) locals used later ------ + + /* + * `postHandlers` is a program point denoting: + * (a) the finally-clause conceptually reached via fall-through from try-catch-finally + * (in case a finally-block is present); or + * (b) the program point right after the try-catch + * (in case there's no finally-block). 
+ * The name choice emphasizes that the code section lies "after all exception handlers", + * where "all exception handlers" includes those derived from catch-clauses as well as from finally-blocks. + */ + val postHandlers = new asm.Label + + val hasFinally = (finalizer != tpd.EmptyTree) + + /* + * used in the finally-clause reached via fall-through from try-catch, if any. + */ + val guardResult = hasFinally && (kind != UNIT) && mayCleanStack(finalizer) + + /* + * please notice `tmp` has type tree.tpe, while `earlyReturnVar` has the method return type. + * Because those two types can be different, dedicated vars are needed. + */ + val tmp = if (guardResult) locals.makeLocal(tpeTK(tree), "tmp", tree.tpe, tree.span) else null + + /* + * upon early return from the try-body or one of its EHs (but not the EH-version of the finally-clause) + * AND hasFinally, a cleanup is needed. + */ + val finCleanup = if (hasFinally) new asm.Label else null + + /* ------ (1) try-block, protected by: + * (1.a) the EHs due to case-clauses, emitted in (2), + * (1.b) the EH due to finally-clause, emitted in (3.A) + * (1.c) whatever protects the whole try-catch-finally expression. + * ------ + */ + + val startTryBody = currProgramPoint() + registerCleanup(finCleanup) + genLoad(block, kind) + unregisterCleanup(finCleanup) + nopIfNeeded(startTryBody) + val endTryBody = currProgramPoint() + bc goTo postHandlers + + /** + * A return within a `try` or `catch` block where a `finally` is present ("early return") + * emits a store of the result to a local, jump to a "cleanup" version of the `finally` block, + * and sets `shouldEmitCleanup = true` (see [[PlainBodyBuilder.genReturn]]). + * + * If the try-catch is nested, outer `finally` blocks need to be emitted in a cleanup version + * as well, so the `shouldEmitCleanup` variable remains `true` until the outermost `finally`. + * Nested cleanup `finally` blocks jump to the next enclosing one. 
For the outermost, we emit + * a read of the local variable, a return, and we set `shouldEmitCleanup = false` (see + * [[pendingCleanups]]). + * + * Now, assume we have + * + * try { return 1 } finally { + * try { println() } finally { println() } + * } + * + * Here, the outer `finally` needs a cleanup version, but the inner one does not. The method + * here makes sure that `shouldEmitCleanup` is only propagated outwards, not inwards to + * nested `finally` blocks. + */ + def withFreshCleanupScope(body: => Unit) = { + val savedShouldEmitCleanup = shouldEmitCleanup + shouldEmitCleanup = false + body + shouldEmitCleanup = savedShouldEmitCleanup || shouldEmitCleanup + } + + /* ------ (2) One EH for each case-clause (this does not include the EH-version of the finally-clause) + * An EH in (2) is reached upon abrupt termination of (1). + * An EH in (2) is protected by: + * (2.a) the EH-version of the finally-clause, if any. + * (2.b) whatever protects the whole try-catch-finally expression. + * ------ + */ + + for (ch <- caseHandlers) withFreshCleanupScope { + + // (2.a) emit case clause proper + val startHandler = currProgramPoint() + var endHandler: asm.Label = null + var excType: ClassBType = null + registerCleanup(finCleanup) + ch match { + case NamelessEH(typeToDrop, caseBody) => + bc drop typeToDrop + genLoad(caseBody, kind) // adapts caseBody to `kind`, thus it can be stored, if `guardResult`, in `tmp`. + nopIfNeeded(startHandler) + endHandler = currProgramPoint() + excType = typeToDrop + + case BoundEH (patSymbol, caseBody) => + // test/files/run/contrib674.scala , a local-var already exists for patSymbol. + // rather than creating on first-access, we do it right away to emit debug-info for the created local var. 
+ val Local(patTK, _, patIdx, _) = locals.getOrMakeLocal(patSymbol) + bc.store(patIdx, patTK) + genLoad(caseBody, kind) + nopIfNeeded(startHandler) + endHandler = currProgramPoint() + emitLocalVarScope(patSymbol, startHandler, endHandler) + excType = patTK.asClassBType + } + unregisterCleanup(finCleanup) + // (2.b) mark the try-body as protected by this case clause. + protect(startTryBody, endTryBody, startHandler, excType) + // (2.c) emit jump to the program point where the finally-clause-for-normal-exit starts, or in effect `after` if no finally-clause was given. + bc goTo postHandlers + + } + + // Need to save the state of `shouldEmitCleanup` at this point: while emitting the first + // version of the `finally` block below, the variable may become true. But this does not mean + // that we need a cleanup version for the current block, only for the enclosing ones. + val currentFinallyBlockNeedsCleanup = shouldEmitCleanup + + /* ------ (3.A) The exception-handler-version of the finally-clause. + * Reached upon abrupt termination of (1) or one of the EHs in (2). + * Protected only by whatever protects the whole try-catch-finally expression. + * ------ + */ + + // a note on terminology: this is not "postHandlers", despite appearances. + // "postHandlers" as in the source-code view. And from that perspective, both (3.A) and (3.B) are invisible implementation artifacts. + if (hasFinally) withFreshCleanupScope { + nopIfNeeded(startTryBody) + val finalHandler = currProgramPoint() // version of the finally-clause reached via unhandled exception. + protect(startTryBody, finalHandler, finalHandler, null) + val Local(eTK, _, eIdx, _) = locals(locals.makeLocal(jlThrowableRef, "exc", defn.ThrowableType, finalizer.span)) + bc.store(eIdx, eTK) + emitFinalizer(finalizer, null, isDuplicate = true) + bc.load(eIdx, eTK) + emit(asm.Opcodes.ATHROW) + } + + /* ------ (3.B) Cleanup-version of the finally-clause. 
+ * Reached upon early RETURN from (1) or upon early RETURN from one of the EHs in (2) + * (and only from there, ie reached only upon early RETURN from + * program regions bracketed by registerCleanup/unregisterCleanup). + * Protected only by whatever protects the whole try-catch-finally expression. + * + * Given that control arrives to a cleanup section only upon early RETURN, + * the value to return (if any) is always available. Therefore, a further RETURN + * found in a cleanup section is always ignored (a warning is displayed, @see `genReturn()`). + * In order for `genReturn()` to know whether the return statement is enclosed in a cleanup section, + * the variable `insideCleanupBlock` is used. + * ------ + */ + + // this is not "postHandlers" either. + // `shouldEmitCleanup` can be set, and at the same time this try expression may lack a finally-clause. + // In other words, all combinations of (hasFinally, shouldEmitCleanup) are valid. + if (hasFinally && currentFinallyBlockNeedsCleanup) { + markProgramPoint(finCleanup) + // regarding return value, the protocol is: in place of a `return-stmt`, a sequence of `adapt, store, jump` are inserted. + emitFinalizer(finalizer, null, isDuplicate = true) + pendingCleanups() + } + + /* ------ (4) finally-clause-for-normal-nonEarlyReturn-exit + * Reached upon normal, non-early-return termination of (1) or of an EH in (2). + * Protected only by whatever protects the whole try-catch-finally expression. + * TODO explain what happens upon RETURN contained in (4) + * ------ + */ + + markProgramPoint(postHandlers) + if (hasFinally) { + emitFinalizer(finalizer, tmp, isDuplicate = false) // the only invocation of emitFinalizer with `isDuplicate == false` + } + + kind + } // end of genLoadTry() + + /* if no more pending cleanups, all that remains to do is return. Otherwise jump to the next (outer) pending cleanup. 
*/ + private def pendingCleanups(): Unit = { + cleanups match { + case Nil => + if (earlyReturnVar != null) { + locals.load(earlyReturnVar) + bc.emitRETURN(locals(earlyReturnVar).tk) + } else { + bc emitRETURN UNIT + } + shouldEmitCleanup = false + + case nextCleanup :: _ => + bc goTo nextCleanup + } + } + + def protect(start: asm.Label, end: asm.Label, handler: asm.Label, excType: ClassBType): Unit = { + val excInternalName: String = + if (excType == null) null + else excType.internalName + assert(start != end, "protecting a range of zero instructions leads to illegal class format. Solution: add a NOP to that range.") + mnode.visitTryCatchBlock(start, end, handler, excInternalName) + } + + /* `tmp` (if non-null) is the symbol of the local-var used to preserve the result of the try-body, see `guardResult` */ + def emitFinalizer(finalizer: Tree, tmp: Symbol, isDuplicate: Boolean): Unit = { + var saved: immutable.Map[ /* Labeled */ Symbol, (BType, LoadDestination) ] = null + if (isDuplicate) { + saved = jumpDest + } + // when duplicating, the above guarantees new asm.Labels are used for LabelDefs contained in the finalizer (their vars are reused, that's ok) + if (tmp != null) { locals.store(tmp) } + genLoad(finalizer, UNIT) + if (tmp != null) { locals.load(tmp) } + if (isDuplicate) { + jumpDest = saved + } + } + + /* Does this tree have a try-catch block? 
*/ + def mayCleanStack(tree: Tree): Boolean = tree.find { t => t match { // TODO: use existsSubTree + case Try(_, _, _) => true + case _ => false + } + }.isDefined + + trait EHClause + case class NamelessEH(typeToDrop: ClassBType, caseBody: Tree) extends EHClause + case class BoundEH (patSymbol: Symbol, caseBody: Tree) extends EHClause + + } + +} diff --git a/tests/pos-with-compiler-cc/backend/jvm/BTypes.scala b/tests/pos-with-compiler-cc/backend/jvm/BTypes.scala new file mode 100644 index 000000000000..dda85e2d5616 --- /dev/null +++ b/tests/pos-with-compiler-cc/backend/jvm/BTypes.scala @@ -0,0 +1,864 @@ +package dotty.tools +package backend +package jvm + +import scala.language.unsafeNulls + +import scala.tools.asm + +/** + * The BTypes component defines The BType class hierarchy. BTypes encapsulates all type information + * that is required after building the ASM nodes. This includes optimizations, geneartion of + * InnerClass attributes and generation of stack map frames. + * + * This representation is immutable and independent of the compiler data structures, hence it can + * be queried by concurrent threads. + */ +abstract class BTypes extends caps.Pure { + + val int: DottyBackendInterface + import int.given + /** + * A map from internal names to ClassBTypes. Every ClassBType is added to this map on its + * construction. + * + * This map is used when computing stack map frames. The asm.ClassWriter invokes the method + * `getCommonSuperClass`. In this method we need to obtain the ClassBType for a given internal + * name. The method assumes that every class type that appears in the bytecode exists in the map. + * + * Concurrent because stack map frames are computed when in the class writer, which might run + * on multiple classes concurrently. 
+ */ + protected def classBTypeFromInternalNameMap: collection.concurrent.Map[String, ClassBType] + // NOTE: Should be a lazy val but scalac does not allow abstract lazy vals (dotty does) + + /** + * Obtain a previously constructed ClassBType for a given internal name. + */ + def classBTypeFromInternalName(internalName: String) = classBTypeFromInternalNameMap(internalName) + + // Some core BTypes are required here, in class BType, where no Global instance is available. + // The Global is only available in the subclass BTypesFromSymbols. We cannot depend on the actual + // implementation (CoreBTypesProxy) here because it has members that refer to global.Symbol. + val coreBTypes: CoreBTypesProxyGlobalIndependent[this.type] + import coreBTypes._ + + /** + * A BType is either a primitve type, a ClassBType, an ArrayBType of one of these, or a MethodType + * referring to BTypes. + */ + /*sealed*/ trait BType extends caps.Pure { // Not sealed for now due to SI-8546 + final override def toString: String = this match { + case UNIT => "V" + case BOOL => "Z" + case CHAR => "C" + case BYTE => "B" + case SHORT => "S" + case INT => "I" + case FLOAT => "F" + case LONG => "J" + case DOUBLE => "D" + case ClassBType(internalName) => "L" + internalName + ";" + case ArrayBType(component) => "[" + component + case MethodBType(args, res) => args.mkString("(", "", ")" + res) + } + + /** + * @return The Java descriptor of this type. 
Examples: + * - int: I + * - java.lang.String: Ljava/lang/String; + * - int[]: [I + * - Object m(String s, double d): (Ljava/lang/String;D)Ljava/lang/Object; + */ + final def descriptor = toString + + /** + * @return 0 for void, 2 for long and double, 1 otherwise + */ + final def size: Int = this match { + case UNIT => 0 + case LONG | DOUBLE => 2 + case _ => 1 + } + + final def isPrimitive: Boolean = this.isInstanceOf[PrimitiveBType] + final def isRef: Boolean = this.isInstanceOf[RefBType] + final def isArray: Boolean = this.isInstanceOf[ArrayBType] + final def isClass: Boolean = this.isInstanceOf[ClassBType] + final def isMethod: Boolean = this.isInstanceOf[MethodBType] + + final def isNonVoidPrimitiveType = isPrimitive && this != UNIT + + final def isNullType = this == srNullRef + final def isNothingType = this == srNothingRef + + final def isBoxed = this.isClass && boxedClasses(this.asClassBType) + + final def isIntSizedType = this == BOOL || this == CHAR || this == BYTE || + this == SHORT || this == INT + final def isIntegralType = this == INT || this == BYTE || this == LONG || + this == CHAR || this == SHORT + final def isRealType = this == FLOAT || this == DOUBLE + final def isNumericType = isIntegralType || isRealType + final def isWideType = size == 2 + + /* + * Subtype check `this <:< other` on BTypes that takes into account the JVM built-in numeric + * promotions (e.g. BYTE to INT). Its operation can be visualized more easily in terms of the + * Java bytecode type hierarchy. 
+ */ + final def conformsTo(other: BType): Boolean = { + assert(isRef || isPrimitive, s"conformsTo cannot handle $this") + assert(other.isRef || other.isPrimitive, s"conformsTo cannot handle $other") + + this match { + case ArrayBType(component) => + if (other == ObjectRef || other == jlCloneableRef || other == jiSerializableRef) true + else other match { + case ArrayBType(otherComponoent) => component.conformsTo(otherComponoent) + case _ => false + } + + case classType: ClassBType => + if (isBoxed) { + if (other.isBoxed) this == other + else if (other == ObjectRef) true + else other match { + case otherClassType: ClassBType => classType.isSubtypeOf(otherClassType) // e.g., java/lang/Double conforms to java/lang/Number + case _ => false + } + } else if (isNullType) { + if (other.isNothingType) false + else if (other.isPrimitive) false + else true // Null conforms to all classes (except Nothing) and arrays. + } else if (isNothingType) { + true + } else other match { + case otherClassType: ClassBType => classType.isSubtypeOf(otherClassType) + // case ArrayBType(_) => this.isNullType // documentation only, because `if (isNullType)` above covers this case + case _ => + // isNothingType || // documentation only, because `if (isNothingType)` above covers this case + false + } + + case UNIT => + other == UNIT + case BOOL | BYTE | SHORT | CHAR => + this == other || other == INT || other == LONG // TODO Actually, BOOL does NOT conform to LONG. Even with adapt(). + case _ => + assert(isPrimitive && other.isPrimitive, s"Expected primitive types $this - $other") + this == other + } + } + + /** + * Compute the upper bound of two types. + * Takes promotions of numeric primitives into account. 
+ */ + final def maxType(other: BType): BType = this match { + case pt: PrimitiveBType => pt.maxValueType(other) + + case _: ArrayBType | _: ClassBType => + if (isNothingType) return other + if (other.isNothingType) return this + if (this == other) return this + + assert(other.isRef, s"Cannot compute maxType: $this, $other") + // Approximate `lub`. The common type of two references is always ObjectReference. + ObjectRef + } + + /** + * See documentation of [[typedOpcode]]. + * The numbers are taken from asm.Type.VOID_TYPE ff., the values are those shifted by << 8. + */ + private def loadStoreOpcodeOffset: Int = this match { + case UNIT | INT => 0 + case BOOL | BYTE => 5 + case CHAR => 6 + case SHORT => 7 + case FLOAT => 2 + case LONG => 1 + case DOUBLE => 3 + case _ => 4 + } + + /** + * See documentation of [[typedOpcode]]. + * The numbers are taken from asm.Type.VOID_TYPE ff., the values are those shifted by << 16. + */ + private def typedOpcodeOffset: Int = this match { + case UNIT => 5 + case BOOL | CHAR | BYTE | SHORT | INT => 0 + case FLOAT => 2 + case LONG => 1 + case DOUBLE => 3 + case _ => 4 + } + + /** + * Some JVM opcodes have typed variants. This method returns the correct opcode according to + * the type. + * + * @param opcode A JVM instruction opcode. This opcode must be one of ILOAD, ISTORE, IALOAD, + * IASTORE, IADD, ISUB, IMUL, IDIV, IREM, INEG, ISHL, ISHR, IUSHR, IAND, IOR + * IXOR and IRETURN. + * @return The opcode adapted to this java type. For example, if this type is `float` and + * `opcode` is `IRETURN`, this method returns `FRETURN`. + */ + final def typedOpcode(opcode: Int): Int = { + if (opcode == asm.Opcodes.IALOAD || opcode == asm.Opcodes.IASTORE) + opcode + loadStoreOpcodeOffset + else + opcode + typedOpcodeOffset + } + + /** + * The asm.Type corresponding to this BType. + * + * Note about asm.Type.getObjectType (*): For class types, the method expects the internal + * name, i.e. without the surrounding 'L' and ';'. 
For array types on the other hand, the + * method expects a full descriptor, for example "[Ljava/lang/String;". + * + * See method asm.Type.getType that creates a asm.Type from a type descriptor + * - for an OBJECT type, the 'L' and ';' are not part of the range of the created Type + * - for an ARRAY type, the full descriptor is part of the range + */ + def toASMType: asm.Type = this match { + case UNIT => asm.Type.VOID_TYPE + case BOOL => asm.Type.BOOLEAN_TYPE + case CHAR => asm.Type.CHAR_TYPE + case BYTE => asm.Type.BYTE_TYPE + case SHORT => asm.Type.SHORT_TYPE + case INT => asm.Type.INT_TYPE + case FLOAT => asm.Type.FLOAT_TYPE + case LONG => asm.Type.LONG_TYPE + case DOUBLE => asm.Type.DOUBLE_TYPE + case ClassBType(internalName) => asm.Type.getObjectType(internalName) // see (*) above + case a: ArrayBType => asm.Type.getObjectType(a.descriptor) + case m: MethodBType => asm.Type.getMethodType(m.descriptor) + } + + def asRefBType : RefBType = this.asInstanceOf[RefBType] + def asArrayBType : ArrayBType = this.asInstanceOf[ArrayBType] + def asClassBType : ClassBType = this.asInstanceOf[ClassBType] + def asPrimitiveBType : PrimitiveBType = this.asInstanceOf[PrimitiveBType] + } + + sealed trait PrimitiveBType extends BType { + + /** + * The upper bound of two primitive types. The `other` type has to be either a primitive + * type or Nothing. + * + * The maxValueType of (Char, Byte) and of (Char, Short) is Int, to encompass the negative + * values of Byte and Short. See ticket #2087. 
+ */ + final def maxValueType(other: BType): BType = { + + def uncomparable: Nothing = throw new AssertionError(s"Cannot compute maxValueType: $this, $other") + + if (!other.isPrimitive && !other.isNothingType) uncomparable + + if (other.isNothingType) return this + if (this == other) return this + + this match { + case BYTE => + if (other == CHAR) INT + else if (other.isNumericType) other + else uncomparable + + case SHORT => + other match { + case BYTE => SHORT + case CHAR => INT + case INT | LONG | FLOAT | DOUBLE => other + case _ => uncomparable + } + + case CHAR => + other match { + case BYTE | SHORT => INT + case INT | LONG | FLOAT | DOUBLE => other + case _ => uncomparable + } + + case INT => + other match { + case BYTE | SHORT | CHAR => INT + case LONG | FLOAT | DOUBLE => other + case _ => uncomparable + } + + case LONG => + other match { + case INT | BYTE | LONG | CHAR | SHORT => LONG + case DOUBLE => DOUBLE + case FLOAT => FLOAT + case _ => uncomparable + } + + case FLOAT => + if (other == DOUBLE) DOUBLE + else if (other.isNumericType) FLOAT + else uncomparable + + case DOUBLE => + if (other.isNumericType) DOUBLE + else uncomparable + + case UNIT | BOOL => uncomparable + } + } + } + + case object UNIT extends PrimitiveBType + case object BOOL extends PrimitiveBType + case object CHAR extends PrimitiveBType + case object BYTE extends PrimitiveBType + case object SHORT extends PrimitiveBType + case object INT extends PrimitiveBType + case object FLOAT extends PrimitiveBType + case object LONG extends PrimitiveBType + case object DOUBLE extends PrimitiveBType + + sealed trait RefBType extends BType { + /** + * The class or array type of this reference type. Used for ANEWARRAY, MULTIANEWARRAY, + * INSTANCEOF and CHECKCAST instructions. Also used for emitting invokevirtual calls to + * (a: Array[T]).clone() for any T, see genApply. 
+ * + * In contrast to the descriptor, this string does not contain the surrounding 'L' and ';' for + * class types, for example "java/lang/String". + * However, for array types, the full descriptor is used, for example "[Ljava/lang/String;". + * + * This can be verified for example using javap or ASMifier. + */ + def classOrArrayType: String = this match { + case ClassBType(internalName) => internalName + case a: ArrayBType => a.descriptor + } + } + + /** + * InnerClass and EnclosingMethod attributes (EnclosingMethod is displayed as OUTERCLASS in asm). + * + * In this summary, "class" means "class or interface". + * + * JLS: http://docs.oracle.com/javase/specs/jls/se8/html/index.html + * JVMS: http://docs.oracle.com/javase/specs/jvms/se8/html/index.html + * + * Terminology + * ----------- + * + * - Nested class (JLS 8): class whose declaration occurs within the body of another class + * + * - Top-level class (JLS 8): non-nested class + * + * - Inner class (JLS 8.1.3): nested class that is not (explicitly or implicitly) static + * + * - Member class (JLS 8.5): class directly enclosed in the body of a class (and not, for + * example, defined in a method). Member classes cannot be anonymous. May be static. + * + * - Local class (JLS 14.3): nested, non-anonymous class that is not a member of a class + * - cannot be static (therefore they are "inner" classes) + * - can be defined in a method, a constructor or in an initializer block + * + * - Initializer block (JLS 8.6 / 8.7): block of statements in a java class + * - static initializer: executed before constructor body + * - instance initializer: executed when class is initialized (instance creation, static + * field access, ...) + * + * - A static nested class can be defined as + * - a static member class (explicitly static), or + * - a member class of an interface (implicitly static) + * - local classes are never static, even if they are defined in a static method. 
+ * + * Note: it is NOT the case that all inner classes (non-static) have an outer pointer. Example: + * class C { static void foo { class D {} } } + * The class D is an inner class (non-static), but javac does not add an outer pointer to it. + * + * InnerClass + * ---------- + * + * The JVMS 4.7.6 requires an entry for every class mentioned in a CONSTANT_Class_info in the + * constant pool (CP) that is not a member of a package (JLS 7.1). + * + * The JLS 13.1, points 9. / 10. requires: a class must reference (in the CP) + * - its immediately enclosing class + * - all of its member classes + * - all local and anonymous classes that are referenced (or declared) elsewhere (method, + * constructor, initializer block, field initializer) + * + * In a comment, the 4.7.6 spec says: this implies an entry in the InnerClass attribute for + * - All enclosing classes (except the outermost, which is top-level) + * - My comment: not sure how this is implied, below (*) a Java counter-example. + * In any case, the Java compiler seems to add all enclosing classes, even if they are not + * otherwise mentioned in the CP. So we should do the same. + * - All nested classes (including anonymous and local, but not transitively) + * + * Fields in the InnerClass entries: + * - inner class: the (nested) class C we are talking about + * - outer class: the class of which C is a member. Has to be null for non-members, i.e. for + * local and anonymous classes. NOTE: this co-incides with the presence of an + * EnclosingMethod attribute (see below) + * - inner name: A string with the simple name of the inner class. Null for anonymous classes. + * - flags: access property flags, details in JVMS, table in 4.7.6. Static flag: see + * discussion below. + * + * + * Note 1: when a nested class is present in the InnerClass attribute, all of its enclosing + * classes have to be present as well (by the rules above). 
Example: + * + * class Outer { class I1 { class I2 { } } } + * class User { Outer.I1.I2 foo() { } } + * + * The return type "Outer.I1.I2" puts "Outer$I1$I2" in the CP, therefore the class is added to the + * InnerClass attribute. For this entry, the "outer class" field will be "Outer$I1". This in turn + * adds "Outer$I1" to the CP, which requires adding that class to the InnerClass attribute. + * (For local / anonymous classes this would not be the case, since the "outer class" attribute + * would be empty. However, no class (other than the enclosing class) can refer to them, as they + * have no name.) + * + * In the current implementation of the Scala compiler, when adding a class to the InnerClass + * attribute, all of its enclosing classes will be added as well. Javac seems to do the same, + * see (*). + * + * + * Note 2: If a class name is mentioned only in a CONSTANT_Utf8_info, but not in a + * CONSTANT_Class_info, the JVMS does not require an entry in the InnerClass attribute. However, + * the Java compiler seems to add such classes anyway. For example, when using an annotation, the + * annotation class is stored as a CONSTANT_Utf8_info in the CP: + * + * @O.Ann void foo() { } + * + * adds "const #13 = Asciz LO$Ann;;" in the constant pool. The "RuntimeInvisibleAnnotations" + * attribute refers to that constant pool entry. Even though there is no other reference to + * `O.Ann`, the java compiler adds an entry for that class to the InnerClass attribute (which + * entails adding a CONSTANT_Class_info for the class). + * + * + * + * EnclosingMethod + * --------------- + * + * JVMS 4.7.7: the attribute must be present "if and only if it represents a local class + * or an anonymous class" (i.e. not for member classes). + * + * The attribute is mis-named, it should be called "EnclosingClass". It has to be defined for all + * local and anonymous classes, no matter if there is an enclosing method or not. 
Accordingly, the + * "class" field (see below) must be always defined, while the "method" field may be null. + * + * NOTE: When a EnclosingMethod attribute is required (local and anonymous classes), the "outer" + * field in the InnerClass table must be null. + * + * Fields: + * - class: the enclosing class + * - method: the enclosing method (or constructor). Null if the class is not enclosed by a + * method, i.e. for + * - local or anonymous classes defined in (static or non-static) initializer blocks + * - anonymous classes defined in initializer blocks or field initializers + * + * Note: the field is required for anonymous classes defined within local variable + * initializers (within a method), Java example below (**). + * + * For local and anonymous classes in initializer blocks or field initializers, and + * class-level anonymous classes, the scala compiler sets the "method" field to null. + * + * + * (*) + * public class Test { + * void foo() { + * class Foo1 { + * // constructor statement block + * { + * class Foo2 { + * class Foo3 { } + * } + * } + * } + * } + * } + * + * The class file Test$1Foo1$1Foo2$Foo3 has no reference to the class Test$1Foo1, however it + * still contains an InnerClass attribute for Test$1Foo1. + * Maybe this is just because the Java compiler follows the JVMS comment ("InnerClasses + * information for each enclosing class"). + * + * + * (**) + * void foo() { + * // anonymous class defined in local variable initializer expression. + * Runnable x = true ? (new Runnable() { + * public void run() { return; } + * }) : null; + * } + * + * The EnclosingMethod attribute of the anonymous class mentions "foo" in the "method" field. + * + * + * Java Compatibility + * ------------------ + * + * In the InnerClass entry for classes in top-level modules, the "outer class" is emitted as the + * mirror class (or the existing companion class), i.e. C1 is nested in T (not T$). 
+ * For classes nested in a nested object, the "outer class" is the module class: C2 is nested in T$N$ + * object T { + * class C1 + * object N { class C2 } + * } + * + * Reason: java compat. It's a "best effort" "solution". If you want to use "C1" from Java, you + * can write "T.C1", and the Java compiler will translate that to the classfile T$C1. + * + * If we would emit the "outer class" of C1 as "T$", then in Java you'd need to write "T$.C1" + * because the java compiler looks at the InnerClass attribute to find if an inner class exists. + * However, the Java compiler would then translate the '.' to '$' and you'd get the class name + * "T$$C1". This class file obviously does not exist. + * + * Directly using the encoded class name "T$C1" in Java does not work: since the classfile + * describes a nested class, the Java compiler hides it from the classpath and will report + * "cannot find symbol T$C1". This means that the class T.N.C2 cannot be referenced from a + * Java source file in any way. + * + * + * STATIC flag + * ----------- + * + * Java: static member classes have the static flag in the InnerClass attribute, for example B in + * class A { static class B { } } + * + * The spec is not very clear about when the static flag should be emitted. It says: "Marked or + * implicitly static in source." + * + * The presence of the static flag does NOT coincide with the absence of an "outer" field in the + * class. The java compiler never puts the static flag for local classes, even if they don't have + * an outer pointer: + * + * class A { + * void f() { class B {} } + * static void g() { calss C {} } + * } + * + * B has an outer pointer, C doesn't. Both B and C are NOT marked static in the InnerClass table. + * + * It seems sane to follow the same principle in the Scala compiler. 
So: + * + * package p + * object O1 { + * class C1 // static inner class + * object O2 { // static inner module + * def f = { + * class C2 { // non-static inner class, even though there's no outer pointer + * class C3 // non-static, has an outer pointer + * } + * } + * } + * } + * + * Mirror Classes + * -------------- + * + * TODO: innerclass attributes on mirror class + */ + + /** + * A ClassBType represents a class or interface type. The necessary information to build a + * ClassBType is extracted from compiler symbols and types, see BTypesFromSymbols. + * + * The `offset` and `length` fields are used to represent the internal name of the class. They + * are indices into some character array. The internal name can be obtained through the method + * `internalNameString`, which is abstract in this component. Name creation is assumed to be + * hash-consed, so if two ClassBTypes have the same internal name, they NEED to have the same + * `offset` and `length`. + * + * The actual implementation in subclass BTypesFromSymbols uses the global `chrs` array from the + * name table. This representation is efficient because the JVM class name is obtained through + * `classSymbol.javaBinaryName`. This already adds the necessary string to the `chrs` array, + * so it makes sense to reuse the same name table in the backend. + * + * ClassBType is not a case class because we want a custom equals method, and because the + * extractor extracts the internalName, which is what you typically need. + */ + final class ClassBType(val internalName: String) extends RefBType { + /** + * Write-once variable allows initializing a cyclic graph of infos. This is required for + * nested classes. 
Example: for the definition `class A { class B }` we have + * + * B.info.nestedInfo.outerClass == A + * A.info.memberClasses contains B + */ + private var _info: ClassInfo = null + + def info: ClassInfo = { + assert(_info != null, s"ClassBType.info not yet assigned: $this") + _info + } + + def info_=(i: ClassInfo): Unit = { + assert(_info == null, s"Cannot set ClassBType.info multiple times: $this") + _info = i + checkInfoConsistency() + } + + classBTypeFromInternalNameMap(internalName) = this + + private def checkInfoConsistency(): Unit = { + // we assert some properties. however, some of the linked ClassBType (members, superClass, + // interfaces) may not yet have an `_info` (initialization of cyclic structures). so we do a + // best-effort verification. + def ifInit(c: ClassBType)(p: ClassBType => Boolean): Boolean = c._info == null || p(c) + + def isJLO(t: ClassBType) = t.internalName == "java/lang/Object" + + assert(!ClassBType.isInternalPhantomType(internalName), s"Cannot create ClassBType for phantom type $this") + + assert( + if (info.superClass.isEmpty) { isJLO(this) || (DottyBackendInterface.isCompilingPrimitive && ClassBType.hasNoSuper(internalName)) } + else if (isInterface) isJLO(info.superClass.get) + else !isJLO(this) && ifInit(info.superClass.get)(!_.isInterface), + s"Invalid superClass in $this: ${info.superClass}" + ) + assert( + info.interfaces.forall(c => ifInit(c)(_.isInterface)), + s"Invalid interfaces in $this: ${info.interfaces}" + ) + + assert(info.memberClasses.forall(c => ifInit(c)(_.isNestedClass)), info.memberClasses) + } + + /** + * The internal name of a class is the string returned by java.lang.Class.getName, with all '.' + * replaced by '/'. For example "java/lang/String". 
+ */ + //def internalName: String = internalNameString(offset, length) + + /** + * @return The class name without the package prefix + */ + def simpleName: String = internalName.split("/").last + + def isInterface = (info.flags & asm.Opcodes.ACC_INTERFACE) != 0 + + def superClassesTransitive: List[ClassBType] = info.superClass match { + case None => Nil + case Some(sc) => sc :: sc.superClassesTransitive + } + + def isNestedClass = info.nestedInfo.isDefined + + def enclosingNestedClassesChain: List[ClassBType] = + if (isNestedClass) this :: info.nestedInfo.get.enclosingClass.enclosingNestedClassesChain + else Nil + + def innerClassAttributeEntry: Option[InnerClassEntry] = info.nestedInfo map { + case NestedInfo(_, outerName, innerName, isStaticNestedClass) => + import GenBCodeOps.addFlagIf + InnerClassEntry( + internalName, + outerName.orNull, + innerName.orNull, + info.flags.addFlagIf(isStaticNestedClass, asm.Opcodes.ACC_STATIC) + & ClassBType.INNER_CLASSES_FLAGS + ) + } + + def isSubtypeOf(other: ClassBType): Boolean = { + if (this == other) return true + + if (isInterface) { + if (other == ObjectRef) return true // interfaces conform to Object + if (!other.isInterface) return false // this is an interface, the other is some class other than object. interfaces cannot extend classes, so the result is false. + // else: this and other are both interfaces. continue to (*) + } else { + val sc = info.superClass + if (sc.isDefined && sc.get.isSubtypeOf(other)) return true // the superclass of this class conforms to other + if (!other.isInterface) return false // this and other are both classes, and the superclass of this does not conform + // else: this is a class, the other is an interface. continue to (*) + } + + // (*) check if some interface of this class conforms to other. 
+ info.interfaces.exists(_.isSubtypeOf(other)) + } + + /** + * Finding the least upper bound in agreement with the bytecode verifier + * Background: + * http://gallium.inria.fr/~xleroy/publi/bytecode-verification-JAR.pdf + * http://comments.gmane.org/gmane.comp.java.vm.languages/2293 + * https://issues.scala-lang.org/browse/SI-3872 + */ + def jvmWiseLUB(other: ClassBType): ClassBType = { + def isNotNullOrNothing(c: ClassBType) = !c.isNullType && !c.isNothingType + assert(isNotNullOrNothing(this) && isNotNullOrNothing(other), s"jvmWiseLub for null or nothing: $this - $other") + + val res: ClassBType = (this.isInterface, other.isInterface) match { + case (true, true) => + // exercised by test/files/run/t4761.scala + if (other.isSubtypeOf(this)) this + else if (this.isSubtypeOf(other)) other + else ObjectRef + + case (true, false) => + if (other.isSubtypeOf(this)) this else ObjectRef + + case (false, true) => + if (this.isSubtypeOf(other)) other else ObjectRef + + case _ => + // TODO @lry I don't really understand the reasoning here. + // Both this and other are classes. The code takes (transitively) all superclasses and + // finds the first common one. 
+ // MOST LIKELY the answer can be found here, see the comments and links by Miguel: + // - https://issues.scala-lang.org/browse/SI-3872 + firstCommonSuffix(this :: this.superClassesTransitive, other :: other.superClassesTransitive) + } + + assert(isNotNullOrNothing(res), s"jvmWiseLub computed: $res") + res + } + + private def firstCommonSuffix(as: List[ClassBType], bs: List[ClassBType]): ClassBType = { + var chainA = as + var chainB = bs + var fcs: ClassBType = null + while { + if (chainB contains chainA.head) fcs = chainA.head + else if (chainA contains chainB.head) fcs = chainB.head + else { + chainA = chainA.tail + chainB = chainB.tail + } + fcs == null + } do () + fcs + } + + /** + * Custom equals / hashCode: we only compare the name (offset / length) + */ + override def equals(o: Any): Boolean = (this eq o.asInstanceOf[Object]) || (o match { + case c: ClassBType @unchecked => c.internalName == this.internalName + case _ => false + }) + + override def hashCode: Int = { + import scala.runtime.Statics + var acc: Int = -889275714 + acc = Statics.mix(acc, internalName.hashCode) + Statics.finalizeHash(acc, 2) + } + } + + object ClassBType { + /** + * Pattern matching on a ClassBType extracts the `internalName` of the class. + */ + def unapply(c: ClassBType): Some[String] = Some(c.internalName) + + /** + * Valid flags for InnerClass attribute entry. + * See http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.7.6 + */ + private val INNER_CLASSES_FLAGS = { + asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_PRIVATE | asm.Opcodes.ACC_PROTECTED | + asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_FINAL | asm.Opcodes.ACC_INTERFACE | + asm.Opcodes.ACC_ABSTRACT | asm.Opcodes.ACC_SYNTHETIC | asm.Opcodes.ACC_ANNOTATION | + asm.Opcodes.ACC_ENUM + } + + // Primitive classes have no super class. A ClassBType for those is only created when + // they are actually being compiled (e.g., when compiling scala/Boolean.scala). 
+ private val hasNoSuper = Set( + "scala/Unit", + "scala/Boolean", + "scala/Char", + "scala/Byte", + "scala/Short", + "scala/Int", + "scala/Float", + "scala/Long", + "scala/Double" + ) + + private val isInternalPhantomType = Set( + "scala/Null", + "scala/Nothing" + ) + } + + /** + * The type info for a class. Used for symboltable-independent subtype checks in the backend. + * + * @param superClass The super class, not defined for class java/lang/Object. + * @param interfaces All transitively implemented interfaces, except for those inherited + * through the superclass. + * @param flags The java flags, obtained through `javaFlags`. Used also to derive + * the flags for InnerClass entries. + * @param memberClasses Classes nested in this class. Those need to be added to the + * InnerClass table, see the InnerClass spec summary above. + * @param nestedInfo If this describes a nested class, information for the InnerClass table. + */ + case class ClassInfo(superClass: Option[ClassBType], interfaces: List[ClassBType], flags: Int, + memberClasses: List[ClassBType], nestedInfo: Option[NestedInfo]) + + /** + * Information required to add a class to an InnerClass table. + * The spec summary above explains what information is required for the InnerClass entry. + * + * @param enclosingClass The enclosing class, if it is also nested. When adding a class + * to the InnerClass table, enclosing nested classes are also added. + * @param outerName The outerName field in the InnerClass entry, may be None. + * @param innerName The innerName field, may be None. + * @param isStaticNestedClass True if this is a static nested class (not inner class) (*) + * + * (*) Note that the STATIC flag in ClassInfo.flags, obtained through javaFlags(classSym), is not + * correct for the InnerClass entry, see javaFlags. The static flag in the InnerClass describes + * a source-level propety: if the class is in a static context (does not have an outer pointer). 
+ * This is checked when building the NestedInfo. + */ + case class NestedInfo(enclosingClass: ClassBType, + outerName: Option[String], + innerName: Option[String], + isStaticNestedClass: Boolean) + + /** + * This class holds the data for an entry in the InnerClass table. See the InnerClass summary + * above in this file. + * + * There's some overlap with the class NestedInfo, but it's not exactly the same and cleaner to + * keep separate. + * @param name The internal name of the class. + * @param outerName The internal name of the outer class, may be null. + * @param innerName The simple name of the inner class, may be null. + * @param flags The flags for this class in the InnerClass entry. + */ + case class InnerClassEntry(name: String, outerName: String, innerName: String, flags: Int) + + case class ArrayBType(componentType: BType) extends RefBType { + def dimension: Int = componentType match { + case a: ArrayBType => 1 + a.dimension + case _ => 1 + } + + def elementType: BType = componentType match { + case a: ArrayBType => a.elementType + case t => t + } + } + + case class MethodBType(argumentTypes: List[BType], returnType: BType) extends BType + + /* Some definitions that are required for the implementation of BTypes. They are abstract because + * initializing them requires information from types / symbols, which is not accessible here in + * BTypes. + * + * They are defs (not vals) because they are implemented using vars (see comment on CoreBTypes). + */ + + /** + * Just a named pair, used in CoreBTypes.asmBoxTo/asmUnboxTo. 
+ */ + /*final*/ case class MethodNameAndType(name: String, methodType: MethodBType) +} diff --git a/tests/pos-with-compiler-cc/backend/jvm/BTypesFromSymbols.scala b/tests/pos-with-compiler-cc/backend/jvm/BTypesFromSymbols.scala new file mode 100644 index 000000000000..54dafe6f0032 --- /dev/null +++ b/tests/pos-with-compiler-cc/backend/jvm/BTypesFromSymbols.scala @@ -0,0 +1,348 @@ +package dotty.tools +package backend +package jvm + +import scala.tools.asm +import scala.annotation.threadUnsafe +import scala.collection.mutable +import scala.collection.mutable.Clearable + +import dotty.tools.dotc.core.Flags._ +import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Phases._ +import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.core.Phases.Phase +import dotty.tools.dotc.transform.SymUtils._ +import dotty.tools.dotc.core.StdNames + +/** + * This class mainly contains the method classBTypeFromSymbol, which extracts the necessary + * information from a symbol and its type to create the corresponding ClassBType. It requires + * access to the compiler (global parameter). + * + * The mixin CoreBTypes defines core BTypes that are used in the backend. Building these BTypes + * uses classBTypeFromSymbol, hence requires access to the compiler (global). + * + * BTypesFromSymbols extends BTypes because the implementation of BTypes requires access to some + * of the core btypes. They are declared in BTypes as abstract members. Note that BTypes does + * not have access to the compiler instance. + */ +class BTypesFromSymbols[I <: DottyBackendInterface](val int: I) extends BTypes { + import int.{_, given} + import DottyBackendInterface.{symExtensions, _} + + lazy val TransientAttr = requiredClass[scala.transient] + lazy val VolatileAttr = requiredClass[scala.volatile] + + val bCodeAsmCommon: BCodeAsmCommon[int.type ] = new BCodeAsmCommon(int) + import bCodeAsmCommon._ + + // Why the proxy, see documentation of class [[CoreBTypes]]. 
+ val coreBTypes: CoreBTypesProxy[this.type] = new CoreBTypesProxy[this.type](this) + import coreBTypes._ + + final def intializeCoreBTypes(): Unit = { + coreBTypes.setBTypes(new CoreBTypes[this.type](this)) + } + + private[this] val perRunCaches: Caches = new Caches { + def newAnyRefMap[K <: AnyRef, V](): mutable.AnyRefMap[K, V] = new mutable.AnyRefMap[K, V]() + def newWeakMap[K, V](): mutable.WeakHashMap[K, V] = new mutable.WeakHashMap[K, V]() + def recordCache[T <: Clearable](cache: T): T = cache + def newMap[K, V](): mutable.HashMap[K, V] = new mutable.HashMap[K, V]() + def newSet[K](): mutable.Set[K] = new mutable.HashSet[K] + } + + // TODO remove abstraction + private abstract class Caches { + def recordCache[T <: Clearable](cache: T): T + def newWeakMap[K, V](): collection.mutable.WeakHashMap[K, V] + def newMap[K, V](): collection.mutable.HashMap[K, V] + def newSet[K](): collection.mutable.Set[K] + def newAnyRefMap[K <: AnyRef, V](): collection.mutable.AnyRefMap[K, V] + } + + @threadUnsafe protected lazy val classBTypeFromInternalNameMap = { + perRunCaches.recordCache(collection.concurrent.TrieMap.empty[String, ClassBType]) + } + + /** + * Cache for the method classBTypeFromSymbol. + */ + @threadUnsafe private lazy val convertedClasses = perRunCaches.newMap[Symbol, ClassBType]() + + /** + * The ClassBType for a class symbol `sym`. + */ + final def classBTypeFromSymbol(classSym: Symbol): ClassBType = { + assert(classSym != NoSymbol, "Cannot create ClassBType from NoSymbol") + assert(classSym.isClass, s"Cannot create ClassBType from non-class symbol $classSym") + assert( + (!primitiveTypeMap.contains(classSym) || isCompilingPrimitive) && + (classSym != defn.NothingClass && classSym != defn.NullClass), + s"Cannot create ClassBType for special class symbol ${classSym.showFullName}") + + convertedClasses.getOrElse(classSym, { + val internalName = classSym.javaBinaryName + // We first create and add the ClassBType to the hash map before computing its info. 
This + // allows initializing cylic dependencies, see the comment on variable ClassBType._info. + val classBType = new ClassBType(internalName) + convertedClasses(classSym) = classBType + setClassInfo(classSym, classBType) + }) + } + + final def mirrorClassBTypeFromSymbol(moduleClassSym: Symbol): ClassBType = { + assert(moduleClassSym.isTopLevelModuleClass, s"not a top-level module class: $moduleClassSym") + val internalName = moduleClassSym.javaBinaryName.stripSuffix(StdNames.str.MODULE_SUFFIX) + val bType = ClassBType(internalName) + bType.info = ClassInfo( + superClass = Some(ObjectRef), + interfaces = Nil, + flags = asm.Opcodes.ACC_SUPER | asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_FINAL, + memberClasses = getMemberClasses(moduleClassSym).map(classBTypeFromSymbol), + nestedInfo = None + ) + bType + } + + private def setClassInfo(classSym: Symbol, classBType: ClassBType): ClassBType = { + val superClassSym: Symbol = { + val t = classSym.asClass.superClass + if (t.exists) t + else if (classSym.is(ModuleClass)) { + // workaround #371 + + println(s"Warning: mocking up superclass for $classSym") + defn.ObjectClass + } + else t + } + assert( + if (classSym == defn.ObjectClass) + superClassSym == NoSymbol + else if (classSym.isInterface) + superClassSym == defn.ObjectClass + else + // A ClassBType for a primitive class (scala.Boolean et al) is only created when compiling these classes. + ((superClassSym != NoSymbol) && !superClassSym.isInterface) || (isCompilingPrimitive && primitiveTypeMap.contains(classSym)), + s"Bad superClass for $classSym: $superClassSym" + ) + val superClass = if (superClassSym == NoSymbol) None + else Some(classBTypeFromSymbol(superClassSym)) + + /** + * All interfaces implemented by a class, except for those inherited through the superclass. + * Redundant interfaces are removed unless there is a super call to them. 
+ */ + extension (sym: Symbol) def superInterfaces: List[Symbol] = { + val directlyInheritedTraits = sym.directlyInheritedTraits + val directlyInheritedTraitsSet = directlyInheritedTraits.toSet + val allBaseClasses = directlyInheritedTraits.iterator.flatMap(_.asClass.baseClasses.drop(1)).toSet + val superCalls = superCallsMap.getOrElse(sym, Set.empty) + val additional = (superCalls -- directlyInheritedTraitsSet).filter(_.is(Trait)) +// if (additional.nonEmpty) +// println(s"$fullName: adding supertraits $additional") + directlyInheritedTraits.filter(t => !allBaseClasses(t) || superCalls(t)) ++ additional + } + + val interfaces = classSym.superInterfaces.map(classBTypeFromSymbol) + + val flags = javaFlags(classSym) + + /* The InnerClass table of a class C must contain all nested classes of C, even if they are only + * declared but not otherwise referenced in C (from the bytecode or a method / field signature). + * We collect them here. + */ + val nestedClassSymbols = { + // The lambdalift phase lifts all nested classes to the enclosing class, so if we collect + // member classes right after lambdalift, we obtain all nested classes, including local and + // anonymous ones. + val nestedClasses = getNestedClasses(classSym) + + // If this is a top-level class, and it has a companion object, the member classes of the + // companion are added as members of the class. For example: + // class C { } + // object C { + // class D + // def f = { class E } + // } + // The class D is added as a member of class C. The reason is that the InnerClass attribute + // for D will containt class "C" and NOT the module class "C$" as the outer class of D. + // This is done by buildNestedInfo, the reason is Java compatibility, see comment in BTypes. + // For consistency, the InnerClass entry for D needs to be present in C - to Java it looks + // like D is a member of C, not C$. 
+ val linkedClass = classSym.linkedClass + val companionModuleMembers = { + if (classSym.linkedClass.isTopLevelModuleClass) getMemberClasses(classSym.linkedClass) + else Nil + } + + nestedClasses ++ companionModuleMembers + } + + /** + * For nested java classes, the scala compiler creates both a class and a module (and therefore + * a module class) symbol. For example, in `class A { class B {} }`, the nestedClassSymbols + * for A contain both the class B and the module class B. + * Here we get rid of the module class B, making sure that the class B is present. + */ + val nestedClassSymbolsNoJavaModuleClasses = nestedClassSymbols.filter(s => { + if (s.is(JavaDefined) && s.is(ModuleClass)) { + // We could also search in nestedClassSymbols for s.linkedClassOfClass, but sometimes that + // returns NoSymbol, so it doesn't work. + val nb = nestedClassSymbols.count(mc => mc.name == s.name && mc.owner == s.owner) + // this assertion is specific to how ScalaC works. It doesn't apply to dotty, as n dotty there will be B & B$ + // assert(nb == 2, s"Java member module without member class: $s - $nestedClassSymbols") + false + } else true + }) + + val memberClasses = nestedClassSymbolsNoJavaModuleClasses.map(classBTypeFromSymbol) + + val nestedInfo = buildNestedInfo(classSym) + + classBType.info = ClassInfo(superClass, interfaces, flags, memberClasses, nestedInfo) + classBType + } + + /** For currently compiled classes: All locally defined classes including local classes. + * The empty list for classes that are not currently compiled. + */ + private def getNestedClasses(sym: Symbol): List[Symbol] = definedClasses(sym, flattenPhase) + + /** For currently compiled classes: All classes that are declared as members of this class + * (but not inherited ones). The empty list for classes that are not currently compiled. 
+ */ + private def getMemberClasses(sym: Symbol): List[Symbol] = definedClasses(sym, lambdaLiftPhase) + + private def definedClasses(sym: Symbol, phase: Phase) = + if (sym.isDefinedInCurrentRun) + atPhase(phase) { + toDenot(sym).info.decls.filter(sym => sym.isClass && !sym.isEffectivelyErased) + } + else Nil + + private def buildNestedInfo(innerClassSym: Symbol): Option[NestedInfo] = { + assert(innerClassSym.isClass, s"Cannot build NestedInfo for non-class symbol $innerClassSym") + + val isNested = !innerClassSym.originalOwner.originalLexicallyEnclosingClass.is(PackageClass) + if (!isNested) None + else { + // See comment in BTypes, when is a class marked static in the InnerClass table. + val isStaticNestedClass = innerClassSym.originalOwner.originalLexicallyEnclosingClass.isOriginallyStaticOwner + + // After lambdalift (which is where we are), the rawowoner field contains the enclosing class. + val enclosingClassSym = { + if (innerClassSym.isClass) { + atPhase(flattenPhase.prev) { + toDenot(innerClassSym).owner.enclosingClass + } + } + else atPhase(flattenPhase.prev)(innerClassSym.enclosingClass) + } //todo is handled specially for JavaDefined symbols in scalac + + val enclosingClass: ClassBType = classBTypeFromSymbol(enclosingClassSym) + + val outerName: Option[String] = { + if (isAnonymousOrLocalClass(innerClassSym)) { + None + } else { + val outerName = innerClassSym.originalOwner.originalLexicallyEnclosingClass.javaBinaryName + def dropModule(str: String): String = + if (!str.isEmpty && str.last == '$') str.take(str.length - 1) else str + // Java compatibility. See the big comment in BTypes that summarizes the InnerClass spec. 
+ val outerNameModule = + if (innerClassSym.originalOwner.originalLexicallyEnclosingClass.isTopLevelModuleClass) dropModule(outerName) + else outerName + Some(outerNameModule.toString) + } + } + + val innerName: Option[String] = { + if (innerClassSym.isAnonymousClass || innerClassSym.isAnonymousFunction) None + else { + val original = innerClassSym.initial + Some(atPhase(original.validFor.phaseId)(innerClassSym.name).mangledString) // moduleSuffix for module classes + } + } + + Some(NestedInfo(enclosingClass, outerName, innerName, isStaticNestedClass)) + } + } + + /** + * This is basically a re-implementation of sym.isStaticOwner, but using the originalOwner chain. + * + * The problem is that we are interested in a source-level property. Various phases changed the + * symbol's properties in the meantime, mostly lambdalift modified (destructively) the owner. + * Therefore, `sym.isStatic` is not what we want. For example, in + * object T { def f { object U } } + * the owner of U is T, so UModuleClass.isStatic is true. Phase travel does not help here. + */ + extension (sym: Symbol) + private def isOriginallyStaticOwner: Boolean = + sym.is(PackageClass) || sym.is(ModuleClass) && sym.originalOwner.originalLexicallyEnclosingClass.isOriginallyStaticOwner + + /** + * Return the Java modifiers for the given symbol. + * Java modifiers for classes: + * - public, abstract, final, strictfp (not used) + * for interfaces: + * - the same as for classes, without 'final' + * for fields: + * - public, private (*) + * - static, final + * for methods: + * - the same as for fields, plus: + * - abstract, synchronized (not used), strictfp (not used), native (not used) + * for all: + * - deprecated + * + * (*) protected cannot be used, since inner classes 'see' protected members, + * and they would fail verification after lifted. + */ + final def javaFlags(sym: Symbol): Int = { + + // Classes are always emitted as public. 
This matches the behavior of Scala 2 + // and is necessary for object deserialization to work properly, otherwise + // ModuleSerializationProxy may fail with an accessiblity error (see + // tests/run/serialize.scala and https://github.com/typelevel/cats-effect/pull/2360). + val privateFlag = !sym.isClass && (sym.is(Private) || (sym.isPrimaryConstructor && sym.owner.isTopLevelModuleClass)) + + val finalFlag = sym.is(Final) && !toDenot(sym).isClassConstructor && !sym.is(Mutable, butNot = Accessor) && !sym.enclosingClass.is(Trait) + + import asm.Opcodes._ + import GenBCodeOps.addFlagIf + 0 .addFlagIf(privateFlag, ACC_PRIVATE) + .addFlagIf(!privateFlag, ACC_PUBLIC) + .addFlagIf(sym.is(Deferred) || sym.isOneOf(AbstractOrTrait), ACC_ABSTRACT) + .addFlagIf(sym.isInterface, ACC_INTERFACE) + .addFlagIf(finalFlag + // Primitives are "abstract final" to prohibit instantiation + // without having to provide any implementations, but that is an + // illegal combination of modifiers at the bytecode level so + // suppress final if abstract if present. 
+ && !sym.isOneOf(AbstractOrTrait) + // Mixin forwarders are bridges and can be final, but final bridges confuse some frameworks + && !sym.is(Bridge), ACC_FINAL) + .addFlagIf(sym.isStaticMember, ACC_STATIC) + .addFlagIf(sym.is(Bridge), ACC_BRIDGE | ACC_SYNTHETIC) + .addFlagIf(sym.is(Artifact), ACC_SYNTHETIC) + .addFlagIf(sym.isClass && !sym.isInterface, ACC_SUPER) + .addFlagIf(sym.isAllOf(JavaEnumTrait), ACC_ENUM) + .addFlagIf(sym.is(JavaVarargs), ACC_VARARGS) + .addFlagIf(sym.is(Synchronized), ACC_SYNCHRONIZED) + .addFlagIf(sym.isDeprecated, ACC_DEPRECATED) + .addFlagIf(sym.is(Enum), ACC_ENUM) + } + + def javaFieldFlags(sym: Symbol) = { + import asm.Opcodes._ + import GenBCodeOps.addFlagIf + javaFlags(sym) + .addFlagIf(sym.hasAnnotation(TransientAttr), ACC_TRANSIENT) + .addFlagIf(sym.hasAnnotation(VolatileAttr), ACC_VOLATILE) + .addFlagIf(!sym.is(Mutable), ACC_FINAL) + } +} diff --git a/compiler/src/dotty/tools/backend/jvm/BytecodeWriters.scala b/tests/pos-with-compiler-cc/backend/jvm/BytecodeWriters.scala similarity index 100% rename from compiler/src/dotty/tools/backend/jvm/BytecodeWriters.scala rename to tests/pos-with-compiler-cc/backend/jvm/BytecodeWriters.scala diff --git a/tests/pos-with-compiler-cc/backend/jvm/ClassNode1.java b/tests/pos-with-compiler-cc/backend/jvm/ClassNode1.java new file mode 100644 index 000000000000..c5594ae3dea6 --- /dev/null +++ b/tests/pos-with-compiler-cc/backend/jvm/ClassNode1.java @@ -0,0 +1,39 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package dotty.tools.backend.jvm; + +import scala.tools.asm.MethodVisitor; +import scala.tools.asm.Opcodes; +import scala.tools.asm.tree.ClassNode; +import scala.tools.asm.tree.MethodNode; + +/** + * A subclass of {@link ClassNode} to customize the representation of + * label nodes with {@link LabelNode1}. + */ +public class ClassNode1 extends ClassNode { + public ClassNode1() { + this(Opcodes.ASM6); + } + + public ClassNode1(int api) { + super(api); + } + + @Override + public MethodVisitor visitMethod(int access, String name, String descriptor, String signature, String[] exceptions) { + MethodNode method = new MethodNode1(access, name, descriptor, signature, exceptions); + methods.add(method); + return method; + } +} diff --git a/tests/pos-with-compiler-cc/backend/jvm/CollectSuperCalls.scala b/tests/pos-with-compiler-cc/backend/jvm/CollectSuperCalls.scala new file mode 100644 index 000000000000..299c1c75d6cf --- /dev/null +++ b/tests/pos-with-compiler-cc/backend/jvm/CollectSuperCalls.scala @@ -0,0 +1,48 @@ +package dotty.tools.backend.jvm + +import dotty.tools.dotc.ast.tpd +import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Phases._ +import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.core.Flags.Trait +import dotty.tools.dotc.transform.MegaPhase.MiniPhase + +/** Collect all super calls to trait members. + * + * For each super reference to trait member, register a call from the current class to the + * owner of the referenced member. + * + * This information is used to know if it is safe to remove a redundant mixin class. + * A redundant mixin class is one that is implemented by another mixin class. As the + * methods in a redundant mixin class could be implemented with a default abstract method, + * the redundant mixin class could be required as a parent by the JVM. 
+ */ +class CollectSuperCalls extends MiniPhase { + import tpd._ + + override def phaseName: String = CollectSuperCalls.name + + override def description: String = CollectSuperCalls.description + + override def transformSelect(tree: Select)(using Context): Tree = { + tree.qualifier match { + case sup: Super => + if (tree.symbol.owner.is(Trait)) + registerSuperCall(ctx.owner.enclosingClass.asClass, tree.symbol.owner.asClass) + case _ => + } + tree + } + + private def registerSuperCall(sym: ClassSymbol, calls: ClassSymbol)(using Context) = { + genBCodePhase match { + case genBCodePhase: GenBCode => + genBCodePhase.registerSuperCall(sym, calls) + case _ => + } + } +} + +object CollectSuperCalls: + val name: String = "collectSuperCalls" + val description: String = "find classes that are called with super" diff --git a/tests/pos-with-compiler-cc/backend/jvm/CoreBTypes.scala b/tests/pos-with-compiler-cc/backend/jvm/CoreBTypes.scala new file mode 100644 index 000000000000..d5fce3f53627 --- /dev/null +++ b/tests/pos-with-compiler-cc/backend/jvm/CoreBTypes.scala @@ -0,0 +1,294 @@ +package dotty.tools +package backend +package jvm + + +import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.transform.Erasure +import scala.tools.asm.{Handle, Opcodes} +import dotty.tools.dotc.core.StdNames + +/** + * Core BTypes and some other definitions. The initialization of these definitions requies access + * to symbols / types (global). + * + * The symbols used to initialize the ClassBTypes may change from one compiler run to the next. To + * make sure the definitions are consistent with the symbols in the current run, the + * `intializeCoreBTypes` method in BTypesFromSymbols creates a new instance of CoreBTypes in each + * compiler run. + * + * The class BTypesFromSymbols does not directly reference CoreBTypes, but CoreBTypesProxy. The + * reason is that having a `var bTypes: CoreBTypes` would not allow `import bTypes._`. 
Instead, the + * proxy class holds a `CoreBTypes` in a variable field and forwards to this instance. + * + * The definitions in `CoreBTypes` need to be lazy vals to break an initialization cycle. When + * creating a new instance to assign to the proxy, the `classBTypeFromSymbol` invoked in the + * constructor will actucally go through the proxy. The lazy vals make sure the instance is assigned + * in the proxy before the fields are initialized. + * + * Note: if we did not re-create the core BTypes on each compiler run, BType.classBTypeFromInternalNameMap + * could not be a perRunCache anymore: the classes defeined here need to be in that map, they are + * added when the ClassBTypes are created. The per run cache removes them, so they would be missing + * in the second run. + */ +class CoreBTypes[BTFS <: BTypesFromSymbols[_ <: DottyBackendInterface]](val bTypes: BTFS) { + import bTypes._ + import int.given + import DottyBackendInterface._ + + //import global._ + //import rootMirror.{requiredClass, getClassIfDefined} + //import definitions._ + + /** + * Maps primitive types to their corresponding PrimitiveBType. The map is defined lexically above + * the first use of `classBTypeFromSymbol` because that method looks at the map. 
+ */ + lazy val primitiveTypeMap: Map[Symbol, PrimitiveBType] = Map( + defn.UnitClass -> UNIT, + defn.BooleanClass -> BOOL, + defn.CharClass -> CHAR, + defn.ByteClass -> BYTE, + defn.ShortClass -> SHORT, + defn.IntClass -> INT, + defn.LongClass -> LONG, + defn.FloatClass -> FLOAT, + defn.DoubleClass -> DOUBLE + ) + + private lazy val BOXED_UNIT : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Void]) + private lazy val BOXED_BOOLEAN : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Boolean]) + private lazy val BOXED_BYTE : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Byte]) + private lazy val BOXED_SHORT : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Short]) + private lazy val BOXED_CHAR : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Character]) + private lazy val BOXED_INT : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Integer]) + private lazy val BOXED_LONG : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Long]) + private lazy val BOXED_FLOAT : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Float]) + private lazy val BOXED_DOUBLE : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Double]) + + /** + * Map from primitive types to their boxed class type. Useful when pushing class literals onto the + * operand stack (ldc instruction taking a class literal), see genConstant. + */ + lazy val boxedClassOfPrimitive: Map[PrimitiveBType, ClassBType] = Map( + UNIT -> BOXED_UNIT, + BOOL -> BOXED_BOOLEAN, + BYTE -> BOXED_BYTE, + SHORT -> BOXED_SHORT, + CHAR -> BOXED_CHAR, + INT -> BOXED_INT, + LONG -> BOXED_LONG, + FLOAT -> BOXED_FLOAT, + DOUBLE -> BOXED_DOUBLE + ) + + lazy val boxedClasses: Set[ClassBType] = boxedClassOfPrimitive.values.toSet + + /** + * Maps the method symbol for a box method to the boxed type of the result. For example, the + * method symbol for `Byte.box()` is mapped to the ClassBType `java/lang/Byte`. 
+ */ + lazy val boxResultType: Map[Symbol, ClassBType] = { + val boxMethods = defn.ScalaValueClasses().map{x => // @darkdimius Are you sure this should be a def? + (x, Erasure.Boxing.boxMethod(x.asClass)) + }.toMap + for ((valueClassSym, boxMethodSym) <- boxMethods) + yield boxMethodSym -> boxedClassOfPrimitive(primitiveTypeMap(valueClassSym)) + } + + /** + * Maps the method symbol for an unbox method to the primitive type of the result. + * For example, the method symbol for `Byte.unbox()`) is mapped to the PrimitiveBType BYTE. */ + lazy val unboxResultType: Map[Symbol, PrimitiveBType] = { + val unboxMethods: Map[Symbol, Symbol] = + defn.ScalaValueClasses().map(x => (x, Erasure.Boxing.unboxMethod(x.asClass))).toMap + for ((valueClassSym, unboxMethodSym) <- unboxMethods) + yield unboxMethodSym -> primitiveTypeMap(valueClassSym) + } + + /* + * srNothingRef and srNullRef exist at run-time only. They are the bytecode-level manifestation (in + * method signatures only) of what shows up as NothingClass (scala.Nothing) resp. NullClass (scala.Null) in Scala ASTs. + * + * Therefore, when srNothingRef or srNullRef are to be emitted, a mapping is needed: the internal + * names of NothingClass and NullClass can't be emitted as-is. + * TODO @lry Once there's a 2.11.3 starr, use the commented argument list. 
The current starr crashes on the type literal `scala.runtime.Nothing$` + */ + lazy val srNothingRef : ClassBType = classBTypeFromSymbol(requiredClass("scala.runtime.Nothing$")) // (requiredClass[scala.runtime.Nothing$]) + lazy val srNullRef : ClassBType = classBTypeFromSymbol(requiredClass("scala.runtime.Null$")) // (requiredClass[scala.runtime.Null$]) + + lazy val ObjectRef : ClassBType = classBTypeFromSymbol(defn.ObjectClass) + lazy val StringRef : ClassBType = classBTypeFromSymbol(defn.StringClass) + lazy val jlStringBuilderRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.StringBuilder]) + lazy val jlStringBufferRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.StringBuffer]) + lazy val jlCharSequenceRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.CharSequence]) + lazy val jlClassRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Class[_]]) + lazy val jlThrowableRef : ClassBType = classBTypeFromSymbol(defn.ThrowableClass) + lazy val jlCloneableRef : ClassBType = classBTypeFromSymbol(defn.JavaCloneableClass) // java/lang/Cloneable + lazy val jioSerializableRef : ClassBType = classBTypeFromSymbol(requiredClass[java.io.Serializable]) // java/io/Serializable + lazy val jlClassCastExceptionRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.ClassCastException]) // java/lang/ClassCastException + lazy val jlIllegalArgExceptionRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.IllegalArgumentException]) + lazy val jliSerializedLambdaRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.SerializedLambda]) + + lazy val srBoxesRunTimeRef: ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.BoxesRunTime]) + + private lazy val jliCallSiteRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.CallSite]) + private lazy val jliLambdaMetafactoryRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.LambdaMetafactory]) + private lazy val 
jliMethodHandleRef : ClassBType = classBTypeFromSymbol(defn.MethodHandleClass) + private lazy val jliMethodHandlesLookupRef : ClassBType = classBTypeFromSymbol(defn.MethodHandlesLookupClass) + private lazy val jliMethodTypeRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.MethodType]) + private lazy val jliStringConcatFactoryRef : ClassBType = classBTypeFromSymbol(requiredClass("java.lang.invoke.StringConcatFactory")) // since JDK 9 + private lazy val srLambdaDeserialize : ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.LambdaDeserialize]) + + lazy val jliLambdaMetaFactoryMetafactoryHandle: Handle = new Handle( + Opcodes.H_INVOKESTATIC, + jliLambdaMetafactoryRef.internalName, + "metafactory", + MethodBType( + List(jliMethodHandlesLookupRef, StringRef, jliMethodTypeRef, jliMethodTypeRef, jliMethodHandleRef, jliMethodTypeRef), + jliCallSiteRef + ).descriptor, + /* itf = */ false) + + lazy val jliLambdaMetaFactoryAltMetafactoryHandle: Handle = new Handle( + Opcodes.H_INVOKESTATIC, + jliLambdaMetafactoryRef.internalName, + "altMetafactory", + MethodBType( + List(jliMethodHandlesLookupRef, StringRef, jliMethodTypeRef, ArrayBType(ObjectRef)), + jliCallSiteRef + ).descriptor, + /* itf = */ false) + + lazy val jliLambdaDeserializeBootstrapHandle: Handle = new Handle( + Opcodes.H_INVOKESTATIC, + srLambdaDeserialize.internalName, + "bootstrap", + MethodBType( + List(jliMethodHandlesLookupRef, StringRef, jliMethodTypeRef, ArrayBType(jliMethodHandleRef)), + jliCallSiteRef + ).descriptor, + /* itf = */ false) + + lazy val jliStringConcatFactoryMakeConcatWithConstantsHandle = new Handle( + Opcodes.H_INVOKESTATIC, + jliStringConcatFactoryRef.internalName, + "makeConcatWithConstants", + MethodBType( + List(jliMethodHandlesLookupRef, StringRef, jliMethodTypeRef, StringRef, ArrayBType(ObjectRef)), + jliCallSiteRef + ).descriptor, + /* itf = */ false) + + /** + * Methods in scala.runtime.BoxesRuntime + */ + lazy val asmBoxTo : Map[BType, 
MethodNameAndType] = Map( + BOOL -> MethodNameAndType("boxToBoolean", MethodBType(List(BOOL), BOXED_BOOLEAN)), + BYTE -> MethodNameAndType("boxToByte", MethodBType(List(BYTE), BOXED_BYTE)), + CHAR -> MethodNameAndType("boxToCharacter", MethodBType(List(CHAR), BOXED_CHAR)), + SHORT -> MethodNameAndType("boxToShort", MethodBType(List(SHORT), BOXED_SHORT)), + INT -> MethodNameAndType("boxToInteger", MethodBType(List(INT), BOXED_INT)), + LONG -> MethodNameAndType("boxToLong", MethodBType(List(LONG), BOXED_LONG)), + FLOAT -> MethodNameAndType("boxToFloat", MethodBType(List(FLOAT), BOXED_FLOAT)), + DOUBLE -> MethodNameAndType("boxToDouble", MethodBType(List(DOUBLE), BOXED_DOUBLE)) + ) + + lazy val asmUnboxTo: Map[BType, MethodNameAndType] = Map( + BOOL -> MethodNameAndType("unboxToBoolean", MethodBType(List(ObjectRef), BOOL)), + BYTE -> MethodNameAndType("unboxToByte", MethodBType(List(ObjectRef), BYTE)), + CHAR -> MethodNameAndType("unboxToChar", MethodBType(List(ObjectRef), CHAR)), + SHORT -> MethodNameAndType("unboxToShort", MethodBType(List(ObjectRef), SHORT)), + INT -> MethodNameAndType("unboxToInt", MethodBType(List(ObjectRef), INT)), + LONG -> MethodNameAndType("unboxToLong", MethodBType(List(ObjectRef), LONG)), + FLOAT -> MethodNameAndType("unboxToFloat", MethodBType(List(ObjectRef), FLOAT)), + DOUBLE -> MethodNameAndType("unboxToDouble", MethodBType(List(ObjectRef), DOUBLE)) + ) + + lazy val typeOfArrayOp: Map[Int, BType] = { + import dotty.tools.backend.ScalaPrimitivesOps._ + Map( + (List(ZARRAY_LENGTH, ZARRAY_GET, ZARRAY_SET) map (_ -> BOOL)) ++ + (List(BARRAY_LENGTH, BARRAY_GET, BARRAY_SET) map (_ -> BYTE)) ++ + (List(SARRAY_LENGTH, SARRAY_GET, SARRAY_SET) map (_ -> SHORT)) ++ + (List(CARRAY_LENGTH, CARRAY_GET, CARRAY_SET) map (_ -> CHAR)) ++ + (List(IARRAY_LENGTH, IARRAY_GET, IARRAY_SET) map (_ -> INT)) ++ + (List(LARRAY_LENGTH, LARRAY_GET, LARRAY_SET) map (_ -> LONG)) ++ + (List(FARRAY_LENGTH, FARRAY_GET, FARRAY_SET) map (_ -> FLOAT)) ++ + 
(List(DARRAY_LENGTH, DARRAY_GET, DARRAY_SET) map (_ -> DOUBLE)) ++ + (List(OARRAY_LENGTH, OARRAY_GET, OARRAY_SET) map (_ -> ObjectRef)) : _* + ) + } +} + +/** + * This trait make some core BTypes availalbe that don't depend on a Global instance. Some core + * BTypes are required to be accessible in the BTypes trait, which does not have access to Global. + * + * BTypes cannot refer to CoreBTypesProxy because some of its members depend on global, for example + * the type Symbol in + * def primitiveTypeMap: Map[Symbol, PrimitiveBType] + */ +trait CoreBTypesProxyGlobalIndependent[BTS <: BTypes] { + val bTypes: BTS + import bTypes._ + + def boxedClasses: Set[ClassBType] + + def srNothingRef : ClassBType + def srNullRef : ClassBType + + def ObjectRef : ClassBType + def jlCloneableRef : ClassBType + def jiSerializableRef : ClassBType +} + +/** + * See comment in class [[CoreBTypes]]. + */ +final class CoreBTypesProxy[BTFS <: BTypesFromSymbols[_ <: DottyBackendInterface]](val bTypes: BTFS) extends CoreBTypesProxyGlobalIndependent[BTFS] { + import bTypes._ + + private var _coreBTypes: CoreBTypes[bTypes.type] = _ + def setBTypes(coreBTypes: CoreBTypes[BTFS]): Unit = { + _coreBTypes = coreBTypes.asInstanceOf[CoreBTypes[bTypes.type]] + } + + def primitiveTypeMap: Map[Symbol, PrimitiveBType] = _coreBTypes.primitiveTypeMap + + def boxedClasses: Set[ClassBType] = _coreBTypes.boxedClasses + + def boxedClassOfPrimitive: Map[PrimitiveBType, ClassBType] = _coreBTypes.boxedClassOfPrimitive + + def boxResultType: Map[Symbol, ClassBType] = _coreBTypes.boxResultType + + def unboxResultType: Map[Symbol, PrimitiveBType] = _coreBTypes.unboxResultType + + def srNothingRef : ClassBType = _coreBTypes.srNothingRef + def srNullRef : ClassBType = _coreBTypes.srNullRef + + def ObjectRef : ClassBType = _coreBTypes.ObjectRef + def StringRef : ClassBType = _coreBTypes.StringRef + def jlStringBuilderRef : ClassBType = _coreBTypes.jlStringBuilderRef + def jlStringBufferRef : ClassBType = 
_coreBTypes.jlStringBufferRef + def jlCharSequenceRef : ClassBType = _coreBTypes.jlCharSequenceRef + def jlClassRef : ClassBType = _coreBTypes.jlClassRef + def jlThrowableRef : ClassBType = _coreBTypes.jlThrowableRef + def jlCloneableRef : ClassBType = _coreBTypes.jlCloneableRef + def jiSerializableRef : ClassBType = _coreBTypes.jioSerializableRef + def jlClassCastExceptionRef : ClassBType = _coreBTypes.jlClassCastExceptionRef + def jlIllegalArgExceptionRef : ClassBType = _coreBTypes.jlIllegalArgExceptionRef + def jliSerializedLambdaRef : ClassBType = _coreBTypes.jliSerializedLambdaRef + + def srBoxesRuntimeRef: ClassBType = _coreBTypes.srBoxesRunTimeRef + + def jliLambdaMetaFactoryMetafactoryHandle : Handle = _coreBTypes.jliLambdaMetaFactoryMetafactoryHandle + def jliLambdaMetaFactoryAltMetafactoryHandle : Handle = _coreBTypes.jliLambdaMetaFactoryAltMetafactoryHandle + def jliLambdaDeserializeBootstrapHandle : Handle = _coreBTypes.jliLambdaDeserializeBootstrapHandle + def jliStringConcatFactoryMakeConcatWithConstantsHandle: Handle = _coreBTypes.jliStringConcatFactoryMakeConcatWithConstantsHandle + + def asmBoxTo : Map[BType, MethodNameAndType] = _coreBTypes.asmBoxTo + def asmUnboxTo: Map[BType, MethodNameAndType] = _coreBTypes.asmUnboxTo + + def typeOfArrayOp: Map[Int, BType] = _coreBTypes.typeOfArrayOp +} diff --git a/tests/pos-with-compiler-cc/backend/jvm/DottyBackendInterface.scala b/tests/pos-with-compiler-cc/backend/jvm/DottyBackendInterface.scala new file mode 100644 index 000000000000..a70d671f9c63 --- /dev/null +++ b/tests/pos-with-compiler-cc/backend/jvm/DottyBackendInterface.scala @@ -0,0 +1,204 @@ +package dotty.tools.backend.jvm + +import scala.language.unsafeNulls + +import dotty.tools.dotc.ast.tpd +import dotty.tools.dotc.core.Flags._ +import dotty.tools.dotc.transform.SymUtils._ +import java.io.{File => _} + +import scala.reflect.ClassTag +import dotty.tools.io.AbstractFile +import dotty.tools.dotc.core._ +import Contexts._ +import Types._ +import 
Symbols._ +import Phases._ +import Decorators.em + +import dotty.tools.dotc.util.ReadOnlyMap +import dotty.tools.dotc.report + +import tpd._ + +import StdNames.nme +import NameKinds.LazyBitMapName +import Names.Name + +class DottyBackendInterface(val outputDirectory: AbstractFile, val superCallsMap: ReadOnlyMap[Symbol, Set[ClassSymbol]])(using val ctx: DetachedContext) { + + private val desugared = new java.util.IdentityHashMap[Type, tpd.Select] + + def cachedDesugarIdent(i: Ident): Option[tpd.Select] = { + var found = desugared.get(i.tpe) + if (found == null) { + tpd.desugarIdent(i) match { + case sel: tpd.Select => + desugared.put(i.tpe, sel) + found = sel + case _ => + } + } + if (found == null) None else Some(found) + } + + object DesugaredSelect extends DeconstructorCommon[tpd.Tree] { + + var desugared: tpd.Select = null + + override def isEmpty: Boolean = + desugared eq null + + def _1: Tree = desugared.qualifier + + def _2: Name = desugared.name + + override def unapply(s: tpd.Tree): this.type = { + s match { + case t: tpd.Select => desugared = t + case t: Ident => + cachedDesugarIdent(t) match { + case Some(t) => desugared = t + case None => desugared = null + } + case _ => desugared = null + } + + this + } + } + + object ArrayValue extends DeconstructorCommon[tpd.JavaSeqLiteral] { + def _1: Type = field.tpe match { + case JavaArrayType(elem) => elem + case _ => + report.error(em"JavaSeqArray with type ${field.tpe} reached backend: $field", ctx.source.atSpan(field.span)) + UnspecifiedErrorType + } + def _2: List[Tree] = field.elems + } + + abstract class DeconstructorCommon[T >: Null <: AnyRef] { + var field: T = null + def get: this.type = this + def isEmpty: Boolean = field eq null + def isDefined = !isEmpty + def unapply(s: T): this.type ={ + field = s + this + } + } + +} + +object DottyBackendInterface { + + private def erasureString(clazz: Class[_]): String = { + if (clazz.isArray) "Array[" + erasureString(clazz.getComponentType) + "]" + else 
clazz.getName + } + + def requiredClass(str: String)(using Context): ClassSymbol = + Symbols.requiredClass(str) + + def requiredClass[T](using evidence: ClassTag[T], ctx: Context): Symbol = + requiredClass(erasureString(evidence.runtimeClass)) + + def requiredModule(str: String)(using Context): Symbol = + Symbols.requiredModule(str) + + def requiredModule[T](using evidence: ClassTag[T], ctx: Context): Symbol = { + val moduleName = erasureString(evidence.runtimeClass) + val className = if (moduleName.endsWith("$")) moduleName.dropRight(1) else moduleName + requiredModule(className) + } + + given symExtensions: AnyRef with + extension (sym: Symbol) + + def isInterface(using Context): Boolean = (sym.is(PureInterface)) || sym.is(Trait) + + def isStaticConstructor(using Context): Boolean = (sym.isStaticMember && sym.isClassConstructor) || (sym.name eq nme.STATIC_CONSTRUCTOR) + + /** Fields of static modules will be static at backend + * + * Note that lazy val encoding assumes bitmap fields are non-static. + * See also `genPlainClass` in `BCodeSkelBuilder.scala`. + * + * TODO: remove the special handing of `LazyBitMapName` once we swtich to + * the new lazy val encoding: https://github.com/lampepfl/dotty/issues/7140 + */ + def isStaticModuleField(using Context): Boolean = + sym.owner.isStaticModuleClass && sym.isField && !sym.name.is(LazyBitMapName) + + def isStaticMember(using Context): Boolean = (sym ne NoSymbol) && + (sym.is(JavaStatic) || sym.isScalaStatic || sym.isStaticModuleField) + // guard against no sumbol cause this code is executed to select which call type(static\dynamic) to use to call array.clone + + /** + * True for module classes of modules that are top-level or owned only by objects. Module classes + * for such objects will get a MODULE$ flag and a corresponding static initializer. 
+ */ + def isStaticModuleClass(using Context): Boolean = + (sym.is(Module)) && { + // scalac uses atPickling here + // this would not work if modules are created after pickling + // for example by specialization + val original = toDenot(sym).initial + val validity = original.validFor + atPhase(validity.phaseId) { + toDenot(sym).isStatic + } + } + + + + def originalLexicallyEnclosingClass(using Context): Symbol = + // used to populate the EnclosingMethod attribute. + // it is very tricky in presence of classes(and annonymous classes) defined inside supper calls. + if (sym.exists) { + val validity = toDenot(sym).initial.validFor + atPhase(validity.phaseId) { + toDenot(sym).lexicallyEnclosingClass + } + } else NoSymbol + + /** + * True for module classes of package level objects. The backend will generate a mirror class for + * such objects. + */ + def isTopLevelModuleClass(using Context): Boolean = + sym.is(ModuleClass) && + atPhase(flattenPhase) { + toDenot(sym).owner.is(PackageClass) + } + + def javaSimpleName(using Context): String = toDenot(sym).name.mangledString + def javaClassName(using Context): String = toDenot(sym).fullName.mangledString + def javaBinaryName(using Context): String = javaClassName.replace('.', '/') + + end extension + + end symExtensions + + private val primitiveCompilationUnits = Set( + "Unit.scala", + "Boolean.scala", + "Char.scala", + "Byte.scala", + "Short.scala", + "Int.scala", + "Float.scala", + "Long.scala", + "Double.scala" + ) + + /** + * True if the current compilation unit is of a primitive class (scala.Boolean et al). + * Used only in assertions. 
+ */ + def isCompilingPrimitive(using Context) = { + primitiveCompilationUnits(ctx.compilationUnit.source.file.name) + } + +} diff --git a/tests/pos-with-compiler-cc/backend/jvm/GenBCode.scala b/tests/pos-with-compiler-cc/backend/jvm/GenBCode.scala new file mode 100644 index 000000000000..71d007370fe7 --- /dev/null +++ b/tests/pos-with-compiler-cc/backend/jvm/GenBCode.scala @@ -0,0 +1,671 @@ +package dotty.tools.backend.jvm + +import scala.language.unsafeNulls + +import dotty.tools.dotc.CompilationUnit +import dotty.tools.dotc.ast.Trees.{PackageDef, ValDef} +import dotty.tools.dotc.ast.tpd +import dotty.tools.dotc.core.Phases.Phase + +import scala.collection.mutable +import scala.jdk.CollectionConverters._ +import dotty.tools.dotc.transform.SymUtils._ +import dotty.tools.dotc.interfaces +import dotty.tools.dotc.report + +import dotty.tools.dotc.util.SourceFile +import java.util.Optional + +import dotty.tools.dotc.core._ +import dotty.tools.dotc.sbt.ExtractDependencies +import Contexts._ +import Phases._ +import Symbols._ +import Decorators.em + +import java.io.DataOutputStream +import java.nio.channels.ClosedByInterruptException + +import dotty.tools.tasty.{ TastyBuffer, TastyHeaderUnpickler } + +import scala.tools.asm +import scala.tools.asm.Handle +import scala.tools.asm.tree._ +import tpd._ +import StdNames._ +import dotty.tools.io._ +import scala.tools.asm.MethodTooLargeException +import scala.tools.asm.ClassTooLargeException + +class GenBCode extends Phase { + + override def phaseName: String = GenBCode.name + + override def description: String = GenBCode.description + + private val superCallsMap = new MutableSymbolMap[Set[ClassSymbol]] + def registerSuperCall(sym: Symbol, calls: ClassSymbol): Unit = { + val old = superCallsMap.getOrElse(sym, Set.empty) + superCallsMap.update(sym, old + calls) + } + + private val entryPoints = new mutable.HashSet[String]() + def registerEntryPoint(s: String): Unit = entryPoints += s + + private var myOutput: AbstractFile = _ + 
+ private def outputDir(using Context): AbstractFile = { + if (myOutput eq null) + myOutput = ctx.settings.outputDir.value + myOutput + } + + private var myPrimitives: DottyPrimitives = null + + override def run(using Context): Unit = + inDetachedContext: ctx ?=> + if myPrimitives == null then myPrimitives = new DottyPrimitives(ctx) + new GenBCodePipeline( + DottyBackendInterface(outputDir, superCallsMap), + myPrimitives + ).run(ctx.compilationUnit.tpdTree) + + + override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] = { + outputDir match + case jar: JarArchive => + updateJarManifestWithMainClass(jar, entryPoints.toList) + case _ => + try super.runOn(units) + finally outputDir match { + case jar: JarArchive => + if (ctx.run.nn.suspendedUnits.nonEmpty) + // If we close the jar the next run will not be able to write on the jar. + // But if we do not close it we cannot use it as part of the macro classpath of the suspended files. + report.error("Can not suspend and output to a jar at the same time. 
See suspension with -Xprint-suspension.") + + jar.close() + case _ => + } + } + + private def updateJarManifestWithMainClass(jarArchive: JarArchive, entryPoints: List[String])(using Context): Unit = + val mainClass = Option.when(!ctx.settings.XmainClass.isDefault)(ctx.settings.XmainClass.value).orElse { + entryPoints match + case List(mainClass) => + Some(mainClass) + case Nil => + report.warning("No Main-Class designated or discovered.") + None + case mcs => + report.warning(s"No Main-Class due to multiple entry points:\n ${mcs.mkString("\n ")}") + None + } + mainClass.map { mc => + val manifest = Jar.WManifest() + manifest.mainClass = mc + val file = jarArchive.subdirectoryNamed("META-INF").fileNamed("MANIFEST.MF") + val os = file.output + manifest.underlying.write(os) + os.close() + } + end updateJarManifestWithMainClass +} + +object GenBCode { + val name: String = "genBCode" + val description: String = "generate JVM bytecode" +} + +class GenBCodePipeline(val int: DottyBackendInterface, val primitives: DottyPrimitives)(using DetachedContext) extends BCodeSyncAndTry { + import DottyBackendInterface.symExtensions + + private var tree: Tree = _ + + private val sourceFile: SourceFile = ctx.compilationUnit.source + + /** Convert a `dotty.tools.io.AbstractFile` into a + * `dotty.tools.dotc.interfaces.AbstractFile`. 
+ */ + private def convertAbstractFile(absfile: dotty.tools.io.AbstractFile): interfaces.AbstractFile = + new interfaces.AbstractFile { + override def name = absfile.name + override def path = absfile.path + override def jfile = Optional.ofNullable(absfile.file) + } + + final class PlainClassBuilder(cunit: CompilationUnit) extends SyncAndTryBuilder(cunit) + +// class BCodePhase() { + + private var bytecodeWriter : BytecodeWriter = null + private var mirrorCodeGen : JMirrorBuilder = null + + /* ---------------- q1 ---------------- */ + + case class Item1(arrivalPos: Int, cd: TypeDef, cunit: CompilationUnit) { + def isPoison: Boolean = { arrivalPos == Int.MaxValue } + } + private val poison1 = Item1(Int.MaxValue, null, ctx.compilationUnit) + private val q1 = new java.util.LinkedList[Item1] + + /* ---------------- q2 ---------------- */ + + case class SubItem2(classNode: asm.tree.ClassNode, + file: dotty.tools.io.AbstractFile) + + case class Item2(arrivalPos: Int, + mirror: SubItem2, + plain: SubItem2) { + def isPoison: Boolean = { arrivalPos == Int.MaxValue } + } + + private val poison2 = Item2(Int.MaxValue, null, null) + private val q2 = new _root_.java.util.LinkedList[Item2] + + /* ---------------- q3 ---------------- */ + + /* + * An item of queue-3 (the last queue before serializing to disk) contains three of these + * (one for each of mirror and plain classes). 
+ * + * @param jclassName internal name of the class + * @param jclassBytes bytecode emitted for the class SubItem3 represents + */ + case class SubItem3( + jclassName: String, + jclassBytes: Array[Byte], + jclassFile: dotty.tools.io.AbstractFile + ) + + case class Item3(arrivalPos: Int, + mirror: SubItem3, + plain: SubItem3) { + + def isPoison: Boolean = { arrivalPos == Int.MaxValue } + } + private val i3comparator = new java.util.Comparator[Item3] { + override def compare(a: Item3, b: Item3) = { + if (a.arrivalPos < b.arrivalPos) -1 + else if (a.arrivalPos == b.arrivalPos) 0 + else 1 + } + } + private val poison3 = Item3(Int.MaxValue, null, null) + private val q3 = new java.util.PriorityQueue[Item3](1000, i3comparator) + + /* + * Pipeline that takes ClassDefs from queue-1, lowers them into an intermediate form, placing them on queue-2 + */ + class Worker1(needsOutFolder: Boolean) { + + private val lowerCaseNames = mutable.HashMap.empty[String, Symbol] + private def checkForCaseConflict(javaClassName: String, classSymbol: Symbol) = { + val lowerCaseName = javaClassName.toLowerCase + lowerCaseNames.get(lowerCaseName) match { + case None => + lowerCaseNames.put(lowerCaseName, classSymbol) + case Some(dupClassSym) => + // Order is not deterministic so we enforce lexicographic order between the duplicates for error-reporting + val (cl1, cl2) = + if (classSymbol.effectiveName.toString < dupClassSym.effectiveName.toString) (classSymbol, dupClassSym) + else (dupClassSym, classSymbol) + val same = classSymbol.effectiveName.toString == dupClassSym.effectiveName.toString + atPhase(typerPhase) { + if (same) + report.warning( // FIXME: This should really be an error, but then FromTasty tests fail + em"$cl1 and ${cl2.showLocated} produce classes that overwrite one another", cl1.sourcePos) + else + report.warning( + em"""$cl1 differs only in case from ${cl2.showLocated}. 
+ |uch classes will overwrite one another on case-insensitive filesystems.""", cl1.sourcePos) + } + } + } + + def run(): Unit = { + while (true) { + val item = q1.poll + if (item.isPoison) { + q2 add poison2 + return + } + else { + try { /*withCurrentUnit(item.cunit)*/(visit(item)) } + catch { + case ex: InterruptedException => + throw ex + case ex: Throwable => + println(s"Error while emitting ${item.cunit.source.file.name}") + throw ex + } + } + } + } + + /* + * Checks for duplicate internal names case-insensitively, + * builds ASM ClassNodes for mirror and plain classes; + * enqueues them in queue-2. + * + */ + def visit(item: Item1): Boolean = { + val Item1(arrivalPos, cd, cunit) = item + val claszSymbol = cd.symbol + + // -------------- mirror class, if needed -------------- + val mirrorC = + if (claszSymbol.isTopLevelModuleClass) { + if (claszSymbol.companionClass == NoSymbol) { + mirrorCodeGen.genMirrorClass(claszSymbol, cunit) + } else { + report.log(s"No mirror class for module with linked class: ${claszSymbol.showFullName}") + null + } + } else null + + // -------------- "plain" class -------------- + val pcb = new PlainClassBuilder(cunit) + pcb.genPlainClass(cd) + val outF = if (needsOutFolder) getOutFolder(claszSymbol, pcb.thisName) else null; + val plainC = pcb.cnode + + if (claszSymbol.isClass) // @DarkDimius is this test needed here? 
+ for (binary <- ctx.compilationUnit.pickled.get(claszSymbol.asClass)) { + val store = if (mirrorC ne null) mirrorC else plainC + val tasty = + val outTastyFile = getFileForClassfile(outF, store.name, ".tasty") + val outstream = new DataOutputStream(outTastyFile.bufferedOutput) + try outstream.write(binary()) + catch case ex: ClosedByInterruptException => + try + outTastyFile.delete() // don't leave an empty or half-written tastyfile around after an interrupt + catch + case _: Throwable => + throw ex + finally outstream.close() + + val uuid = new TastyHeaderUnpickler(binary()).readHeader() + val lo = uuid.getMostSignificantBits + val hi = uuid.getLeastSignificantBits + + // TASTY attribute is created but only the UUID bytes are stored in it. + // A TASTY attribute has length 16 if and only if the .tasty file exists. + val buffer = new TastyBuffer(16) + buffer.writeUncompressedLong(lo) + buffer.writeUncompressedLong(hi) + buffer.bytes + + val dataAttr = createJAttribute(nme.TASTYATTR.mangledString, tasty, 0, tasty.length) + store.visitAttribute(dataAttr) + } + + + // ----------- create files + + val classNodes = List(mirrorC, plainC) + val classFiles = classNodes.map(cls => + if (outF != null && cls != null) { + try { + checkForCaseConflict(cls.name, claszSymbol) + getFileForClassfile(outF, cls.name, ".class") + } catch { + case e: FileConflictException => + report.error(em"error writing ${cls.name}: ${e.getMessage}") + null + } + } else null + ) + + // ----------- compiler and sbt's callbacks + + val (fullClassName, isLocal) = atPhase(sbtExtractDependenciesPhase) { + (ExtractDependencies.classNameAsString(claszSymbol), claszSymbol.isLocal) + } + + for ((cls, clsFile) <- classNodes.zip(classFiles)) { + if (cls != null) { + val className = cls.name.replace('/', '.') + if (ctx.compilerCallback != null) + ctx.compilerCallback.onClassGenerated(sourceFile, convertAbstractFile(clsFile), className) + if (ctx.sbtCallback != null) { + if (isLocal) + 
ctx.sbtCallback.generatedLocalClass(sourceFile.jfile.orElse(null), clsFile.file) + else { + ctx.sbtCallback.generatedNonLocalClass(sourceFile.jfile.orElse(null), clsFile.file, + className, fullClassName) + } + } + } + } + + // ----------- hand over to pipeline-2 + + val item2 = + Item2(arrivalPos, + SubItem2(mirrorC, classFiles(0)), + SubItem2(plainC, classFiles(1))) + + q2 add item2 // at the very end of this method so that no Worker2 thread starts mutating before we're done. + + } // end of method visit(Item1) + + } // end of class BCodePhase.Worker1 + + /* + * Pipeline that takes ClassNodes from queue-2. The unit of work depends on the optimization level: + * + * (a) no optimization involves: + * - converting the plain ClassNode to byte array and placing it on queue-3 + */ + class Worker2 { + import bTypes.ClassBType + import bTypes.coreBTypes.jliLambdaMetaFactoryAltMetafactoryHandle + // lazy val localOpt = new LocalOpt(new Settings()) + + private def localOptimizations(classNode: ClassNode): Unit = { + // BackendStats.timed(BackendStats.methodOptTimer)(localOpt.methodOptimizations(classNode)) + } + + + /* Return an array of all serializable lambdas in this class */ + private def collectSerializableLambdas(classNode: ClassNode): Array[Handle] = { + val indyLambdaBodyMethods = new mutable.ArrayBuffer[Handle] + for (m <- classNode.methods.asScala) { + val iter = m.instructions.iterator + while (iter.hasNext) { + val insn = iter.next() + insn match { + case indy: InvokeDynamicInsnNode + if indy.bsm == jliLambdaMetaFactoryAltMetafactoryHandle => + import java.lang.invoke.LambdaMetafactory.FLAG_SERIALIZABLE + val metafactoryFlags = indy.bsmArgs(3).asInstanceOf[Integer].toInt + val isSerializable = (metafactoryFlags & FLAG_SERIALIZABLE) != 0 + if isSerializable then + val implMethod = indy.bsmArgs(1).asInstanceOf[Handle] + indyLambdaBodyMethods += implMethod + case _ => + } + } + } + indyLambdaBodyMethods.toArray + } + + /* + * Add: + * + * private static Object 
$deserializeLambda$(SerializedLambda l) { + * try return indy[scala.runtime.LambdaDeserialize.bootstrap, targetMethodGroup$0](l) + * catch { + * case i: IllegalArgumentException => + * try return indy[scala.runtime.LambdaDeserialize.bootstrap, targetMethodGroup$1](l) + * catch { + * case i: IllegalArgumentException => + * ... + * return indy[scala.runtime.LambdaDeserialize.bootstrap, targetMethodGroup${NUM_GROUPS-1}](l) + * } + * + * We use invokedynamic here to enable caching within the deserializer without needing to + * host a static field in the enclosing class. This allows us to add this method to interfaces + * that define lambdas in default methods. + * + * SI-10232 we can't pass arbitrary number of method handles to the final varargs parameter of the bootstrap + * method due to a limitation in the JVM. Instead, we emit a separate invokedynamic bytecode for each group of target + * methods. + */ + private def addLambdaDeserialize(classNode: ClassNode, implMethodsArray: Array[Handle]): Unit = { + import asm.Opcodes._ + import bTypes._ + import coreBTypes._ + + val cw = classNode + + // Make sure to reference the ClassBTypes of all types that are used in the code generated + // here (e.g. java/util/Map) are initialized. Initializing a ClassBType adds it to + // `classBTypeFromInternalNameMap`. When writing the classfile, the asm ClassWriter computes + // stack map frames and invokes the `getCommonSuperClass` method. This method expects all + // ClassBTypes mentioned in the source code to exist in the map. 
+ + val serlamObjDesc = MethodBType(jliSerializedLambdaRef :: Nil, ObjectRef).descriptor + + val mv = cw.visitMethod(ACC_PRIVATE + ACC_STATIC + ACC_SYNTHETIC, "$deserializeLambda$", serlamObjDesc, null, null) + def emitLambdaDeserializeIndy(targetMethods: Seq[Handle]): Unit = { + mv.visitVarInsn(ALOAD, 0) + mv.visitInvokeDynamicInsn("lambdaDeserialize", serlamObjDesc, jliLambdaDeserializeBootstrapHandle, targetMethods: _*) + } + + val targetMethodGroupLimit = 255 - 1 - 3 // JVM limit. See See MAX_MH_ARITY in CallSite.java + val groups: Array[Array[Handle]] = implMethodsArray.grouped(targetMethodGroupLimit).toArray + val numGroups = groups.length + + import scala.tools.asm.Label + val initialLabels = Array.fill(numGroups - 1)(new Label()) + val terminalLabel = new Label + def nextLabel(i: Int) = if (i == numGroups - 2) terminalLabel else initialLabels(i + 1) + + for ((label, i) <- initialLabels.iterator.zipWithIndex) { + mv.visitTryCatchBlock(label, nextLabel(i), nextLabel(i), jlIllegalArgExceptionRef.internalName) + } + for ((label, i) <- initialLabels.iterator.zipWithIndex) { + mv.visitLabel(label) + emitLambdaDeserializeIndy(groups(i).toIndexedSeq) + mv.visitInsn(ARETURN) + } + mv.visitLabel(terminalLabel) + emitLambdaDeserializeIndy(groups(numGroups - 1).toIndexedSeq) + mv.visitInsn(ARETURN) + } + + private def setInnerClasses(classNode: ClassNode): Unit = if (classNode != null) { + classNode.innerClasses.clear() + val (declared, referred) = collectNestedClasses(classNode) + addInnerClasses(classNode, declared, referred) + } + + /** + * Visit the class node and collect all referenced nested classes. 
+ */ + private def collectNestedClasses(classNode: ClassNode): (List[ClassBType], List[ClassBType]) = { + // type InternalName = String + val c = new NestedClassesCollector[ClassBType](nestedOnly = true) { + def declaredNestedClasses(internalName: InternalName): List[ClassBType] = + bTypes.classBTypeFromInternalName(internalName).info.memberClasses + + def getClassIfNested(internalName: InternalName): Option[ClassBType] = { + val c = bTypes.classBTypeFromInternalName(internalName) + Option.when(c.isNestedClass)(c) + } + + def raiseError(msg: String, sig: String, e: Option[Throwable]): Unit = { + // don't crash on invalid generic signatures + } + } + c.visit(classNode) + (c.declaredInnerClasses.toList, c.referredInnerClasses.toList) + } + + def run(): Unit = { + while (true) { + val item = q2.poll + if (item.isPoison) { + q3 add poison3 + return + } + else { + try { + val plainNode = item.plain.classNode + localOptimizations(plainNode) + val serializableLambdas = collectSerializableLambdas(plainNode) + if (serializableLambdas.nonEmpty) + addLambdaDeserialize(plainNode, serializableLambdas) + setInnerClasses(plainNode) + setInnerClasses(item.mirror.classNode) + addToQ3(item) + } catch { + case ex: InterruptedException => + throw ex + case ex: Throwable => + println(s"Error while emitting ${item.plain.classNode.name}") + throw ex + } + } + } + } + + private def addToQ3(item: Item2) = { + + def getByteArray(cn: asm.tree.ClassNode): Array[Byte] = { + val cw = new CClassWriter(extraProc) + cn.accept(cw) + cw.toByteArray + } + + val Item2(arrivalPos, SubItem2(mirror, mirrorFile), SubItem2(plain, plainFile)) = item + + val mirrorC = if (mirror == null) null else SubItem3(mirror.name, getByteArray(mirror), mirrorFile) + val plainC = SubItem3(plain.name, getByteArray(plain), plainFile) + + if (AsmUtils.traceSerializedClassEnabled && plain.name.contains(AsmUtils.traceSerializedClassPattern)) { + if (mirrorC != null) AsmUtils.traceClass(mirrorC.jclassBytes) + 
AsmUtils.traceClass(plainC.jclassBytes) + } + + q3 add Item3(arrivalPos, mirrorC, plainC) + } + + } // end of class BCodePhase.Worker2 + + var arrivalPos: Int = 0 + + /* + * A run of the BCodePhase phase comprises: + * + * (a) set-up steps (most notably supporting maps in `BCodeTypes`, + * but also "the" writer where class files in byte-array form go) + * + * (b) building of ASM ClassNodes, their optimization and serialization. + * + * (c) tear down (closing the classfile-writer and clearing maps) + * + */ + def run(t: Tree)(using Context): Unit = { + this.tree = t + + // val bcodeStart = Statistics.startTimer(BackendStats.bcodeTimer) + + // val initStart = Statistics.startTimer(BackendStats.bcodeInitTimer) + arrivalPos = 0 // just in case + // scalaPrimitives.init() + bTypes.intializeCoreBTypes() + // Statistics.stopTimer(BackendStats.bcodeInitTimer, initStart) + + // initBytecodeWriter invokes fullName, thus we have to run it before the typer-dependent thread is activated. + bytecodeWriter = initBytecodeWriter() + mirrorCodeGen = new JMirrorBuilder + + val needsOutfileForSymbol = bytecodeWriter.isInstanceOf[ClassBytecodeWriter] + buildAndSendToDisk(needsOutfileForSymbol) + + // closing output files. + bytecodeWriter.close() + // Statistics.stopTimer(BackendStats.bcodeTimer, bcodeStart) + + if (ctx.compilerCallback != null) + ctx.compilerCallback.onSourceCompiled(sourceFile) + + /* TODO Bytecode can be verified (now that all classfiles have been written to disk) + * + * (1) asm.util.CheckAdapter.verify() + * public static void verify(ClassReader cr, ClassLoader loader, boolean dump, PrintWriter pw) + * passing a custom ClassLoader to verify inter-dependent classes. + * Alternatively, + * - an offline-bytecode verifier could be used (e.g. Maxine brings one as separate tool). 
+ * - -Xverify:all + * + * (2) if requested, check-java-signatures, over and beyond the syntactic checks in `getGenericSignature()` + * + */ + } + + /* + * Sequentially: + * (a) place all ClassDefs in queue-1 + * (b) dequeue one at a time from queue-1, convert it to ASM ClassNode, place in queue-2 + * (c) dequeue one at a time from queue-2, convert it to byte-array, place in queue-3 + * (d) serialize to disk by draining queue-3. + */ + private def buildAndSendToDisk(needsOutFolder: Boolean)(using Context) = { + try + feedPipeline1() + // val genStart = Statistics.startTimer(BackendStats.bcodeGenStat) + (new Worker1(needsOutFolder)).run() + // Statistics.stopTimer(BackendStats.bcodeGenStat, genStart) + + (new Worker2).run() + + // val writeStart = Statistics.startTimer(BackendStats.bcodeWriteTimer) + drainQ3() + // Statistics.stopTimer(BackendStats.bcodeWriteTimer, writeStart) + catch + case e: MethodTooLargeException => + val method = + s"${e.getClassName.replaceAll("/", ".")}.${e.getMethodName}" + val msg = + em"Generated bytecode for method '$method' is too large. Size: ${e.getCodeSize} bytes. Limit is 64KB" + report.error(msg) + case e: ClassTooLargeException => + val msg = + em"Class '${e.getClassName.replaceAll("/", ".")}' is too large. Constant pool size: ${e.getConstantPoolCount}. Limit is 64K entries" + report.error(msg) + + } + + /* Feed pipeline-1: place all ClassDefs on q1, recording their arrival position. */ + private def feedPipeline1() = { + def gen(tree: Tree): Unit = { + tree match { + case EmptyTree => () + case PackageDef(_, stats) => stats foreach gen + case ValDef(name, tpt, rhs) => () // module val not emitted + case cd: TypeDef => + q1 add Item1(arrivalPos, cd, int.ctx.compilationUnit) + arrivalPos += 1 + } + } + gen(tree) + q1 add poison1 + } + + /* Pipeline that writes classfile representations to disk. 
*/ + private def drainQ3() = { + + def sendToDisk(cfr: SubItem3): Unit = { + if (cfr != null){ + val SubItem3(jclassName, jclassBytes, jclassFile) = cfr + bytecodeWriter.writeClass(jclassName, jclassName, jclassBytes, jclassFile) + } + } + + var moreComing = true + // `expected` denotes the arrivalPos whose Item3 should be serialized next + var expected = 0 + + while (moreComing) { + val incoming = q3.poll + moreComing = !incoming.isPoison + if (moreComing) { + val item = incoming + sendToDisk(item.mirror) + sendToDisk(item.plain) + expected += 1 + } + } + + // we're done + assert(q1.isEmpty, s"Some ClassDefs remained in the first queue: $q1") + assert(q2.isEmpty, s"Some classfiles remained in the second queue: $q2") + assert(q3.isEmpty, s"Some classfiles weren't written to disk: $q3") + + } + //} // end of class BCodePhase +} diff --git a/tests/pos-with-compiler-cc/backend/jvm/GenBCodeOps.scala b/tests/pos-with-compiler-cc/backend/jvm/GenBCodeOps.scala new file mode 100644 index 000000000000..210e47566cb9 --- /dev/null +++ b/tests/pos-with-compiler-cc/backend/jvm/GenBCodeOps.scala @@ -0,0 +1,16 @@ +package dotty.tools +package backend +package jvm + +import scala.tools.asm + +object GenBCodeOps extends GenBCodeOps + +class GenBCodeOps { + extension (flags: Int) + def addFlagIf(cond: Boolean, flag: Int): Int = if cond then flags | flag else flags + + final val PublicStatic = asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_STATIC + final val PublicStaticFinal = asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_FINAL + final val PrivateStaticFinal = asm.Opcodes.ACC_PRIVATE | asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_FINAL +} diff --git a/tests/pos-with-compiler-cc/backend/jvm/GenericSignatureVisitor.scala b/tests/pos-with-compiler-cc/backend/jvm/GenericSignatureVisitor.scala new file mode 100644 index 000000000000..e9e532933290 --- /dev/null +++ b/tests/pos-with-compiler-cc/backend/jvm/GenericSignatureVisitor.scala @@ -0,0 +1,326 @@ +package 
dotty.tools.backend.jvm + +import scala.language.unsafeNulls + +import scala.tools.asm.{ClassReader, Type, Handle } +import scala.tools.asm.tree._ + +import scala.collection.mutable +import scala.util.control.{NoStackTrace, NonFatal} +import scala.annotation._ +import scala.jdk.CollectionConverters._ + +// Backported from scala/scala, commit sha: 724be0e9425b9ad07c244d25efdad695d75abbcf +// https://github.com/scala/scala/blob/724be0e9425b9ad07c244d25efdad695d75abbcf/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala#L928 +abstract class GenericSignatureVisitor(nestedOnly: Boolean) { + // For performance (`Char => Boolean` is not specialized) + private trait CharBooleanFunction { def apply(c: Char): Boolean } + + final def visitInternalName(internalName: String): Unit = visitInternalName(internalName, 0, if (internalName eq null) 0 else internalName.length) + def visitInternalName(internalName: String, offset: Int, length: Int): Unit + + def raiseError(msg: String, sig: String, e: Option[Throwable] = None): Unit + + def visitClassSignature(sig: String): Unit = if (sig != null) { + val p = new Parser(sig, nestedOnly) + p.safely { p.classSignature() } + } + + def visitMethodSignature(sig: String): Unit = if (sig != null) { + val p = new Parser(sig, nestedOnly) + p.safely { p.methodSignature() } + } + + def visitFieldSignature(sig: String): Unit = if (sig != null) { + val p = new Parser(sig, nestedOnly) + p.safely { p.fieldSignature() } + } + + private final class Parser(sig: String, nestedOnly: Boolean) { + + private var index = 0 + private val end = sig.length + + private val Aborted: Throwable = new NoStackTrace { } + private def abort(): Nothing = throw Aborted + + @inline def safely(f: => Unit): Unit = try f catch { + case Aborted => + case NonFatal(e) => raiseError(s"Exception thrown during signature parsing", sig, Some(e)) + } + + private def current = { + if (index >= end) { + raiseError(s"Out of bounds, $index >= $end", sig) + abort() // 
Don't continue, even if `notifyInvalidSignature` returns + } + sig.charAt(index) + } + + private def accept(c: Char): Unit = { + if (current != c) { + raiseError(s"Expected $c at $index, found $current", sig) + abort() + } + index += 1 + } + + private def skip(): Unit = { index += 1 } + private def getCurrentAndSkip(): Char = { val c = current; skip(); c } + + private def skipUntil(isDelimiter: CharBooleanFunction): Unit = { + while (!isDelimiter(current)) { index += 1 } + } + private def skipUntilDelimiter(delimiter: Char): Unit = { + sig.indexOf(delimiter, index) match { + case -1 => + raiseError(s"Out of bounds", sig) + abort() // Don't continue, even if `notifyInvalidSignature` returns + case i => + index = i + } + } + + private def appendUntil(builder: java.lang.StringBuilder, isDelimiter: CharBooleanFunction): Unit = { + val start = index + skipUntil(isDelimiter) + builder.append(sig, start, index) + } + + def isBaseType(c: Char): Boolean = c match { + case 'B' | 'C' | 'D' | 'F' | 'I' | 'J' | 'S' | 'Z' => true + case _ => false + } + + private val isClassNameEnd: CharBooleanFunction = (c: Char) => c == '<' || c == '.' || c == ';' + + private def typeArguments(): Unit = if (current == '<') { + skip() + while (current != '>') current match { + case '*' | '+' | '-' => + skip() + case _ => + referenceTypeSignature() + } + accept('>') + } + + @tailrec private def referenceTypeSignature(): Unit = getCurrentAndSkip() match { + case 'L' => + var names: java.lang.StringBuilder = null + + val start = index + var seenDollar = false + while (!isClassNameEnd(current)) { + seenDollar ||= current == '$' + index += 1 + } + if ((current == '.' 
|| seenDollar) || !nestedOnly) { + // OPT: avoid allocations when only a top-level class is encountered + names = new java.lang.StringBuilder(32) + names.append(sig, start, index) + visitInternalName(names.toString) + } + typeArguments() + + while (current == '.') { + skip() + names.append('$') + appendUntil(names, isClassNameEnd) + visitInternalName(names.toString) + typeArguments() + } + accept(';') + + case 'T' => + skipUntilDelimiter(';') + skip() + + case '[' => + if (isBaseType(current)) skip() + else referenceTypeSignature() + } + + private def typeParameters(): Unit = if (current == '<') { + skip() + while (current != '>') { + skipUntilDelimiter(':'); skip() + val c = current + // The ClassBound can be missing, but only if there's an InterfaceBound after. + // This is an assumption that's not in the spec, see https://stackoverflow.com/q/44284928 + if (c != ':' && c != '>') { referenceTypeSignature() } + while (current == ':') { skip(); referenceTypeSignature() } + } + accept('>') + } + + def classSignature(): Unit = { + typeParameters() + while (index < end) referenceTypeSignature() + } + + def methodSignature(): Unit = { + typeParameters() + + accept('(') + while (current != ')') { + if (isBaseType(current)) skip() + else referenceTypeSignature() + } + accept(')') + + if (current == 'V' || isBaseType(current)) skip() + else referenceTypeSignature() + + while (index < end) { + accept('^') + referenceTypeSignature() + } + } + + def fieldSignature(): Unit = if (sig != null) safely { + referenceTypeSignature() + } + } +} + +// Backported from scala/scala, commit sha: 724be0e9425b9ad07c244d25efdad695d75abbcf +// https://github.com/scala/scala/blob/724be0e9425b9ad07c244d25efdad695d75abbcf/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala#L790 +abstract class NestedClassesCollector[T](nestedOnly: Boolean) extends GenericSignatureVisitor(nestedOnly) { + type InternalName = String + + def declaredNestedClasses(internalName: InternalName): List[T] 
+ def getClassIfNested(internalName: InternalName): Option[T] + + val declaredInnerClasses = mutable.Set.empty[T] + val referredInnerClasses = mutable.Set.empty[T] + + def innerClasses: collection.Set[T] = declaredInnerClasses ++ referredInnerClasses + def clear(): Unit = { + declaredInnerClasses.clear() + referredInnerClasses.clear() + } + + def visit(classNode: ClassNode): Unit = { + visitInternalName(classNode.name) + declaredInnerClasses ++= declaredNestedClasses(classNode.name) + + visitInternalName(classNode.superName) + classNode.interfaces.asScala foreach visitInternalName + visitInternalName(classNode.outerClass) + + visitAnnotations(classNode.visibleAnnotations) + visitAnnotations(classNode.visibleTypeAnnotations) + visitAnnotations(classNode.invisibleAnnotations) + visitAnnotations(classNode.invisibleTypeAnnotations) + + visitClassSignature(classNode.signature) + + for (f <- classNode.fields.asScala) { + visitDescriptor(f.desc) + visitAnnotations(f.visibleAnnotations) + visitAnnotations(f.visibleTypeAnnotations) + visitAnnotations(f.invisibleAnnotations) + visitAnnotations(f.invisibleTypeAnnotations) + visitFieldSignature(f.signature) + } + + for (m <- classNode.methods.asScala) { + visitDescriptor(m.desc) + + visitAnnotations(m.visibleAnnotations) + visitAnnotations(m.visibleTypeAnnotations) + visitAnnotations(m.invisibleAnnotations) + visitAnnotations(m.invisibleTypeAnnotations) + visitAnnotationss(m.visibleParameterAnnotations) + visitAnnotationss(m.invisibleParameterAnnotations) + visitAnnotations(m.visibleLocalVariableAnnotations) + visitAnnotations(m.invisibleLocalVariableAnnotations) + + m.exceptions.asScala foreach visitInternalName + for (tcb <- m.tryCatchBlocks.asScala) visitInternalName(tcb.`type`) + + val iter = m.instructions.iterator + while (iter.hasNext) iter.next() match { + case ti: TypeInsnNode => visitInternalNameOrArrayReference(ti.desc) + case fi: FieldInsnNode => visitInternalNameOrArrayReference(fi.owner); visitDescriptor(fi.desc) 
+ case mi: MethodInsnNode => visitInternalNameOrArrayReference(mi.owner); visitDescriptor(mi.desc) + case id: InvokeDynamicInsnNode => visitDescriptor(id.desc); visitHandle(id.bsm); id.bsmArgs foreach visitConstant + case ci: LdcInsnNode => visitConstant(ci.cst) + case ma: MultiANewArrayInsnNode => visitDescriptor(ma.desc) + case _ => + } + + visitMethodSignature(m.signature) + } + } + + private def containsChar(s: String, offset: Int, length: Int, char: Char): Boolean = { + val ix = s.indexOf(char, offset) + !(ix == -1 || ix >= offset + length) + } + + def visitInternalName(internalName: String, offset: Int, length: Int): Unit = if (internalName != null && containsChar(internalName, offset, length, '$')) { + for (c <- getClassIfNested(internalName.substring(offset, length))) + if (!declaredInnerClasses.contains(c)) + referredInnerClasses += c + } + + // either an internal/Name or [[Linternal/Name; -- there are certain references in classfiles + // that are either an internal name (without the surrounding `L;`) or an array descriptor + // `[Linternal/Name;`. 
+ def visitInternalNameOrArrayReference(ref: String): Unit = if (ref != null) { + val bracket = ref.lastIndexOf('[') + if (bracket == -1) visitInternalName(ref) + else if (ref.charAt(bracket + 1) == 'L') visitInternalName(ref, bracket + 2, ref.length - 1) + } + + // we are only interested in the class references in the descriptor, so we can skip over + // primitives and the brackets of array descriptors + def visitDescriptor(desc: String): Unit = (desc.charAt(0): @switch) match { + case '(' => + var i = 1 + while (i < desc.length) { + if (desc.charAt(i) == 'L') { + val start = i + 1 // skip the L + var seenDollar = false + while ({val ch = desc.charAt(i); seenDollar ||= (ch == '$'); ch != ';'}) i += 1 + if (seenDollar) + visitInternalName(desc, start, i) + } + // skips over '[', ')', primitives + i += 1 + } + + case 'L' => + visitInternalName(desc, 1, desc.length - 1) + + case '[' => + visitInternalNameOrArrayReference(desc) + + case _ => // skip over primitive types + } + + def visitConstant(const: AnyRef): Unit = const match { + case t: Type => visitDescriptor(t.getDescriptor) + case _ => + } + + // in principle we could references to annotation types, as they only end up as strings in the + // constant pool, not as class references. however, the java compiler still includes nested + // annotation classes in the innerClass table, so we do the same. explained in detail in the + // large comment in class BTypes. 
+ def visitAnnotation(annot: AnnotationNode): Unit = { + visitDescriptor(annot.desc) + if (annot.values != null) annot.values.asScala foreach visitConstant + } + + def visitAnnotations(annots: java.util.List[_ <: AnnotationNode]) = if (annots != null) annots.asScala foreach visitAnnotation + def visitAnnotationss(annotss: Array[java.util.List[AnnotationNode]]) = if (annotss != null) annotss foreach visitAnnotations + + def visitHandle(handle: Handle): Unit = { + visitInternalNameOrArrayReference(handle.getOwner) + visitDescriptor(handle.getDesc) + } +} + diff --git a/tests/pos-with-compiler-cc/backend/jvm/LabelNode1.java b/tests/pos-with-compiler-cc/backend/jvm/LabelNode1.java new file mode 100644 index 000000000000..cf91fe619f5d --- /dev/null +++ b/tests/pos-with-compiler-cc/backend/jvm/LabelNode1.java @@ -0,0 +1,31 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package dotty.tools.backend.jvm; + +import scala.tools.asm.Label; +import scala.tools.asm.tree.ClassNode; +import scala.tools.asm.tree.LabelNode; + +/** + * A subclass of {@link LabelNode} to add user-definable flags. + */ +public class LabelNode1 extends LabelNode { + public LabelNode1() { + } + + public LabelNode1(Label label) { + super(label); + } + + public int flags; +} diff --git a/tests/pos-with-compiler-cc/backend/jvm/MethodNode1.java b/tests/pos-with-compiler-cc/backend/jvm/MethodNode1.java new file mode 100644 index 000000000000..bfa4401830ba --- /dev/null +++ b/tests/pos-with-compiler-cc/backend/jvm/MethodNode1.java @@ -0,0 +1,47 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package dotty.tools.backend.jvm; + +import scala.tools.asm.Label; +import scala.tools.asm.Opcodes; +import scala.tools.asm.tree.LabelNode; +import scala.tools.asm.tree.MethodNode; +/** + * A subclass of {@link MethodNode} to customize the representation of + * label nodes with {@link LabelNode1}. + */ +public class MethodNode1 extends MethodNode { + public MethodNode1(int api, int access, String name, String descriptor, String signature, String[] exceptions) { + super(api, access, name, descriptor, signature, exceptions); + } + + public MethodNode1(int access, String name, String descriptor, String signature, String[] exceptions) { + this(Opcodes.ASM6, access, name, descriptor, signature, exceptions); + } + + public MethodNode1(int api) { + super(api); + } + + public MethodNode1() { + this(Opcodes.ASM6); + } + + @Override + protected LabelNode getLabelNode(Label label) { + if (!(label.info instanceof LabelNode)) { + label.info = new LabelNode1(label); + } + return (LabelNode) label.info; + } +} diff --git a/tests/pos-with-compiler-cc/backend/jvm/Primitives.scala b/tests/pos-with-compiler-cc/backend/jvm/Primitives.scala new file mode 100644 index 000000000000..c9ddfeab24e1 --- /dev/null +++ b/tests/pos-with-compiler-cc/backend/jvm/Primitives.scala @@ -0,0 +1,191 @@ +package dotty.tools +package backend +package jvm + +import java.io.PrintWriter + +object Primitives { + /** This class represents a primitive operation. */ + class Primitive { + } + + /** This class represents a test operation. */ + sealed abstract class TestOp { + + /** Returns the negation of this operation. */ + def negate(): TestOp + + /** Returns a string representation of this operation. 
*/ + override def toString(): String + + /** used only from GenASM */ + def opcodeIF(): Int + + /** used only from GenASM */ + def opcodeIFICMP(): Int + + } + + /** An equality test */ + case object EQ extends TestOp { + def negate() = NE + override def toString() = "EQ" + override def opcodeIF() = scala.tools.asm.Opcodes.IFEQ + override def opcodeIFICMP() = scala.tools.asm.Opcodes.IF_ICMPEQ + } + + /** A non-equality test */ + case object NE extends TestOp { + def negate() = EQ + override def toString() = "NE" + override def opcodeIF() = scala.tools.asm.Opcodes.IFNE + override def opcodeIFICMP() = scala.tools.asm.Opcodes.IF_ICMPNE + } + + /** A less-than test */ + case object LT extends TestOp { + def negate() = GE + override def toString() = "LT" + override def opcodeIF() = scala.tools.asm.Opcodes.IFLT + override def opcodeIFICMP() = scala.tools.asm.Opcodes.IF_ICMPLT + } + + /** A greater-than-or-equal test */ + case object GE extends TestOp { + def negate() = LT + override def toString() = "GE" + override def opcodeIF() = scala.tools.asm.Opcodes.IFGE + override def opcodeIFICMP() = scala.tools.asm.Opcodes.IF_ICMPGE + } + + /** A less-than-or-equal test */ + case object LE extends TestOp { + def negate() = GT + override def toString() = "LE" + override def opcodeIF() = scala.tools.asm.Opcodes.IFLE + override def opcodeIFICMP() = scala.tools.asm.Opcodes.IF_ICMPLE + } + + /** A greater-than test */ + case object GT extends TestOp { + def negate() = LE + override def toString() = "GT" + override def opcodeIF() = scala.tools.asm.Opcodes.IFGT + override def opcodeIFICMP() = scala.tools.asm.Opcodes.IF_ICMPGT + } + + /** This class represents an arithmetic operation. */ + class ArithmeticOp { + + /** Returns a string representation of this operation. 
*/ + override def toString(): String = this match { + case ADD => "ADD" + case SUB => "SUB" + case MUL => "MUL" + case DIV => "DIV" + case REM => "REM" + case NOT => "NOT" + case _ => throw new RuntimeException("ArithmeticOp unknown case") + } + } + + /** An arithmetic addition operation */ + case object ADD extends ArithmeticOp + + /** An arithmetic subtraction operation */ + case object SUB extends ArithmeticOp + + /** An arithmetic multiplication operation */ + case object MUL extends ArithmeticOp + + /** An arithmetic division operation */ + case object DIV extends ArithmeticOp + + /** An arithmetic remainder operation */ + case object REM extends ArithmeticOp + + /** Bitwise negation. */ + case object NOT extends ArithmeticOp + + /** This class represents a shift operation. */ + class ShiftOp { + + /** Returns a string representation of this operation. */ + override def toString(): String = this match { + case LSL => "LSL" + case ASR => "ASR" + case LSR => "LSR" + case _ => throw new RuntimeException("ShitOp unknown case") + } + } + + /** A logical shift to the left */ + case object LSL extends ShiftOp + + /** An arithmetic shift to the right */ + case object ASR extends ShiftOp + + /** A logical shift to the right */ + case object LSR extends ShiftOp + + /** This class represents a logical operation. */ + class LogicalOp { + + /** Returns a string representation of this operation. */ + override def toString(): String = this match { + case AND => "AND" + case OR => "OR" + case XOR => "XOR" + case _ => throw new RuntimeException("LogicalOp unknown case") + } + } + + /** A bitwise AND operation */ + case object AND extends LogicalOp + + /** A bitwise OR operation */ + case object OR extends LogicalOp + + /** A bitwise XOR operation */ + case object XOR extends LogicalOp + + /** Signals the beginning of a series of concatenations. 
+ * On the JVM platform, it should create a new StringBuffer + */ + case object StartConcat extends Primitive + + /** + * type: (buf) => STR + * jvm : It should turn the StringBuffer into a String. + */ + case object EndConcat extends Primitive + + /** Pretty printer for primitives */ + class PrimitivePrinter(out: PrintWriter) { + def print(s: String): PrimitivePrinter = { + out.print(s) + this + } + } + + /** This class represents a comparison operation. */ + class ComparisonOp { + + /** Returns a string representation of this operation. */ + override def toString(): String = this match { + case CMPL => "CMPL" + case CMP => "CMP" + case CMPG => "CMPG" + case _ => throw new RuntimeException("ComparisonOp unknown case") + } + } + + /** A comparison operation with -1 default for NaNs */ + case object CMPL extends ComparisonOp + + /** A comparison operation with no default for NaNs */ + case object CMP extends ComparisonOp + + /** A comparison operation with +1 default for NaNs */ + case object CMPG extends ComparisonOp +} diff --git a/tests/pos-with-compiler-cc/backend/jvm/scalaPrimitives.scala b/tests/pos-with-compiler-cc/backend/jvm/scalaPrimitives.scala new file mode 100644 index 000000000000..420ff7b20423 --- /dev/null +++ b/tests/pos-with-compiler-cc/backend/jvm/scalaPrimitives.scala @@ -0,0 +1,412 @@ +package dotty.tools +package backend.jvm + +import dotc.ast.Trees.Select +import dotc.ast.tpd._ +import dotc.core._ +import Contexts._ +import Names.TermName, StdNames._ +import Types.{JavaArrayType, UnspecifiedErrorType, Type} +import Symbols.{Symbol, NoSymbol} +import Decorators.em +import dotc.report +import dotc.util.ReadOnlyMap + +import scala.annotation.threadUnsafe + +/** Scala primitive operations are represented as methods in `Any` and + * `AnyVal` subclasses. Here we demultiplex them by providing a mapping + * from their symbols to integers. Different methods exist for + * different value types, but with the same meaning (like plus, minus, + * etc.). 
They will all be mapped to the same int. + * + * Note: The three equal methods have the following semantics: + * - `"=="` checks for `null`, and if non-null, calls + * `java.lang.Object.equals` + * `(class: Any; modifier: final)`. Primitive: `EQ` + * - `"eq"` usual reference comparison + * `(class: AnyRef; modifier: final)`. Primitive: `ID` + * - `"equals"` user-defined equality (Java semantics) + * `(class: Object; modifier: none)`. Primitive: `EQUALS` + * + * Inspired from the `scalac` compiler. + */ +class DottyPrimitives(ictx: DetachedContext) { + import dotty.tools.backend.ScalaPrimitivesOps._ + + @threadUnsafe private lazy val primitives: ReadOnlyMap[Symbol, Int] = init + + /** Return the code for the given symbol. */ + def getPrimitive(sym: Symbol): Int = { + primitives(sym) + } + + /** + * Return the primitive code of the given operation. If the + * operation is an array get/set, we inspect the type of the receiver + * to demux the operation. + * + * @param fun The method symbol + * @param tpe The type of the receiver object. 
It is used only for array + * operations + */ + def getPrimitive(app: Apply, tpe: Type)(using Context): Int = { + val fun = app.fun.symbol + val defn = ctx.definitions + val code = app.fun match { + case Select(_, nme.primitive.arrayLength) => + LENGTH + case Select(_, nme.primitive.arrayUpdate) => + UPDATE + case Select(_, nme.primitive.arrayApply) => + APPLY + case _ => getPrimitive(fun) + } + + def elementType: Type = tpe.widenDealias match { + case defn.ArrayOf(el) => el + case JavaArrayType(el) => el + case _ => + report.error(em"expected Array $tpe") + UnspecifiedErrorType + } + + code match { + + case APPLY => + defn.scalaClassName(elementType) match { + case tpnme.Boolean => ZARRAY_GET + case tpnme.Byte => BARRAY_GET + case tpnme.Short => SARRAY_GET + case tpnme.Char => CARRAY_GET + case tpnme.Int => IARRAY_GET + case tpnme.Long => LARRAY_GET + case tpnme.Float => FARRAY_GET + case tpnme.Double => DARRAY_GET + case _ => OARRAY_GET + } + + case UPDATE => + defn.scalaClassName(elementType) match { + case tpnme.Boolean => ZARRAY_SET + case tpnme.Byte => BARRAY_SET + case tpnme.Short => SARRAY_SET + case tpnme.Char => CARRAY_SET + case tpnme.Int => IARRAY_SET + case tpnme.Long => LARRAY_SET + case tpnme.Float => FARRAY_SET + case tpnme.Double => DARRAY_SET + case _ => OARRAY_SET + } + + case LENGTH => + defn.scalaClassName(elementType) match { + case tpnme.Boolean => ZARRAY_LENGTH + case tpnme.Byte => BARRAY_LENGTH + case tpnme.Short => SARRAY_LENGTH + case tpnme.Char => CARRAY_LENGTH + case tpnme.Int => IARRAY_LENGTH + case tpnme.Long => LARRAY_LENGTH + case tpnme.Float => FARRAY_LENGTH + case tpnme.Double => DARRAY_LENGTH + case _ => OARRAY_LENGTH + } + + case _ => + code + } + } + + /** Initialize the primitive map */ + private def init: ReadOnlyMap[Symbol, Int] = { + + given Context = ictx + + import Symbols.defn + val primitives = Symbols.MutableSymbolMap[Int](512) + + /** Add a primitive operation to the map */ + def addPrimitive(s: Symbol, code: Int): 
Unit = { + assert(!(primitives contains s), "Duplicate primitive " + s) + primitives(s) = code + } + + def addPrimitives(cls: Symbol, method: TermName, code: Int)(using Context): Unit = { + val alts = cls.info.member(method).alternatives.map(_.symbol) + if (alts.isEmpty) + report.error(em"Unknown primitive method $cls.$method") + else alts foreach (s => + addPrimitive(s, + s.info.paramInfoss match { + case List(tp :: _) if code == ADD && tp =:= ctx.definitions.StringType => CONCAT + case _ => code + } + ) + ) + } + + // scala.Any + addPrimitive(defn.Any_==, EQ) + addPrimitive(defn.Any_!=, NE) + addPrimitive(defn.Any_isInstanceOf, IS) + addPrimitive(defn.Any_asInstanceOf, AS) + addPrimitive(defn.Any_##, HASH) + + // java.lang.Object + addPrimitive(defn.Object_eq, ID) + addPrimitive(defn.Object_ne, NI) + /* addPrimitive(defn.Any_==, EQ) + addPrimitive(defn.Any_!=, NE)*/ + addPrimitive(defn.Object_synchronized, SYNCHRONIZED) + /*addPrimitive(defn.Any_isInstanceOf, IS) + addPrimitive(defn.Any_asInstanceOf, AS)*/ + + // java.lang.String + addPrimitive(defn.String_+, CONCAT) + + // scala.Array + lazy val ArrayClass = defn.ArrayClass + addPrimitives(ArrayClass, nme.length, LENGTH) + addPrimitives(ArrayClass, nme.apply, APPLY) + addPrimitives(ArrayClass, nme.update, UPDATE) + + // scala.Boolean + lazy val BooleanClass = defn.BooleanClass + addPrimitives(BooleanClass, nme.EQ, EQ) + addPrimitives(BooleanClass, nme.NE, NE) + addPrimitives(BooleanClass, nme.UNARY_!, ZNOT) + addPrimitives(BooleanClass, nme.ZOR, ZOR) + addPrimitives(BooleanClass, nme.ZAND, ZAND) + addPrimitives(BooleanClass, nme.OR, OR) + addPrimitives(BooleanClass, nme.AND, AND) + addPrimitives(BooleanClass, nme.XOR, XOR) + + // scala.Byte + lazy val ByteClass = defn.ByteClass + addPrimitives(ByteClass, nme.EQ, EQ) + addPrimitives(ByteClass, nme.NE, NE) + addPrimitives(ByteClass, nme.ADD, ADD) + addPrimitives(ByteClass, nme.SUB, SUB) + addPrimitives(ByteClass, nme.MUL, MUL) + addPrimitives(ByteClass, nme.DIV, 
DIV) + addPrimitives(ByteClass, nme.MOD, MOD) + addPrimitives(ByteClass, nme.LT, LT) + addPrimitives(ByteClass, nme.LE, LE) + addPrimitives(ByteClass, nme.GT, GT) + addPrimitives(ByteClass, nme.GE, GE) + addPrimitives(ByteClass, nme.XOR, XOR) + addPrimitives(ByteClass, nme.OR, OR) + addPrimitives(ByteClass, nme.AND, AND) + addPrimitives(ByteClass, nme.LSL, LSL) + addPrimitives(ByteClass, nme.LSR, LSR) + addPrimitives(ByteClass, nme.ASR, ASR) + // conversions + addPrimitives(ByteClass, nme.toByte, B2B) + addPrimitives(ByteClass, nme.toShort, B2S) + addPrimitives(ByteClass, nme.toChar, B2C) + addPrimitives(ByteClass, nme.toInt, B2I) + addPrimitives(ByteClass, nme.toLong, B2L) + // unary methods + addPrimitives(ByteClass, nme.UNARY_+, POS) + addPrimitives(ByteClass, nme.UNARY_-, NEG) + addPrimitives(ByteClass, nme.UNARY_~, NOT) + + addPrimitives(ByteClass, nme.toFloat, B2F) + addPrimitives(ByteClass, nme.toDouble, B2D) + + // scala.Short + lazy val ShortClass = defn.ShortClass + addPrimitives(ShortClass, nme.EQ, EQ) + addPrimitives(ShortClass, nme.NE, NE) + addPrimitives(ShortClass, nme.ADD, ADD) + addPrimitives(ShortClass, nme.SUB, SUB) + addPrimitives(ShortClass, nme.MUL, MUL) + addPrimitives(ShortClass, nme.DIV, DIV) + addPrimitives(ShortClass, nme.MOD, MOD) + addPrimitives(ShortClass, nme.LT, LT) + addPrimitives(ShortClass, nme.LE, LE) + addPrimitives(ShortClass, nme.GT, GT) + addPrimitives(ShortClass, nme.GE, GE) + addPrimitives(ShortClass, nme.XOR, XOR) + addPrimitives(ShortClass, nme.OR, OR) + addPrimitives(ShortClass, nme.AND, AND) + addPrimitives(ShortClass, nme.LSL, LSL) + addPrimitives(ShortClass, nme.LSR, LSR) + addPrimitives(ShortClass, nme.ASR, ASR) + // conversions + addPrimitives(ShortClass, nme.toByte, S2B) + addPrimitives(ShortClass, nme.toShort, S2S) + addPrimitives(ShortClass, nme.toChar, S2C) + addPrimitives(ShortClass, nme.toInt, S2I) + addPrimitives(ShortClass, nme.toLong, S2L) + // unary methods + addPrimitives(ShortClass, nme.UNARY_+, POS) + 
addPrimitives(ShortClass, nme.UNARY_-, NEG) + addPrimitives(ShortClass, nme.UNARY_~, NOT) + + addPrimitives(ShortClass, nme.toFloat, S2F) + addPrimitives(ShortClass, nme.toDouble, S2D) + + // scala.Char + lazy val CharClass = defn.CharClass + addPrimitives(CharClass, nme.EQ, EQ) + addPrimitives(CharClass, nme.NE, NE) + addPrimitives(CharClass, nme.ADD, ADD) + addPrimitives(CharClass, nme.SUB, SUB) + addPrimitives(CharClass, nme.MUL, MUL) + addPrimitives(CharClass, nme.DIV, DIV) + addPrimitives(CharClass, nme.MOD, MOD) + addPrimitives(CharClass, nme.LT, LT) + addPrimitives(CharClass, nme.LE, LE) + addPrimitives(CharClass, nme.GT, GT) + addPrimitives(CharClass, nme.GE, GE) + addPrimitives(CharClass, nme.XOR, XOR) + addPrimitives(CharClass, nme.OR, OR) + addPrimitives(CharClass, nme.AND, AND) + addPrimitives(CharClass, nme.LSL, LSL) + addPrimitives(CharClass, nme.LSR, LSR) + addPrimitives(CharClass, nme.ASR, ASR) + // conversions + addPrimitives(CharClass, nme.toByte, C2B) + addPrimitives(CharClass, nme.toShort, C2S) + addPrimitives(CharClass, nme.toChar, C2C) + addPrimitives(CharClass, nme.toInt, C2I) + addPrimitives(CharClass, nme.toLong, C2L) + // unary methods + addPrimitives(CharClass, nme.UNARY_+, POS) + addPrimitives(CharClass, nme.UNARY_-, NEG) + addPrimitives(CharClass, nme.UNARY_~, NOT) + addPrimitives(CharClass, nme.toFloat, C2F) + addPrimitives(CharClass, nme.toDouble, C2D) + + // scala.Int + lazy val IntClass = defn.IntClass + addPrimitives(IntClass, nme.EQ, EQ) + addPrimitives(IntClass, nme.NE, NE) + addPrimitives(IntClass, nme.ADD, ADD) + addPrimitives(IntClass, nme.SUB, SUB) + addPrimitives(IntClass, nme.MUL, MUL) + addPrimitives(IntClass, nme.DIV, DIV) + addPrimitives(IntClass, nme.MOD, MOD) + addPrimitives(IntClass, nme.LT, LT) + addPrimitives(IntClass, nme.LE, LE) + addPrimitives(IntClass, nme.GT, GT) + addPrimitives(IntClass, nme.GE, GE) + addPrimitives(IntClass, nme.XOR, XOR) + addPrimitives(IntClass, nme.OR, OR) + addPrimitives(IntClass, nme.AND, 
AND) + addPrimitives(IntClass, nme.LSL, LSL) + addPrimitives(IntClass, nme.LSR, LSR) + addPrimitives(IntClass, nme.ASR, ASR) + // conversions + addPrimitives(IntClass, nme.toByte, I2B) + addPrimitives(IntClass, nme.toShort, I2S) + addPrimitives(IntClass, nme.toChar, I2C) + addPrimitives(IntClass, nme.toInt, I2I) + addPrimitives(IntClass, nme.toLong, I2L) + // unary methods + addPrimitives(IntClass, nme.UNARY_+, POS) + addPrimitives(IntClass, nme.UNARY_-, NEG) + addPrimitives(IntClass, nme.UNARY_~, NOT) + addPrimitives(IntClass, nme.toFloat, I2F) + addPrimitives(IntClass, nme.toDouble, I2D) + + // scala.Long + lazy val LongClass = defn.LongClass + addPrimitives(LongClass, nme.EQ, EQ) + addPrimitives(LongClass, nme.NE, NE) + addPrimitives(LongClass, nme.ADD, ADD) + addPrimitives(LongClass, nme.SUB, SUB) + addPrimitives(LongClass, nme.MUL, MUL) + addPrimitives(LongClass, nme.DIV, DIV) + addPrimitives(LongClass, nme.MOD, MOD) + addPrimitives(LongClass, nme.LT, LT) + addPrimitives(LongClass, nme.LE, LE) + addPrimitives(LongClass, nme.GT, GT) + addPrimitives(LongClass, nme.GE, GE) + addPrimitives(LongClass, nme.XOR, XOR) + addPrimitives(LongClass, nme.OR, OR) + addPrimitives(LongClass, nme.AND, AND) + addPrimitives(LongClass, nme.LSL, LSL) + addPrimitives(LongClass, nme.LSR, LSR) + addPrimitives(LongClass, nme.ASR, ASR) + // conversions + addPrimitives(LongClass, nme.toByte, L2B) + addPrimitives(LongClass, nme.toShort, L2S) + addPrimitives(LongClass, nme.toChar, L2C) + addPrimitives(LongClass, nme.toInt, L2I) + addPrimitives(LongClass, nme.toLong, L2L) + // unary methods + addPrimitives(LongClass, nme.UNARY_+, POS) + addPrimitives(LongClass, nme.UNARY_-, NEG) + addPrimitives(LongClass, nme.UNARY_~, NOT) + addPrimitives(LongClass, nme.toFloat, L2F) + addPrimitives(LongClass, nme.toDouble, L2D) + + // scala.Float + lazy val FloatClass = defn.FloatClass + addPrimitives(FloatClass, nme.EQ, EQ) + addPrimitives(FloatClass, nme.NE, NE) + addPrimitives(FloatClass, nme.ADD, ADD) 
+ addPrimitives(FloatClass, nme.SUB, SUB) + addPrimitives(FloatClass, nme.MUL, MUL) + addPrimitives(FloatClass, nme.DIV, DIV) + addPrimitives(FloatClass, nme.MOD, MOD) + addPrimitives(FloatClass, nme.LT, LT) + addPrimitives(FloatClass, nme.LE, LE) + addPrimitives(FloatClass, nme.GT, GT) + addPrimitives(FloatClass, nme.GE, GE) + // conversions + addPrimitives(FloatClass, nme.toByte, F2B) + addPrimitives(FloatClass, nme.toShort, F2S) + addPrimitives(FloatClass, nme.toChar, F2C) + addPrimitives(FloatClass, nme.toInt, F2I) + addPrimitives(FloatClass, nme.toLong, F2L) + addPrimitives(FloatClass, nme.toFloat, F2F) + addPrimitives(FloatClass, nme.toDouble, F2D) + // unary methods + addPrimitives(FloatClass, nme.UNARY_+, POS) + addPrimitives(FloatClass, nme.UNARY_-, NEG) + + // scala.Double + lazy val DoubleClass = defn.DoubleClass + addPrimitives(DoubleClass, nme.EQ, EQ) + addPrimitives(DoubleClass, nme.NE, NE) + addPrimitives(DoubleClass, nme.ADD, ADD) + addPrimitives(DoubleClass, nme.SUB, SUB) + addPrimitives(DoubleClass, nme.MUL, MUL) + addPrimitives(DoubleClass, nme.DIV, DIV) + addPrimitives(DoubleClass, nme.MOD, MOD) + addPrimitives(DoubleClass, nme.LT, LT) + addPrimitives(DoubleClass, nme.LE, LE) + addPrimitives(DoubleClass, nme.GT, GT) + addPrimitives(DoubleClass, nme.GE, GE) + // conversions + addPrimitives(DoubleClass, nme.toByte, D2B) + addPrimitives(DoubleClass, nme.toShort, D2S) + addPrimitives(DoubleClass, nme.toChar, D2C) + addPrimitives(DoubleClass, nme.toInt, D2I) + addPrimitives(DoubleClass, nme.toLong, D2L) + addPrimitives(DoubleClass, nme.toFloat, D2F) + addPrimitives(DoubleClass, nme.toDouble, D2D) + // unary methods + addPrimitives(DoubleClass, nme.UNARY_+, POS) + addPrimitives(DoubleClass, nme.UNARY_-, NEG) + + + primitives + } + + def isPrimitive(sym: Symbol): Boolean = + primitives.contains(sym) + + def isPrimitive(fun: Tree): Boolean = + given Context = ictx + primitives.contains(fun.symbol) + || (fun.symbol == NoSymbol // the only trees that do 
not have a symbol assigned are array.{update,select,length,clone}} + && { + fun match + case Select(_, StdNames.nme.clone_) => false // but array.clone is NOT a primitive op. + case _ => true + }) +} diff --git a/tests/pos-with-compiler-cc/backend/sjs/GenSJSIR.scala b/tests/pos-with-compiler-cc/backend/sjs/GenSJSIR.scala new file mode 100644 index 000000000000..1579b4577933 --- /dev/null +++ b/tests/pos-with-compiler-cc/backend/sjs/GenSJSIR.scala @@ -0,0 +1,23 @@ +package dotty.tools.backend.sjs + +import dotty.tools.dotc.core._ +import Contexts._ +import Phases._ + +/** Generates Scala.js IR files for the compilation unit. */ +class GenSJSIR extends Phase { + + override def phaseName: String = GenSJSIR.name + + override def description: String = GenSJSIR.description + + override def isRunnable(using Context): Boolean = + super.isRunnable && ctx.settings.scalajs.value + + def run(using Context): Unit = + new JSCodeGen().run() +} + +object GenSJSIR: + val name: String = "genSJSIR" + val description: String = "generate .sjsir files for Scala.js" diff --git a/tests/pos-with-compiler-cc/backend/sjs/JSCodeGen.scala b/tests/pos-with-compiler-cc/backend/sjs/JSCodeGen.scala new file mode 100644 index 000000000000..87d816e56192 --- /dev/null +++ b/tests/pos-with-compiler-cc/backend/sjs/JSCodeGen.scala @@ -0,0 +1,4897 @@ +package dotty.tools.backend.sjs + +import scala.language.unsafeNulls + +import scala.annotation.switch +import scala.collection.mutable + +import dotty.tools.FatalError +import dotty.tools.dotc.CompilationUnit +import dotty.tools.dotc.ast.tpd +import dotty.tools.dotc.core._ +import Contexts._ +import Decorators._ +import Flags._ +import Names._ +import NameKinds.DefaultGetterName +import Types._ +import Symbols._ +import Phases._ +import StdNames._ +import TypeErasure.ErasedValueType + +import dotty.tools.dotc.transform.{Erasure, ValueClasses} +import dotty.tools.dotc.transform.SymUtils._ +import dotty.tools.dotc.util.SourcePosition +import 
dotty.tools.dotc.report + +import org.scalajs.ir +import org.scalajs.ir.{ClassKind, Position, Names => jsNames, Trees => js, Types => jstpe} +import org.scalajs.ir.Names.{ClassName, MethodName, SimpleMethodName} +import org.scalajs.ir.OriginalName +import org.scalajs.ir.OriginalName.NoOriginalName +import org.scalajs.ir.Trees.OptimizerHints + +import dotty.tools.dotc.transform.sjs.JSSymUtils._ + +import JSEncoding._ +import ScopedVar.withScopedVars +import annotation.retains + +/** Main codegen for Scala.js IR. + * + * [[GenSJSIR]] creates one instance of `JSCodeGen` per compilation unit. + * The `run()` method processes the whole compilation unit and generates + * `.sjsir` files for it. + * + * There are 4 main levels of translation: + * + * - `genCompilationUnit()` iterates through all the type definitions in the + * compilation unit. Each generated `js.ClassDef` is serialized to an + * `.sjsir` file. + * - `genScalaClass()` and other similar methods generate the skeleton of + * classes. + * - `genMethod()` and similar methods generate the declarations of methods. + * - `genStatOrExpr()` and everything else generate the bodies of methods. 
+ */ +class JSCodeGen()(using genCtx: DetachedContext) { + import JSCodeGen._ + import tpd._ + + val sjsPlatform = dotty.tools.dotc.config.SJSPlatform.sjsPlatform + val jsdefn = JSDefinitions.jsdefn + private val primitives = new JSPrimitives(genCtx) + + val positionConversions = new JSPositions()(using genCtx) + import positionConversions._ + + private val jsExportsGen = new JSExportsGen(this) + + // Some state -------------------------------------------------------------- + + private val lazilyGeneratedAnonClasses = new MutableSymbolMap[TypeDef] + private val generatedClasses = mutable.ListBuffer.empty[js.ClassDef] + private val generatedStaticForwarderClasses = mutable.ListBuffer.empty[(Symbol, js.ClassDef)] + + val currentClassSym: ScopedVar[Symbol] = new ScopedVar[Symbol] + private val currentMethodSym = new ScopedVar[Symbol] + private val localNames = new ScopedVar[LocalNameGenerator] + private val thisLocalVarIdent = new ScopedVar[Option[js.LocalIdent]] + private val isModuleInitialized = new ScopedVar[ScopedVar.VarBox[Boolean]] + private val undefinedDefaultParams = new ScopedVar[mutable.Set[Symbol]] + + /* Contextual JS class value for some operations of nested JS classes that need one. */ + private val contextualJSClassValue = new ScopedVar[Option[js.Tree]](None) + + /** Resets all of the scoped state in the context of `body`. 
*/ + private def resetAllScopedVars[T](body: => T): T = { + withScopedVars( + currentClassSym := null, + currentMethodSym := null, + localNames := null, + thisLocalVarIdent := null, + isModuleInitialized := null, + undefinedDefaultParams := null + ) { + body + } + } + + private def withPerMethodBodyState[A](methodSym: Symbol)(body: => A): A = { + withScopedVars( + currentMethodSym := methodSym, + thisLocalVarIdent := None, + isModuleInitialized := new ScopedVar.VarBox(false), + undefinedDefaultParams := mutable.Set.empty, + ) { + body + } + } + + private def acquireContextualJSClassValue[A](f: Option[js.Tree] => A): A = { + val jsClassValue = contextualJSClassValue.get + withScopedVars( + contextualJSClassValue := None + ) { + f(jsClassValue) + } + } + + def withNewLocalNameScope[A](body: => A): A = { + withScopedVars(localNames := new LocalNameGenerator) { + body + } + } + + /** Implicitly materializes the current local name generator. */ + implicit def implicitLocalNames: LocalNameGenerator = localNames.get + + def currentThisType: jstpe.Type = { + encodeClassType(currentClassSym) match { + case tpe @ jstpe.ClassType(cls) => + jstpe.BoxedClassToPrimType.getOrElse(cls, tpe) + case tpe => + tpe + } + } + + /** Returns a new fresh local identifier. */ + private def freshLocalIdent()(implicit pos: Position): js.LocalIdent = + localNames.get.freshLocalIdent() + + /** Returns a new fresh local identifier. */ + def freshLocalIdent(base: String)(implicit pos: Position): js.LocalIdent = + localNames.get.freshLocalIdent(base) + + /** Returns a new fresh local identifier. 
*/ + private def freshLocalIdent(base: TermName)(implicit pos: Position): js.LocalIdent = + localNames.get.freshLocalIdent(base) + + private def consumeLazilyGeneratedAnonClass(sym: Symbol): TypeDef = { + val typeDef = lazilyGeneratedAnonClasses.remove(sym) + if (typeDef == null) { + throw new FatalError( + i"Could not find tree for lazily generated anonymous class ${sym.fullName} at ${sym.sourcePos}") + } else { + typeDef + } + } + + // Compilation unit -------------------------------------------------------- + + def run(): Unit = { + try { + genCompilationUnit(ctx.compilationUnit) + } finally { + generatedClasses.clear() + generatedStaticForwarderClasses.clear() + } + } + + /** Generates the Scala.js IR for a compilation unit + * This method iterates over all the class and interface definitions + * found in the compilation unit and emits their IR (.sjsir). + * + * Some classes are never actually emitted: + * - Classes representing primitive types + * - The scala.Array class + * + * TODO Some classes representing anonymous functions are not actually emitted. + * Instead, a temporary representation of their `apply` method is built + * and recorded, so that it can be inlined as a JavaScript anonymous + * function in the method that instantiates it. + * + * Other ClassDefs are emitted according to their nature: + * * Non-native JS class -> `genNonNativeJSClass()` + * * Other JS type (<: js.Any) -> `genRawJSClassData()` + * * Interface -> `genInterface()` + * * Normal class -> `genClass()` + */ + private def genCompilationUnit(cunit: CompilationUnit): Unit = { + def collectTypeDefs(tree: Tree): List[TypeDef] = { + tree match { + case EmptyTree => Nil + case PackageDef(_, stats) => stats.flatMap(collectTypeDefs) + case cd: TypeDef => cd :: Nil + case _: ValDef => Nil // module instance + } + } + val allTypeDefs = collectTypeDefs(cunit.tpdTree) + + /* #13221 Set JavaStatic on all the Module fields of static module classes. 
+ * This is necessary for `desugarIdent` not to crash in some obscure + * scenarios. + * + * !!! Part of this logic is duplicated in BCodeSkelBuilder.genPlainClass + * + * However, here we only do this for Module fields, not all fields. + */ + for (typeDef <- allTypeDefs) { + if (typeDef.symbol.is(ModuleClass)) { + typeDef.symbol.info.decls.foreach { f => + if (f.isField && f.is(Module)) + f.setFlag(JavaStatic) + } + } + } + + val (anonJSClassTypeDefs, otherTypeDefs) = + allTypeDefs.partition(td => td.symbol.isAnonymousClass && td.symbol.isJSType) + + // Record the TypeDefs of anonymous JS classes to be lazily generated + for (td <- anonJSClassTypeDefs) + lazilyGeneratedAnonClasses(td.symbol) = td + + /* Finally, we emit true code for the remaining class defs. */ + for (td <- otherTypeDefs) { + val sym = td.symbol + implicit val pos: Position = sym.span + + /* Do not actually emit code for primitive types nor scala.Array. */ + val isPrimitive = + sym.isPrimitiveValueClass || sym == defn.ArrayClass + + if (!isPrimitive) { + withScopedVars( + currentClassSym := sym + ) { + val tree = if (sym.isJSType) { + if (!sym.is(Trait) && sym.isNonNativeJSClass) + genNonNativeJSClass(td) + else + genRawJSClassData(td) + } else if (sym.is(Trait)) { + genInterface(td) + } else { + genScalaClass(td) + } + + generatedClasses += tree + } + } + } + + for (tree <- generatedClasses) + genIRFile(cunit, tree) + + if (generatedStaticForwarderClasses.nonEmpty) { + /* #4148 Add generated static forwarder classes, except those that + * would collide with regular classes on case insensitive file systems. + */ + + /* I could not find any reference anywhere about what locale is used + * by case insensitive file systems to compare case-insensitively. 
+ * In doubt, force the English locale, which is probably going to do + * the right thing in virtually all cases (especially if users stick + * to ASCII class names), and it has the merit of being deterministic, + * as opposed to using the OS' default locale. + * The JVM backend performs a similar test to emit a warning for + * conflicting top-level classes. However, it uses `toLowerCase()` + * without argument, which is not deterministic. + */ + def caseInsensitiveNameOf(classDef: js.ClassDef): String = + classDef.name.name.nameString.toLowerCase(java.util.Locale.ENGLISH) + + val generatedCaseInsensitiveNames = + generatedClasses.map(caseInsensitiveNameOf).toSet + + for ((site, classDef) <- generatedStaticForwarderClasses) { + if (!generatedCaseInsensitiveNames.contains(caseInsensitiveNameOf(classDef))) { + genIRFile(cunit, classDef) + } else { + report.warning( + s"Not generating the static forwarders of ${classDef.name.name.nameString} " + + "because its name differs only in case from the name of another class or trait in this compilation unit.", + site.srcPos) + } + } + } + } + + private def genIRFile(cunit: CompilationUnit, tree: ir.Trees.ClassDef): Unit = { + val outfile = getFileFor(cunit, tree.name.name, ".sjsir") + val output = outfile.bufferedOutput + try { + ir.Serializers.serialize(output, tree) + } finally { + output.close() + } + } + + private def getFileFor(cunit: CompilationUnit, className: ClassName, + suffix: String): dotty.tools.io.AbstractFile = { + val outputDirectory = ctx.settings.outputDir.value + val pathParts = className.nameString.split('.') + val dir = pathParts.init.foldLeft(outputDirectory)(_.subdirectoryNamed(_)) + val filename = pathParts.last + dir.fileNamed(filename + suffix) + } + + // Generate a class -------------------------------------------------------- + + /** Gen the IR ClassDef for a Scala class definition (maybe a module class). 
+ */ + private def genScalaClass(td: TypeDef): js.ClassDef = { + val sym = td.symbol.asClass + implicit val pos: SourcePosition = sym.sourcePos + + assert(!sym.is(Trait), + "genScalaClass() must be called only for normal classes: "+sym) + assert(sym.superClass != NoSymbol, sym) + + if (hasDefaultCtorArgsAndJSModule(sym)) { + report.error( + "Implementation restriction: " + + "constructors of Scala classes cannot have default parameters if their companion module is JS native.", + td) + } + + val classIdent = encodeClassNameIdent(sym) + val originalName = originalNameOfClass(sym) + val isHijacked = false //isHijackedBoxedClass(sym) + + // Optimizer hints + + val isDynamicImportThunk = sym.isSubClass(jsdefn.DynamicImportThunkClass) + + def isStdLibClassWithAdHocInlineAnnot(sym: Symbol): Boolean = { + val fullName = sym.fullName.toString + (fullName.startsWith("scala.Tuple") && !fullName.endsWith("$")) || + (fullName.startsWith("scala.collection.mutable.ArrayOps$of")) + } + + val shouldMarkInline = ( + isDynamicImportThunk || + sym.hasAnnotation(jsdefn.InlineAnnot) || + (sym.isAnonymousFunction && !sym.isSubClass(defn.PartialFunctionClass)) || + isStdLibClassWithAdHocInlineAnnot(sym)) + + val optimizerHints = { + OptimizerHints.empty + .withInline(shouldMarkInline) + .withNoinline(sym.hasAnnotation(jsdefn.NoinlineAnnot)) + } + + // Generate members (constructor + methods) + + val generatedNonFieldMembers = new mutable.ListBuffer[js.MemberDef] + + val tpl = td.rhs.asInstanceOf[Template] + for (tree <- tpl.constr :: tpl.body) { + tree match { + case EmptyTree => () + + case vd: ValDef => + // fields are added via genClassFields(), but we need to generate the JS native members + val sym = vd.symbol + if (!sym.is(Module) && sym.hasAnnotation(jsdefn.JSNativeAnnot)) + generatedNonFieldMembers += genJSNativeMemberDef(vd) + + case dd: DefDef => + val sym = dd.symbol + if sym.hasAnnotation(jsdefn.JSNativeAnnot) then + if !sym.is(Accessor) then + generatedNonFieldMembers += 
genJSNativeMemberDef(dd) + else + generatedNonFieldMembers ++= genMethod(dd) + + case _ => + throw new FatalError("Illegal tree in body of genScalaClass(): " + tree) + } + } + + // Generate fields and add to methods + ctors + val generatedMembers = genClassFields(td) ++ generatedNonFieldMembers.toList + + // Generate member exports + val memberExports = jsExportsGen.genMemberExports(sym) + + // Generate top-level export definitions + val topLevelExportDefs = jsExportsGen.genTopLevelExports(sym) + + // Static initializer + val optStaticInitializer = { + // Initialization of reflection data, if required + val reflectInit = { + val enableReflectiveInstantiation = { + sym.baseClasses.exists { ancestor => + ancestor.hasAnnotation(jsdefn.EnableReflectiveInstantiationAnnot) + } + } + if (enableReflectiveInstantiation) + genRegisterReflectiveInstantiation(sym).toList + else + Nil + } + + // Initialization of the module because of field exports + val needsStaticModuleInit = + topLevelExportDefs.exists(_.isInstanceOf[js.TopLevelFieldExportDef]) + val staticModuleInit = + if (!needsStaticModuleInit) Nil + else List(genLoadModule(sym)) + + val staticInitializerStats = reflectInit ::: staticModuleInit + if (staticInitializerStats.nonEmpty) + List(genStaticConstructorWithStats(ir.Names.StaticInitializerName, js.Block(staticInitializerStats))) + else + Nil + } + + val optDynamicImportForwarder = + if (isDynamicImportThunk) List(genDynamicImportForwarder(sym)) + else Nil + + val allMemberDefsExceptStaticForwarders = + generatedMembers ::: memberExports ::: optStaticInitializer ::: optDynamicImportForwarder + + // Add static forwarders + val allMemberDefs = if (!isCandidateForForwarders(sym)) { + allMemberDefsExceptStaticForwarders + } else { + if (isStaticModule(sym)) { + /* If the module class has no linked class, we must create one to + * hold the static forwarders. Otherwise, this is going to be handled + * when generating the companion class. 
+ */ + if (!sym.linkedClass.exists) { + val forwarders = genStaticForwardersFromModuleClass(Nil, sym) + if (forwarders.nonEmpty) { + val forwardersClassDef = js.ClassDef( + js.ClassIdent(ClassName(classIdent.name.nameString.stripSuffix("$"))), + originalName, + ClassKind.Class, + None, + Some(js.ClassIdent(ir.Names.ObjectClass)), + Nil, + None, + None, + forwarders, + Nil + )(js.OptimizerHints.empty) + generatedStaticForwarderClasses += sym -> forwardersClassDef + } + } + allMemberDefsExceptStaticForwarders + } else { + val forwarders = genStaticForwardersForClassOrInterface( + allMemberDefsExceptStaticForwarders, sym) + allMemberDefsExceptStaticForwarders ::: forwarders + } + } + + // Hashed definitions of the class + val hashedDefs = ir.Hashers.hashMemberDefs(allMemberDefs) + + // The complete class definition + val kind = + if (isStaticModule(sym)) ClassKind.ModuleClass + else if (isHijacked) ClassKind.HijackedClass + else ClassKind.Class + + val classDefinition = js.ClassDef( + classIdent, + originalName, + kind, + None, + Some(encodeClassNameIdent(sym.superClass)), + genClassInterfaces(sym, forJSClass = false), + None, + None, + hashedDefs, + topLevelExportDefs)( + optimizerHints) + + classDefinition + } + + /** Gen the IR ClassDef for a Scala.js-defined JS class. 
*/ + private def genNonNativeJSClass(td: TypeDef): js.ClassDef = { + val sym = td.symbol.asClass + implicit val pos: SourcePosition = sym.sourcePos + + assert(sym.isNonNativeJSClass, + i"genNonNativeJSClass() must be called only for non-native JS classes: $sym") + assert(sym.superClass != NoSymbol, sym) + + if (hasDefaultCtorArgsAndJSModule(sym)) { + report.error( + "Implementation restriction: " + + "constructors of non-native JS classes cannot have default parameters if their companion module is JS native.", + td) + } + + val classIdent = encodeClassNameIdent(sym) + val originalName = originalNameOfClass(sym) + + // Generate members (constructor + methods) + + val constructorTrees = new mutable.ListBuffer[DefDef] + val generatedMethods = new mutable.ListBuffer[js.MethodDef] + val dispatchMethodNames = new mutable.ListBuffer[JSName] + + val tpl = td.rhs.asInstanceOf[Template] + for (tree <- tpl.constr :: tpl.body) { + tree match { + case EmptyTree => () + + case _: ValDef => + () // fields are added via genClassFields() + + case dd: DefDef => + val sym = dd.symbol + val exposed = sym.isJSExposed + + if (sym.isClassConstructor) { + constructorTrees += dd + } else if (exposed && sym.is(Accessor, butNot = Lazy)) { + // Exposed accessors must not be emitted, since the field they access is enough. + } else if (sym.hasAnnotation(jsdefn.JSOptionalAnnot)) { + // Optional methods must not be emitted + } else { + generatedMethods ++= genMethod(dd) + + // Collect the names of the dispatchers we have to create + if (exposed && !sym.is(Deferred)) { + /* We add symbols that we have to expose here. This way we also + * get inherited stuff that is implemented in this class. 
+ */ + dispatchMethodNames += sym.jsName + } + } + + case _ => + throw new FatalError("Illegal tree in gen of genNonNativeJSClass(): " + tree) + } + } + + // Static members (exported from the companion object) + val staticMembers = { + val module = sym.companionModule + if (!module.exists) { + Nil + } else { + val companionModuleClass = module.moduleClass + val exports = withScopedVars(currentClassSym := companionModuleClass) { + jsExportsGen.genStaticExports(companionModuleClass) + } + if (exports.exists(_.isInstanceOf[js.JSFieldDef])) { + val classInitializer = + genStaticConstructorWithStats(ir.Names.ClassInitializerName, genLoadModule(companionModuleClass)) + exports :+ classInitializer + } else { + exports + } + } + } + + val topLevelExports = jsExportsGen.genTopLevelExports(sym) + + val (generatedConstructor, jsClassCaptures) = withNewLocalNameScope { + val isNested = sym.isNestedJSClass + + if (isNested) + localNames.reserveLocalName(JSSuperClassParamName) + + val (captures, ctor) = genJSClassCapturesAndConstructor(constructorTrees.toList) + + val jsClassCaptures = if (isNested) { + val superParam = js.ParamDef(js.LocalIdent(JSSuperClassParamName), + NoOriginalName, jstpe.AnyType, mutable = false) + Some(superParam :: captures) + } else { + assert(captures.isEmpty, s"found non nested JS class with captures $captures at $pos") + None + } + + (ctor, jsClassCaptures) + } + + // Generate fields (and add to methods + ctors) + val generatedMembers = { + genClassFields(td) ::: + generatedConstructor :: + jsExportsGen.genJSClassDispatchers(sym, dispatchMethodNames.result().distinct) ::: + generatedMethods.toList ::: + staticMembers + } + + // Hashed definitions of the class + val hashedMemberDefs = ir.Hashers.hashMemberDefs(generatedMembers) + + // The complete class definition + val kind = + if (isStaticModule(sym)) ClassKind.JSModuleClass + else ClassKind.JSClass + + val classDefinition = js.ClassDef( + classIdent, + originalNameOfClass(sym), + kind, + 
jsClassCaptures, + Some(encodeClassNameIdent(sym.superClass)), + genClassInterfaces(sym, forJSClass = true), + jsSuperClass = jsClassCaptures.map(_.head.ref), + None, + hashedMemberDefs, + topLevelExports)( + OptimizerHints.empty) + + classDefinition + } + + /** Gen the IR ClassDef for a raw JS class or trait. + */ + private def genRawJSClassData(td: TypeDef): js.ClassDef = { + val sym = td.symbol.asClass + implicit val pos: Position = sym.span + + val classIdent = encodeClassNameIdent(sym) + val kind = { + if (sym.is(Trait)) ClassKind.AbstractJSType + else if (sym.is(ModuleClass)) ClassKind.NativeJSModuleClass + else ClassKind.NativeJSClass + } + val superClass = + if (sym.is(Trait)) None + else Some(encodeClassNameIdent(sym.superClass)) + val jsNativeLoadSpec = computeJSNativeLoadSpecOfClass(sym) + + js.ClassDef( + classIdent, + originalNameOfClass(sym), + kind, + None, + superClass, + genClassInterfaces(sym, forJSClass = false), + None, + jsNativeLoadSpec, + Nil, + Nil)( + OptimizerHints.empty) + } + + /** Gen the IR ClassDef for an interface definition. 
+ */ + private def genInterface(td: TypeDef): js.ClassDef = { + val sym = td.symbol.asClass + implicit val pos: SourcePosition = sym.sourcePos + + val classIdent = encodeClassNameIdent(sym) + + val generatedMethods = new mutable.ListBuffer[js.MethodDef] + + val tpl = td.rhs.asInstanceOf[Template] + for (tree <- tpl.constr :: tpl.body) { + tree match { + case EmptyTree => () + case dd: DefDef => generatedMethods ++= genMethod(dd) + case _ => + throw new FatalError( + i"""Illegal tree in gen of genInterface(): $tree + |class = $td + |in ${ctx.compilationUnit}""") + } + } + + val superInterfaces = genClassInterfaces(sym, forJSClass = false) + + val genMethodsList = generatedMethods.toList + val allMemberDefs = + if (!isCandidateForForwarders(sym)) genMethodsList + else genMethodsList ::: genStaticForwardersForClassOrInterface(genMethodsList, sym) + + // Hashed definitions of the interface + val hashedDefs = ir.Hashers.hashMemberDefs(allMemberDefs) + + js.ClassDef( + classIdent, + originalNameOfClass(sym), + ClassKind.Interface, + None, + None, + superInterfaces, + None, + None, + hashedDefs, + Nil)( + OptimizerHints.empty) + } + + private def genClassInterfaces(sym: ClassSymbol, forJSClass: Boolean)( + implicit pos: Position): List[js.ClassIdent] = { + for { + intf <- sym.directlyInheritedTraits + if !(forJSClass && intf == defn.DynamicClass) + } yield { + encodeClassNameIdent(intf) + } + } + + // Static forwarders ------------------------------------------------------- + + /* This mimics the logic in BCodeHelpers.addForwarders and the code that + * calls it, except that we never have collisions with existing methods in + * the companion class. This is because in the IR, only methods with the + * same `MethodName` (including signature) and that are also + * `PublicStatic` would collide. There should never be an actual collision + * because the only `PublicStatic` methods that are otherwise generated are + * the bodies of SAMs, which have mangled names. 
If that assumption is + * broken, an error message is emitted asking the user to report a bug. + * + * It is important that we always emit forwarders, because some Java APIs + * actually have a public static method and a public instance method with + * the same name. For example the class `Integer` has a + * `def hashCode(): Int` and a `static def hashCode(Int): Int`. The JVM + * back-end considers them as colliding because they have the same name, + * but we must not. + * + * By default, we only emit forwarders for top-level objects, like the JVM + * back-end. However, if requested via a compiler option, we enable them + * for all static objects. This is important so we can implement static + * methods of nested static classes of JDK APIs (see scala-js/#3950). + */ + + /** Is the given Scala class, interface or module class a candidate for + * static forwarders? + * + * - the flag `-XnoForwarders` is not set to true, and + * - the symbol is static, and + * - either of both of the following is true: + * - the flag `-scalajsGenStaticForwardersForNonTopLevelObjects` is set to true, or + * - the symbol was originally at the package level + * + * Other than the Scala.js-specific flag, and the fact that we also consider + * interfaces, this performs the same tests as the JVM back-end. + */ + def isCandidateForForwarders(sym: Symbol): Boolean = { + !ctx.settings.XnoForwarders.value && sym.isStatic && { + ctx.settings.scalajsGenStaticForwardersForNonTopLevelObjects.value || { + atPhase(flattenPhase) { + toDenot(sym).owner.is(PackageClass) + } + } + } + } + + /** Gen the static forwarders to the members of a class or interface for + * methods of its companion object. + * + * This is only done if there exists a companion object and it is not a JS + * type. 
+ * + * Precondition: `isCandidateForForwarders(sym)` is true + */ + def genStaticForwardersForClassOrInterface( + existingMembers: List[js.MemberDef], sym: Symbol)( + implicit pos: SourcePosition): List[js.MemberDef] = { + val module = sym.companionModule + if (!module.exists) { + Nil + } else { + val moduleClass = module.moduleClass + if (!moduleClass.isJSType) + genStaticForwardersFromModuleClass(existingMembers, moduleClass) + else + Nil + } + } + + /** Gen the static forwarders for the methods of a module class. + * + * Precondition: `isCandidateForForwarders(moduleClass)` is true + */ + def genStaticForwardersFromModuleClass(existingMembers: List[js.MemberDef], + moduleClass: Symbol)( + implicit pos: SourcePosition): List[js.MemberDef] = { + + assert(moduleClass.is(ModuleClass), moduleClass) + + val existingPublicStaticMethodNames = existingMembers.collect { + case js.MethodDef(flags, name, _, _, _, _) + if flags.namespace == js.MemberNamespace.PublicStatic => + name.name + }.toSet + + val members = { + moduleClass.info.membersBasedOnFlags(required = Flags.Method, + excluded = Flags.ExcludedForwarder).map(_.symbol) + } + + def isExcluded(m: Symbol): Boolean = { + def hasAccessBoundary = m.accessBoundary(defn.RootClass) ne defn.RootClass + + def isOfJLObject: Boolean = m.owner eq defn.ObjectClass + + def isDefaultParamOfJSNativeDef: Boolean = { + m.name.is(DefaultGetterName) && { + val info = new DefaultParamInfo(m) + !info.isForConstructor && info.attachedMethod.hasAnnotation(jsdefn.JSNativeAnnot) + } + } + + m.is(Deferred) + || m.isConstructor + || hasAccessBoundary + || isOfJLObject + || m.hasAnnotation(jsdefn.JSNativeAnnot) || isDefaultParamOfJSNativeDef // #4557 + } + + val forwarders = for { + m <- members + if !isExcluded(m) + } yield { + withNewLocalNameScope { + val flags = js.MemberFlags.empty.withNamespace(js.MemberNamespace.PublicStatic) + val methodIdent = encodeMethodSym(m) + val originalName = originalNameOfMethod(m) + val jsParams = for { + 
(paramName, paramInfo) <- m.info.paramNamess.flatten.zip(m.info.paramInfoss.flatten) + } yield { + js.ParamDef(freshLocalIdent(paramName), NoOriginalName, + toIRType(paramInfo), mutable = false) + } + val resultType = toIRType(m.info.resultType) + + if (existingPublicStaticMethodNames.contains(methodIdent.name)) { + report.error( + "Unexpected situation: found existing public static method " + + s"${methodIdent.name.nameString} in the companion class of " + + s"${moduleClass.fullName}; cannot generate a static forwarder " + + "the method of the same name in the object." + + "Please report this as a bug in the Scala.js support in dotty.", + pos) + } + + js.MethodDef(flags, methodIdent, originalName, jsParams, resultType, Some { + genApplyMethod(genLoadModule(moduleClass), m, jsParams.map(_.ref)) + })(OptimizerHints.empty, None) + } + } + + forwarders.toList + } + + // Generate the fields of a class ------------------------------------------ + + /** Gen definitions for the fields of a class. 
*/ + private def genClassFields(td: TypeDef): List[js.MemberDef] = { + val classSym = td.symbol.asClass + assert(currentClassSym.get == classSym, + "genClassFields called with a ClassDef other than the current one") + + val isJSClass = classSym.isNonNativeJSClass + + // Term members that are neither methods nor modules are fields + classSym.info.decls.filter { f => + !f.isOneOf(MethodOrModule) && f.isTerm + && !f.hasAnnotation(jsdefn.JSNativeAnnot) + && !f.hasAnnotation(jsdefn.JSOptionalAnnot) + && !f.hasAnnotation(jsdefn.JSExportStaticAnnot) + }.flatMap({ f => + implicit val pos = f.span + + val isTopLevelExport = f.hasAnnotation(jsdefn.JSExportTopLevelAnnot) + val isJavaStatic = f.is(JavaStatic) + assert(!(isTopLevelExport && isJavaStatic), + em"found ${f.fullName} which is both a top-level export and a Java static") + val isStaticField = isTopLevelExport || isJavaStatic + + val namespace = if isStaticField then js.MemberNamespace.PublicStatic else js.MemberNamespace.Public + val mutable = isStaticField || f.is(Mutable) + + val flags = js.MemberFlags.empty.withMutable(mutable).withNamespace(namespace) + + val irTpe0 = + if (isJSClass) genExposedFieldIRType(f) + else if (isTopLevelExport) jstpe.AnyType + else toIRType(f.info) + + // scala-js/#4370 Fields cannot have type NothingType + val irTpe = + if (irTpe0 == jstpe.NothingType) encodeClassType(defn.NothingClass) + else irTpe0 + + if (isJSClass && f.isJSExposed) + js.JSFieldDef(flags, genExpr(f.jsName)(f.sourcePos), irTpe) :: Nil + else + val fieldIdent = encodeFieldSym(f) + val originalName = originalNameOfField(f) + val fieldDef = js.FieldDef(flags, fieldIdent, originalName, irTpe) + val optionalStaticFieldGetter = + if isJavaStatic then + // Here we are generating a public static getter for the static field, + // this is its API for other units. This is necessary for singleton + // enum values, which are backed by static fields. 
+ val className = encodeClassName(classSym) + val body = js.Block( + js.LoadModule(className), + js.SelectStatic(className, fieldIdent)(irTpe)) + js.MethodDef(js.MemberFlags.empty.withNamespace(js.MemberNamespace.PublicStatic), + encodeStaticMemberSym(f), originalName, Nil, irTpe, + Some(body))( + OptimizerHints.empty, None) :: Nil + else + Nil + fieldDef :: optionalStaticFieldGetter + }).toList + } + + def genExposedFieldIRType(f: Symbol): jstpe.Type = { + val tpeEnteringPosterasure = atPhase(elimErasedValueTypePhase)(f.info) + tpeEnteringPosterasure match { + case tpe: ErasedValueType => + /* Here, we must store the field as the boxed representation of + * the value class. The default value of that field, as + * initialized at the time the instance is created, will + * therefore be null. This will not match the behavior we would + * get in a Scala class. To match the behavior, we would need to + * initialized to an instance of the boxed representation, with + * an underlying value set to the zero of its type. However we + * cannot implement that, so we live with the discrepancy. + * + * In dotc this is usually not an issue, because it unboxes `null` to + * the zero of the underlying type, unlike scalac which throws an NPE. + */ + jstpe.ClassType(encodeClassName(tpe.tycon.typeSymbol)) + + case _ => + // Other types are not boxed, so we can initialized them to their true zero. 
+ toIRType(f.info) + } + } + + // Static initializers ----------------------------------------------------- + + private def genStaticConstructorWithStats(name: MethodName, stats: js.Tree)( + implicit pos: Position): js.MethodDef = { + js.MethodDef( + js.MemberFlags.empty.withNamespace(js.MemberNamespace.StaticConstructor), + js.MethodIdent(name), + NoOriginalName, + Nil, + jstpe.NoType, + Some(stats))( + OptimizerHints.empty, None) + } + + private def genRegisterReflectiveInstantiation(sym: Symbol)( + implicit pos: SourcePosition): Option[js.Tree] = { + if (isStaticModule(sym)) + genRegisterReflectiveInstantiationForModuleClass(sym) + else if (sym.is(ModuleClass)) + None // scala-js#3228 + else if (sym.is(Lifted) && !sym.originalOwner.isClass) + None // scala-js#3227 + else + genRegisterReflectiveInstantiationForNormalClass(sym) + } + + private def genRegisterReflectiveInstantiationForModuleClass(sym: Symbol)( + implicit pos: SourcePosition): Option[js.Tree] = { + val fqcnArg = js.StringLiteral(sym.fullName.toString) + val runtimeClassArg = js.ClassOf(toTypeRef(sym.info)) + val loadModuleFunArg = + js.Closure(arrow = true, Nil, Nil, None, genLoadModule(sym), Nil) + + val stat = genApplyMethod( + genLoadModule(jsdefn.ReflectModule), + jsdefn.Reflect_registerLoadableModuleClass, + List(fqcnArg, runtimeClassArg, loadModuleFunArg)) + + Some(stat) + } + + private def genRegisterReflectiveInstantiationForNormalClass(sym: Symbol)( + implicit pos: SourcePosition): Option[js.Tree] = { + val ctors = + if (sym.is(Abstract)) Nil + else sym.info.member(nme.CONSTRUCTOR).alternatives.map(_.symbol).filter(m => !m.isOneOf(Private | Protected)) + + if (ctors.isEmpty) { + None + } else { + val constructorsInfos = for { + ctor <- ctors + } yield { + withNewLocalNameScope { + val (parameterTypes, formalParams, actualParams) = (for { + (paramName, paramInfo) <- ctor.info.paramNamess.flatten.zip(ctor.info.paramInfoss.flatten) + } yield { + val paramType = js.ClassOf(toTypeRef(paramInfo)) 
+ val paramDef = js.ParamDef(freshLocalIdent(paramName), + NoOriginalName, jstpe.AnyType, mutable = false) + val actualParam = unbox(paramDef.ref, paramInfo) + (paramType, paramDef, actualParam) + }).unzip3 + + val paramTypesArray = js.JSArrayConstr(parameterTypes) + + val newInstanceFun = js.Closure(arrow = true, Nil, formalParams, None, { + js.New(encodeClassName(sym), encodeMethodSym(ctor), actualParams) + }, Nil) + + js.JSArrayConstr(List(paramTypesArray, newInstanceFun)) + } + } + + val fqcnArg = js.StringLiteral(sym.fullName.toString) + val runtimeClassArg = js.ClassOf(toTypeRef(sym.info)) + val ctorsInfosArg = js.JSArrayConstr(constructorsInfos) + + val stat = genApplyMethod( + genLoadModule(jsdefn.ReflectModule), + jsdefn.Reflect_registerInstantiatableClass, + List(fqcnArg, runtimeClassArg, ctorsInfosArg)) + + Some(stat) + } + } + + // Constructor of a non-native JS class ------------------------------------ + + def genJSClassCapturesAndConstructor(constructorTrees: List[DefDef])( + implicit pos: SourcePosition): (List[js.ParamDef], js.JSConstructorDef) = { + /* We need to merge all Scala constructors into a single one because the + * IR, like JavaScript, only allows a single one. + * + * We do this by applying: + * 1. Applying runtime type based dispatch, just like exports. + * 2. Splitting secondary ctors into parts before and after the `this` call. + * 3. Topo-sorting all constructor statements and including/excluding + * them based on the overload that was chosen. + */ + + val (primaryTree :: Nil, secondaryTrees) = + constructorTrees.partition(_.symbol.isPrimaryConstructor): @unchecked + + val primaryCtor = genPrimaryJSClassCtor(primaryTree) + val secondaryCtors = secondaryTrees.map(genSecondaryJSClassCtor(_)) + + // VarDefs for the parameters of all constructors. 
+ val paramVarDefs = for { + vparam <- constructorTrees.flatMap(_.paramss.flatten) + } yield { + val sym = vparam.symbol + val tpe = toIRType(sym.info) + js.VarDef(encodeLocalSym(sym), originalNameOfLocal(sym), tpe, mutable = true, jstpe.zeroOf(tpe))(vparam.span) + } + + /* organize constructors in a called-by tree + * (the implicit root is the primary constructor) + */ + val ctorTree = { + val ctorToChildren = secondaryCtors + .groupBy(_.targetCtor) + .withDefaultValue(Nil) + + /* when constructing the call-by tree, we use pre-order traversal to + * assign overload numbers. + * this puts all descendants of a ctor in a range of overloads numbers. + * + * this property is useful, later, when we need to make statements + * conditional based on the chosen overload. + */ + var nextOverloadNum = 0 + def subTree[T <: JSCtor](ctor: T): ConstructorTree[T] = { + val overloadNum = nextOverloadNum + nextOverloadNum += 1 + val subtrees = ctorToChildren(ctor.sym).map(subTree(_)) + new ConstructorTree(overloadNum, ctor, subtrees) + } + + subTree(primaryCtor) + } + + /* prepare overload dispatch for all constructors. + * as a side-product, we retrieve the capture parameters. 
+ */ + val (exports, jsClassCaptures) = { + val exports = List.newBuilder[jsExportsGen.Exported] + val jsClassCaptures = List.newBuilder[js.ParamDef] + + def add(tree: ConstructorTree[_ <: JSCtor]): Unit = { + val (e, c) = genJSClassCtorDispatch(tree.ctor.sym, + tree.ctor.paramsAndInfo, tree.overloadNum) + exports += e + jsClassCaptures ++= c + tree.subCtors.foreach(add(_)) + } + + add(ctorTree) + + (exports.result(), jsClassCaptures.result()) + } + + // The name 'constructor' is used for error reporting here + val (formalArgs, restParam, overloadDispatchBody) = + jsExportsGen.genOverloadDispatch(JSName.Literal("constructor"), exports, jstpe.IntType) + + val overloadVar = js.VarDef(freshLocalIdent("overload"), NoOriginalName, + jstpe.IntType, mutable = false, overloadDispatchBody) + + val constructorBody = wrapJSCtorBody( + paramVarDefs :+ overloadVar, + genJSClassCtorBody(overloadVar.ref, ctorTree), + js.Undefined() :: Nil + ) + + val constructorDef = js.JSConstructorDef( + js.MemberFlags.empty.withNamespace(js.MemberNamespace.Constructor), + formalArgs, restParam, constructorBody)(OptimizerHints.empty, None) + + (jsClassCaptures, constructorDef) + } + + private def genPrimaryJSClassCtor(dd: DefDef): PrimaryJSCtor = { + val sym = dd.symbol + val Block(stats, _) = dd.rhs: @unchecked + assert(sym.isPrimaryConstructor, s"called with non-primary ctor: $sym") + + var jsSuperCall: Option[js.JSSuperConstructorCall] = None + val jsStats = List.newBuilder[js.Tree] + + /* Move all statements after the super constructor call since JS + * cannot access `this` before the super constructor call. + * + * dotc inserts statements before the super constructor call for param + * accessor initializers (including val's and var's declared in the + * params). We move those after the super constructor call, and are + * therefore executed later than for a Scala class. 
+ */ + withPerMethodBodyState(sym) { + stats.foreach { + case tree @ Apply(fun @ Select(Super(This(_), _), _), args) + if fun.symbol.isClassConstructor => + assert(jsSuperCall.isEmpty, s"Found 2 JS Super calls at ${dd.sourcePos}") + implicit val pos: Position = tree.span + jsSuperCall = Some(js.JSSuperConstructorCall(genActualJSArgs(fun.symbol, args))) + + case stat => + val jsStat = genStat(stat) + assert(jsSuperCall.isDefined || !jsStat.isInstanceOf[js.VarDef], + "Trying to move a local VarDef after the super constructor call of a non-native JS class at " + + dd.sourcePos) + jsStats += jsStat + } + } + + assert(jsSuperCall.isDefined, + s"Did not find Super call in primary JS construtor at ${dd.sourcePos}") + + new PrimaryJSCtor(sym, genParamsAndInfo(sym, dd.paramss), + js.JSConstructorBody(Nil, jsSuperCall.get, jsStats.result())(dd.span)) + } + + private def genSecondaryJSClassCtor(dd: DefDef): SplitSecondaryJSCtor = { + val sym = dd.symbol + assert(!sym.isPrimaryConstructor, s"called with primary ctor $sym") + + def flattenBlocks(t: Tree): List[Tree] = t match { + case Block(stats, expr) => (stats :+ expr).flatMap(flattenBlocks) + case _ => t :: Nil + } + val stats = flattenBlocks(dd.rhs) + + val beforeThisCall = List.newBuilder[js.Tree] + var thisCall: Option[(Symbol, List[js.Tree])] = None + val afterThisCall = List.newBuilder[js.Tree] + + withPerMethodBodyState(sym) { + stats.foreach { + case tree @ Apply(fun @ Select(This(_), _), args) + if fun.symbol.isClassConstructor => + assert(thisCall.isEmpty, + s"duplicate this() call in secondary JS constructor at ${dd.sourcePos}") + + implicit val pos: Position = tree.span + val sym = fun.symbol + thisCall = Some((sym, genActualArgs(sym, args))) + + case stat => + val jsStat = genStat(stat) + if (thisCall.isEmpty) + beforeThisCall += jsStat + else + afterThisCall += jsStat + } + } + + assert(thisCall.isDefined, + i"could not find the this() call in secondary JS constructor at 
${dd.sourcePos}:\n${stats.map(_.show).mkString("\n")}") + val Some((targetCtor, ctorArgs)) = thisCall: @unchecked + + new SplitSecondaryJSCtor(sym, genParamsAndInfo(sym, dd.paramss), + beforeThisCall.result(), targetCtor, ctorArgs, afterThisCall.result()) + } + + private def genParamsAndInfo(ctorSym: Symbol, + vparamss: List[ParamClause]): List[(Symbol, JSParamInfo)] = { + implicit val pos: SourcePosition = ctorSym.sourcePos + + val paramSyms = if (vparamss.isEmpty) Nil else vparamss.head.map(_.symbol) + paramSyms.zip(ctorSym.jsParamInfos) + } + + private def genJSClassCtorDispatch(ctorSym: Symbol, + allParamsAndInfos: List[(Symbol, JSParamInfo)], + overloadNum: Int): (jsExportsGen.Exported, List[js.ParamDef]) = { + + implicit val pos: SourcePosition = ctorSym.sourcePos + + /* `allParams` are the parameters as seen from inside the constructor body, + * i.e., the ones generated by the trees in the constructor body. + */ + val (captureParamsAndInfos, normalParamsAndInfos) = + allParamsAndInfos.partition(_._2.capture) + + /* For class captures, we need to generate different names than the ones + * used by the constructor body. This is necessary so that we can forward + * captures properly between constructor delegation calls. 
+ */ + val (jsClassCaptures, captureAssigns) = (for { + (param, info) <- captureParamsAndInfos + } yield { + val ident = freshLocalIdent(param.name.toTermName) + val jsClassCapture = + js.ParamDef(ident, originalNameOfLocal(param), toIRType(info.info), mutable = false) + val captureAssign = + js.Assign(genVarRef(param), jsClassCapture.ref) + (jsClassCapture, captureAssign) + }).unzip + + val normalInfos = normalParamsAndInfos.map(_._2).toIndexedSeq + + val jsExport = new jsExportsGen.Exported(ctorSym, normalInfos) { + def genBody(formalArgsRegistry: jsExportsGen.FormalArgsRegistry): js.Tree = { + val paramAssigns = for { + ((param, info), i) <- normalParamsAndInfos.zipWithIndex + } yield { + val rhs = jsExportsGen.genScalaArg(this, i, formalArgsRegistry, info, static = true, + captures = captureParamsAndInfos.map(pi => genVarRef(pi._1)))( + prevArgsCount => normalParamsAndInfos.take(prevArgsCount).map(pi => genVarRef(pi._1))) + + js.Assign(genVarRef(param), rhs) + } + + js.Block(captureAssigns ::: paramAssigns, js.IntLiteral(overloadNum)) + } + } + + (jsExport, jsClassCaptures) + } + + /** Generates a JS constructor body based on a constructor tree. */ + private def genJSClassCtorBody(overloadVar: js.VarRef, + ctorTree: ConstructorTree[PrimaryJSCtor])(implicit pos: Position): js.JSConstructorBody = { + + /* generates a statement that conditionally executes body iff the chosen + * overload is any of the descendants of `tree` (including itself). + * + * here we use the property from building the trees, that a set of + * descendants always has a range of overload numbers. 
+ */ + def ifOverload(tree: ConstructorTree[_], body: js.Tree): js.Tree = body match { + case js.Skip() => js.Skip() + + case body => + val x = overloadVar + val cond = { + import tree.{lo, hi} + + if (lo == hi) { + js.BinaryOp(js.BinaryOp.Int_==, js.IntLiteral(lo), x) + } else { + val lhs = js.BinaryOp(js.BinaryOp.Int_<=, js.IntLiteral(lo), x) + val rhs = js.BinaryOp(js.BinaryOp.Int_<=, x, js.IntLiteral(hi)) + js.If(lhs, rhs, js.BooleanLiteral(false))(jstpe.BooleanType) + } + } + + js.If(cond, body, js.Skip())(jstpe.NoType) + } + + /* preStats / postStats use pre/post order traversal respectively to + * generate a topo-sorted sequence of statements. + */ + + def preStats(tree: ConstructorTree[SplitSecondaryJSCtor], + nextParamsAndInfo: List[(Symbol, JSParamInfo)]): js.Tree = { + val inner = tree.subCtors.map(preStats(_, tree.ctor.paramsAndInfo)) + + assert(tree.ctor.ctorArgs.size == nextParamsAndInfo.size, "param count mismatch") + val paramsInfosAndArgs = nextParamsAndInfo.zip(tree.ctor.ctorArgs) + + val (captureParamsInfosAndArgs, normalParamsInfosAndArgs) = + paramsInfosAndArgs.partition(_._1._2.capture) + + val captureAssigns = for { + ((param, _), arg) <- captureParamsInfosAndArgs + } yield { + js.Assign(genVarRef(param), arg) + } + + val normalAssigns = for { + (((param, info), arg), i) <- normalParamsInfosAndArgs.zipWithIndex + } yield { + val newArg = arg match { + case js.Transient(UndefinedParam) => + /* Go full circle: We have ignored the default param getter for + * this, we'll create it again. + * + * This seems not optimal: We could simply not ignore the calls to + * default param getters in the first place. + * + * However, this proves to be difficult: Because of translations in + * earlier phases, calls to default param getters may be assigned + * to temporary variables first (see the undefinedDefaultParams + * ScopedVar). 
If this happens, it becomes increasingly difficult + * to distinguish a default param getter call for a constructor + * call of *this* instance (in which case we would want to keep + * the default param getter call) from one for a *different* + * instance (in which case we would want to discard the default + * param getter call) + * + * Because of this, it ends up being easier to just re-create the + * default param getter call if necessary. + */ + implicit val pos: SourcePosition = tree.ctor.sym.sourcePos + jsExportsGen.genCallDefaultGetter(tree.ctor.sym, i, static = false, + captures = captureParamsInfosAndArgs.map(p => genVarRef(p._1._1)))( + prevArgsCount => normalParamsInfosAndArgs.take(prevArgsCount).map(p => genVarRef(p._1._1))) + + case arg => arg + } + + js.Assign(genVarRef(param), newArg) + } + + ifOverload(tree, js.Block( + inner ++ tree.ctor.beforeCall ++ captureAssigns ++ normalAssigns)) + } + + def postStats(tree: ConstructorTree[SplitSecondaryJSCtor]): js.Tree = { + val inner = tree.subCtors.map(postStats(_)) + ifOverload(tree, js.Block(tree.ctor.afterCall ++ inner)) + } + + val primaryCtor = ctorTree.ctor + val secondaryCtorTrees = ctorTree.subCtors + + wrapJSCtorBody( + secondaryCtorTrees.map(preStats(_, primaryCtor.paramsAndInfo)), + primaryCtor.body, + secondaryCtorTrees.map(postStats(_)) + ) + } + + private def wrapJSCtorBody(before: List[js.Tree], body: js.JSConstructorBody, + after: List[js.Tree]): js.JSConstructorBody = { + js.JSConstructorBody(before ::: body.beforeSuper, body.superCall, + body.afterSuper ::: after)(body.pos) + } + + private sealed trait JSCtor { + val sym: Symbol + val paramsAndInfo: List[(Symbol, JSParamInfo)] + } + + private class PrimaryJSCtor(val sym: Symbol, + val paramsAndInfo: List[(Symbol, JSParamInfo)], + val body: js.JSConstructorBody) extends JSCtor + + private class SplitSecondaryJSCtor(val sym: Symbol, + val paramsAndInfo: List[(Symbol, JSParamInfo)], + val beforeCall: List[js.Tree], + val targetCtor: Symbol, 
val ctorArgs: List[js.Tree], + val afterCall: List[js.Tree]) extends JSCtor + + private class ConstructorTree[Ctor <: JSCtor]( + val overloadNum: Int, val ctor: Ctor, + val subCtors: List[ConstructorTree[SplitSecondaryJSCtor]]) { + val lo: Int = overloadNum + val hi: Int = subCtors.lastOption.fold(lo)(_.hi) + + assert(lo <= hi, "bad overload range") + } + + // Generate a method ------------------------------------------------------- + + /** Generates the JSNativeMemberDef. */ + def genJSNativeMemberDef(tree: ValOrDefDef): js.JSNativeMemberDef = { + implicit val pos = tree.span + + val sym = tree.symbol + val flags = js.MemberFlags.empty.withNamespace(js.MemberNamespace.PublicStatic) + val methodName = encodeJSNativeMemberSym(sym) + val jsNativeLoadSpec = computeJSNativeLoadSpecOfValDef(sym) + js.JSNativeMemberDef(flags, methodName, jsNativeLoadSpec) + } + + private def genMethod(dd: DefDef): Option[js.MethodDef] = { + withScopedVars( + localNames := new LocalNameGenerator + ) { + genMethodWithCurrentLocalNameScope(dd) + } + } + + /** Gen JS code for a method definition in a class or in an impl class. + * On the JS side, method names are mangled to encode the full signature + * of the Scala method, as described in `JSEncoding`, to support + * overloading. + * + * Some methods are not emitted at all: + * - Primitives, since they are never actually called + * - Constructors of hijacked classes + * + * Constructors are emitted by generating their body as a statement. + * + * Other (normal) methods are emitted with `genMethodBody()`. + */ + private def genMethodWithCurrentLocalNameScope(dd: DefDef): Option[js.MethodDef] = { + implicit val pos = dd.span + val sym = dd.symbol + val vparamss = dd.termParamss + val rhs = dd.rhs + + /* Is this method a default accessor that should be ignored? + * + * This is the case iff one of the following applies: + * - It is a constructor default accessor and the linked class is a + * native JS class. 
+ * - It is a default accessor for a native JS def, but with the caveat + * that its rhs must be `js.native` because of #4553. + * + * Both of those conditions can only happen if the default accessor is in + * a module class, so we use that as a fast way out. (But omitting that + * condition would not change the result.) + * + * This is different than `isJSDefaultParam` in `genApply`: we do not + * ignore default accessors of *non-native* JS types. Neither for + * constructor default accessor nor regular default accessors. We also + * do not need to worry about non-constructor members of native JS types, + * since for those, the entire member list is ignored in `genJSClassData`. + */ + def isIgnorableDefaultParam: Boolean = { + sym.name.is(DefaultGetterName) && sym.owner.is(ModuleClass) && { + val info = new DefaultParamInfo(sym) + if (info.isForConstructor) { + /* This is a default accessor for a constructor parameter. Check + * whether the attached constructor is a native JS constructor, + * which is the case iff the linked class is a native JS type. + */ + info.constructorOwner.hasAnnotation(jsdefn.JSNativeAnnot) + } else { + /* #4553 We need to ignore default accessors for JS native defs. + * However, because Scala.js <= 1.7.0 actually emitted code calling + * those accessors, we must keep default accessors that would + * compile. The only accessors we can actually get rid of are those + * that are `= js.native`. 
+ */ + !sym.owner.isJSType && + info.attachedMethod.hasAnnotation(jsdefn.JSNativeAnnot) && { + dd.rhs match { + case MaybeAsInstanceOf(Apply(fun, _)) => + fun.symbol == jsdefn.JSPackage_native + case _ => + false + } + } + } + } + } + + withPerMethodBodyState(sym) { + assert(vparamss.isEmpty || vparamss.tail.isEmpty, + "Malformed parameter list: " + vparamss) + val params = if (vparamss.isEmpty) Nil else vparamss.head.map(_.symbol) + + val methodName = encodeMethodSym(sym) + val originalName = originalNameOfMethod(sym) + + def jsParams = params.map(genParamDef(_)) + + if (primitives.isPrimitive(sym)) { + None + } else if (sym.is(Deferred) && currentClassSym.isNonNativeJSClass) { + // scala-js/#4409: Do not emit abstract methods in non-native JS classes + None + } else if (sym.is(Deferred)) { + Some(js.MethodDef(js.MemberFlags.empty, methodName, originalName, + jsParams, toIRType(patchedResultType(sym)), None)( + OptimizerHints.empty, None)) + } else if (isIgnorableDefaultParam) { + // #11592 + None + } else if (sym.is(Bridge) && sym.name.is(DefaultGetterName) && currentClassSym.isNonNativeJSClass) { + /* #12572 Bridges for default accessors in non-native JS classes must not be emitted, + * because they call another default accessor, making their entire body an + * that cannot be eliminated. + * Such methods are never called anyway, because they are filtered out in + * JSExportsGen.defaultGetterDenot(). 
+ */ + None + } else /*if (sym.isClassConstructor && isHijackedBoxedClass(sym.owner)) { + None + } else*/ { + /*def isTraitImplForwarder = dd.rhs match { + case app: Apply => foreignIsImplClass(app.symbol.owner) + case _ => false + }*/ + + val shouldMarkInline = { + sym.hasAnnotation(jsdefn.InlineAnnot) || + sym.isAnonymousFunction + } + + val shouldMarkNoinline = { + sym.hasAnnotation(jsdefn.NoinlineAnnot) /*&& + !isTraitImplForwarder*/ + } + + val optimizerHints = { + OptimizerHints.empty + .withInline(shouldMarkInline) + .withNoinline(shouldMarkNoinline) + } + + val methodDef = { + if (sym.isClassConstructor) { + val namespace = js.MemberNamespace.Constructor + js.MethodDef(js.MemberFlags.empty.withNamespace(namespace), + methodName, originalName, jsParams, jstpe.NoType, Some(genStat(rhs)))( + optimizerHints, None) + } else { + val namespace = if (isMethodStaticInIR(sym)) { + if (sym.isPrivate) js.MemberNamespace.PrivateStatic + else js.MemberNamespace.PublicStatic + } else { + if (sym.isPrivate) js.MemberNamespace.Private + else js.MemberNamespace.Public + } + val resultIRType = toIRType(patchedResultType(sym)) + genMethodDef(namespace, methodName, originalName, + params, resultIRType, rhs, optimizerHints) + } + } + + Some(methodDef) + } + } + } + + /** Generates the MethodDef of a (non-constructor) method + * + * Most normal methods are emitted straightforwardly. If the result + * type is Unit, then the body is emitted as a statement. Otherwise, it is + * emitted as an expression. + * + * Instance methods in non-native JS classes are compiled as static methods + * taking an explicit parameter for their `this` value. Static methods in + * non-native JS classes are compiled as is, like methods in Scala classes. 
+ */ + private def genMethodDef(namespace: js.MemberNamespace, methodName: js.MethodIdent, + originalName: OriginalName, paramsSyms: List[Symbol], resultIRType: jstpe.Type, + tree: Tree, optimizerHints: OptimizerHints): js.MethodDef = { + implicit val pos = tree.span + + val jsParams = paramsSyms.map(genParamDef(_)) + + def genBody() = localNames.makeLabeledIfRequiresEnclosingReturn(resultIRType) { + if (resultIRType == jstpe.NoType) genStat(tree) + else genExpr(tree) + } + + if (namespace.isStatic || !currentClassSym.isNonNativeJSClass) { + val flags = js.MemberFlags.empty.withNamespace(namespace) + js.MethodDef(flags, methodName, originalName, jsParams, resultIRType, Some(genBody()))( + optimizerHints, None) + } else { + val thisLocalIdent = freshLocalIdent("this") + withScopedVars( + thisLocalVarIdent := Some(thisLocalIdent) + ) { + val staticNamespace = + if (namespace.isPrivate) js.MemberNamespace.PrivateStatic + else js.MemberNamespace.PublicStatic + val flags = + js.MemberFlags.empty.withNamespace(staticNamespace) + val thisParamDef = js.ParamDef(thisLocalIdent, thisOriginalName, + jstpe.AnyType, mutable = false) + + js.MethodDef(flags, methodName, originalName, + thisParamDef :: jsParams, resultIRType, Some(genBody()))( + optimizerHints, None) + } + } + } + + // ParamDefs --------------------------------------------------------------- + + def genParamDef(sym: Symbol): js.ParamDef = + genParamDef(sym, toIRType(sym.info)) + + private def genParamDef(sym: Symbol, ptpe: jstpe.Type): js.ParamDef = + genParamDef(sym, ptpe, sym.span) + + private def genParamDef(sym: Symbol, pos: Position): js.ParamDef = + genParamDef(sym, toIRType(sym.info), pos) + + private def genParamDef(sym: Symbol, ptpe: jstpe.Type, pos: Position): js.ParamDef = { + js.ParamDef(encodeLocalSym(sym)(implicitly, pos, implicitly), + originalNameOfLocal(sym), ptpe, mutable = false)(pos) + } + + // Generate statements and expressions ------------------------------------- + + /** Gen JS code for a 
tree in statement position (in the IR). + */ + private def genStat(tree: Tree): js.Tree = { + exprToStat(genStatOrExpr(tree, isStat = true)) + } + + /** Turn a JavaScript expression of type Unit into a statement */ + private def exprToStat(tree: js.Tree): js.Tree = { + /* Any JavaScript expression is also a statement, but at least we get rid + * of some pure expressions that come from our own codegen. + */ + implicit val pos = tree.pos + tree match { + case js.Block(stats :+ expr) => + js.Block(stats :+ exprToStat(expr)) + case _:js.Literal | _:js.This | _:js.VarRef => + js.Skip() + case _ => + tree + } + } + + /** Gen JS code for a tree in expression position (in the IR). + */ + private def genExpr(tree: Tree): js.Tree = { + val result = genStatOrExpr(tree, isStat = false) + assert(result.tpe != jstpe.NoType, + s"genExpr($tree) returned a tree with type NoType at pos ${tree.span}") + result + } + + def genExpr(name: JSName)(implicit pos: SourcePosition): js.Tree = name match { + case JSName.Literal(name) => js.StringLiteral(name) + case JSName.Computed(sym) => genComputedJSName(sym) + } + + private def genComputedJSName(sym: Symbol)(implicit pos: SourcePosition): js.Tree = { + /* By construction (i.e. restriction in PrepJSInterop), we know that sym + * must be a static method. + * Therefore, at this point, we can invoke it by loading its owner and + * calling it. + */ + def moduleOrGlobalScope = genLoadModuleOrGlobalScope(sym.owner) + def module = genLoadModule(sym.owner) + + if (sym.owner.isJSType) { + if (!sym.owner.isNonNativeJSClass || sym.isJSExposed) + genApplyJSMethodGeneric(sym, moduleOrGlobalScope, args = Nil, isStat = false) + else + genApplyJSClassMethod(module, sym, arguments = Nil) + } else { + genApplyMethod(module, sym, arguments = Nil) + } + } + + /** Gen JS code for a tree in expression position (in the IR) or the + * global scope. 
+ */ + def genExprOrGlobalScope(tree: Tree): MaybeGlobalScope = { + implicit def pos: SourcePosition = tree.sourcePos + + tree match { + case _: This => + val sym = tree.symbol + if (sym != currentClassSym.get && sym.is(Module)) + genLoadModuleOrGlobalScope(sym) + else + MaybeGlobalScope.NotGlobalScope(genExpr(tree)) + + case _:Ident | _:Select => + val sym = tree.symbol + if (sym.is(Module)) { + assert(!sym.is(PackageClass), "Cannot use package as value: " + tree) + genLoadModuleOrGlobalScope(sym) + } else { + MaybeGlobalScope.NotGlobalScope(genExpr(tree)) + } + + case Apply(fun, _) => + if (fun.symbol == jsdefn.JSDynamic_global) + MaybeGlobalScope.GlobalScope(pos) + else + MaybeGlobalScope.NotGlobalScope(genExpr(tree)) + + case _ => + MaybeGlobalScope.NotGlobalScope(genExpr(tree)) + } + } + + /** Gen JS code for a tree in statement or expression position (in the IR). + * + * This is the main transformation method. Each node of the Scala AST + * is transformed into an equivalent portion of the JS AST. + */ + private def genStatOrExpr(tree: Tree, isStat: Boolean): js.Tree = { + implicit val pos: SourcePosition = tree.sourcePos + + report.debuglog(" " + tree) + report.debuglog("") + + tree match { + /** Local val or var declaration */ + case tree @ ValDef(name, _, _) => + val sym = tree.symbol + val rhs = tree.rhs + val rhsTree = genExpr(rhs) + + rhsTree match { + case js.Transient(UndefinedParam) => + /* This is an intermediate assignment for default params on a + * js.Any. Add the symbol to the corresponding set to inform + * the Ident resolver how to replace it and don't emit the symbol. 
+ */ + undefinedDefaultParams += sym + js.Skip() + case _ => + js.VarDef(encodeLocalSym(sym), originalNameOfLocal(sym), + toIRType(sym.info), sym.is(Mutable), rhsTree) + } + + case If(cond, thenp, elsep) => + val tpe = + if (isStat) jstpe.NoType + else toIRType(tree.tpe) + + js.If(genExpr(cond), genStatOrExpr(thenp, isStat), + genStatOrExpr(elsep, isStat))(tpe) + + case Labeled(bind, expr) => + js.Labeled(encodeLabelSym(bind.symbol), toIRType(tree.tpe), genStatOrExpr(expr, isStat)) + + case Return(expr, from) => + val fromSym = from.symbol + val label = + if (fromSym.is(Label)) encodeLabelSym(fromSym) + else localNames.get.getEnclosingReturnLabel() + js.Return(toIRType(expr.tpe) match { + case jstpe.NoType => js.Block(genStat(expr), js.Undefined()) + case _ => genExpr(expr) + }, label) + + case WhileDo(cond, body) => + val genCond = + if (cond == EmptyTree) js.BooleanLiteral(true) + else genExpr(cond) + js.While(genCond, genStat(body)) + + case t: Try => + genTry(t, isStat) + + case app: Apply => + genApply(app, isStat) + + case app: TypeApply => + genTypeApply(app) + + /*case app: ApplyDynamic => + genApplyDynamic(app)*/ + + case tree: This => + val currentClass = currentClassSym.get + val symIsModuleClass = tree.symbol.is(ModuleClass) + assert(tree.symbol == currentClass || symIsModuleClass, + s"Trying to access the this of another class: tree.symbol = ${tree.symbol}, class symbol = $currentClass") + if (symIsModuleClass && tree.symbol != currentClass) + genLoadModule(tree.symbol) + else + genThis() + + case Select(qualifier, _) => + val sym = tree.symbol + if (sym.is(Module)) { + assert(!sym.is(Package), "Cannot use package as value: " + tree) + genLoadModule(sym) + } else if (sym.is(JavaStatic)) { + genLoadStaticField(sym) + } else if (sym.hasAnnotation(jsdefn.JSNativeAnnot)) { + genJSNativeMemberSelect(tree) + } else { + val (field, boxed) = genAssignableField(sym, qualifier) + if (boxed) unbox(field, atPhase(elimErasedValueTypePhase)(sym.info)) + else field + 
} + + case tree: Ident => + desugarIdent(tree).fold[js.Tree] { + val sym = tree.symbol + assert(!sym.is(Package), "Cannot use package as value: " + tree) + if (sym.is(Module)) { + genLoadModule(sym) + } else if (undefinedDefaultParams.contains(sym)) { + /* This is a default parameter whose assignment was moved to + * a local variable. Put an undefined param instead. + */ + js.Transient(UndefinedParam) + } else { + genVarRef(sym) + } + } { select => + genStatOrExpr(select, isStat) + } + + case Literal(value) => + import Constants._ + value.tag match { + case UnitTag => + js.Skip() + case BooleanTag => + js.BooleanLiteral(value.booleanValue) + case ByteTag => + js.ByteLiteral(value.byteValue) + case ShortTag => + js.ShortLiteral(value.shortValue) + case CharTag => + js.CharLiteral(value.charValue) + case IntTag => + js.IntLiteral(value.intValue) + case LongTag => + js.LongLiteral(value.longValue) + case FloatTag => + js.FloatLiteral(value.floatValue) + case DoubleTag => + js.DoubleLiteral(value.doubleValue) + case StringTag => + js.StringLiteral(value.stringValue) + case NullTag => + js.Null() + case ClazzTag => + genClassConstant(value.typeValue) + } + + case Block(stats, expr) => + // #15419 Collapse { ; BoxedUnit } to + val genStatsAndExpr0 = stats.map(genStat(_)) :+ genStatOrExpr(expr, isStat) + val genStatsAndExpr = genStatsAndExpr0 match { + case (undefParam @ js.Transient(UndefinedParam)) :: js.Undefined() :: Nil => + undefParam :: Nil + case _ => + genStatsAndExpr0 + } + js.Block(genStatsAndExpr) + + case Typed(expr, _) => + expr match { + case _: Super => genThis() + case _ => genExpr(expr) + } + + case Assign(lhs0, rhs) => + val sym = lhs0.symbol + if (sym.is(JavaStaticTerm) && sym.source != ctx.compilationUnit.source) + throw new FatalError(s"Assignment to static member ${sym.fullName} not supported") + def genRhs = genExpr(rhs) + val lhs = lhs0 match { + case lhs: Ident => desugarIdent(lhs).getOrElse(lhs) + case lhs => lhs + } + lhs match { + case lhs: 
Select => + val qualifier = lhs.qualifier + + def ctorAssignment = ( + currentMethodSym.get.name == nme.CONSTRUCTOR && + currentMethodSym.get.owner == qualifier.symbol && + qualifier.isInstanceOf[This] + ) + // TODO This fails for OFFSET$x fields. Re-enable when we can. + /*if (!sym.is(Mutable) && !ctorAssignment) + throw new FatalError(s"Assigning to immutable field ${sym.fullName} at $pos")*/ + + if (sym.hasAnnotation(jsdefn.JSNativeAnnot)) { + /* This is an assignment to a @js.native field. Since we reject + * `@js.native var`s as compile errors, this can only happen in + * the constructor of the enclosing object. + * We simply ignore the assignment, since the field will not be + * emitted at all. + */ + js.Skip() + } else { + val (field, boxed) = genAssignableField(sym, qualifier) + if (boxed) { + val genBoxedRhs = box(genRhs, atPhase(elimErasedValueTypePhase)(sym.info)) + js.Assign(field, genBoxedRhs) + } else { + js.Assign(field, genRhs) + } + } + + case _ => + js.Assign(genVarRef(sym), genRhs) + } + + /** Array constructor */ + case javaSeqLiteral: JavaSeqLiteral => + genJavaSeqLiteral(javaSeqLiteral) + + /** A Match reaching the backend is supposed to be optimized as a switch */ + case mtch: Match => + genMatch(mtch, isStat) + + case tree: Closure => + genClosure(tree) + + case EmptyTree => + js.Skip() + + case _ => + throw new FatalError("Unexpected tree in genExpr: " + + tree + "/" + tree.getClass + " at: " + (tree.span: Position)) + } + } // end of genStatOrExpr() + + private def qualifierOf(fun: Tree): Tree = fun match { + case fun: Ident => + fun.tpe match { + case TermRef(prefix: TermRef, _) => tpd.ref(prefix) + case TermRef(prefix: ThisType, _) => tpd.This(prefix.cls) + } + case Select(qualifier, _) => + qualifier + case TypeApply(fun, _) => + qualifierOf(fun) + } + + /** Gen JS this of the current class. + * Normally encoded straightforwardly as a JS this. + * But must be replaced by the `thisLocalVarIdent` local variable if there + * is one. 
+ */ + private def genThis()(implicit pos: Position): js.Tree = { + /*if (tryingToGenMethodAsJSFunction) { + throw new CancelGenMethodAsJSFunction( + "Trying to generate `this` inside the body") + }*/ + + thisLocalVarIdent.fold[js.Tree] { + js.This()(currentThisType) + } { thisLocalIdent => + js.VarRef(thisLocalIdent)(currentThisType) + } + } + + /** Gen IR code for a `try..catch` or `try..finally` block. + * + * `try..finally` blocks are compiled straightforwardly to `try..finally` + * blocks of the IR. + * + * `try..catch` blocks are a bit more subtle, as the IR does not have + * type-based selection of exceptions to catch. We thus encode explicitly + * the type tests, like in: + * + * ``` + * try { ... } + * catch (e) { + * if (e.isInstanceOf[IOException]) { ... } + * else if (e.isInstanceOf[Exception]) { ... } + * else { + * throw e; // default, re-throw + * } + * } + * ``` + * + * In addition, there are provisions to handle catching JavaScript + * exceptions (which do not extend `Throwable`) as wrapped in a + * `js.JavaScriptException`. 
+ */ + private def genTry(tree: Try, isStat: Boolean): js.Tree = { + implicit val pos: SourcePosition = tree.sourcePos + val Try(block, catches, finalizer) = tree + + val blockAST = genStatOrExpr(block, isStat) + + val resultType = + if (isStat) jstpe.NoType + else toIRType(tree.tpe) + + val handled = + if (catches.isEmpty) blockAST + else genTryCatch(blockAST, catches, resultType, isStat) + + genStat(finalizer) match { + case js.Skip() => handled + case ast => js.TryFinally(handled, ast) + } + } + + private def genTryCatch(body: js.Tree, catches: List[CaseDef], + resultType: jstpe.Type, + isStat: Boolean)(implicit pos: SourcePosition): js.Tree = { + val exceptIdent = freshLocalIdent("e") + val origExceptVar = js.VarRef(exceptIdent)(jstpe.AnyType) + + val mightCatchJavaScriptException = catches.exists { caseDef => + caseDef.pat match { + case Typed(Ident(nme.WILDCARD), tpt) => + isMaybeJavaScriptException(tpt.tpe) + case Ident(nme.WILDCARD) => + true + case pat @ Bind(_, _) => + isMaybeJavaScriptException(pat.symbol.info) + } + } + + val (exceptValDef, exceptVar) = if (mightCatchJavaScriptException) { + val valDef = js.VarDef(freshLocalIdent("e"), NoOriginalName, + encodeClassType(defn.ThrowableClass), mutable = false, js.WrapAsThrowable(origExceptVar)) + (valDef, valDef.ref) + } else { + (js.Skip(), origExceptVar) + } + + val elseHandler: js.Tree = js.Throw(origExceptVar) + + val handler = catches.foldRight(elseHandler) { (caseDef, elsep) => + implicit val pos: SourcePosition = caseDef.sourcePos + val CaseDef(pat, _, body) = caseDef + + // Extract exception type and variable + val (tpe, boundVar) = (pat match { + case Typed(Ident(nme.WILDCARD), tpt) => + (tpt.tpe, None) + case Ident(nme.WILDCARD) => + (defn.ThrowableType, None) + case Bind(_, _) => + val ident = encodeLocalSym(pat.symbol) + val origName = originalNameOfLocal(pat.symbol) + (pat.symbol.info, Some(ident, origName)) + }) + + // Generate the body that must be executed if the exception matches + val 
bodyWithBoundVar = (boundVar match { + case None => + genStatOrExpr(body, isStat) + case Some((boundVarIdent, boundVarOriginalName)) => + val castException = genAsInstanceOf(exceptVar, tpe) + js.Block( + js.VarDef(boundVarIdent, boundVarOriginalName, toIRType(tpe), + mutable = false, castException), + genStatOrExpr(body, isStat)) + }) + + // Generate the test + if (tpe =:= defn.ThrowableType) { + bodyWithBoundVar + } else { + val cond = genIsInstanceOf(exceptVar, tpe) + js.If(cond, bodyWithBoundVar, elsep)(resultType) + } + } + + js.TryCatch(body, exceptIdent, NoOriginalName, + js.Block(exceptValDef, handler))(resultType) + } + + /** Gen JS code for an Apply node (method call) + * + * There's a whole bunch of varieties of Apply nodes: regular method + * calls, super calls, constructor calls, isInstanceOf/asInstanceOf, + * primitives, JS calls, etc. They are further dispatched in here. + */ + private def genApply(tree: Apply, isStat: Boolean): js.Tree = { + implicit val pos = tree.span + val args = tree.args + val sym = tree.fun.symbol + + /* Is the method a JS default accessor, which should become an + * `UndefinedParam` rather than being compiled normally. + * + * This is true iff one of the following conditions apply: + * - It is a constructor default param for the constructor of a JS class. + * - It is a default param of an instance method of a native JS type. + * - It is a default param of an instance method of a non-native JS type + * and the attached method is exposed. + * - It is a default param for a native JS def. + * + * This is different than `isIgnorableDefaultParam` in + * `genMethodWithCurrentLocalNameScope`: we include here the default + * accessors of *non-native* JS types (unless the corresponding methods are + * not exposed). We also need to handle non-constructor members of native + * JS types. 
+ */ + def isJSDefaultParam: Boolean = { + sym.name.is(DefaultGetterName) && { + val info = new DefaultParamInfo(sym) + if (info.isForConstructor) { + /* This is a default accessor for a constructor parameter. Check + * whether the attached constructor is a JS constructor, which is + * the case iff the linked class is a JS type. + */ + info.constructorOwner.isJSType + } else { + if (sym.owner.isJSType) { + /* The default accessor is in a JS type. It is a JS default + * param iff the enclosing class is native or the attached method + * is exposed. + */ + !sym.owner.isNonNativeJSClass || info.attachedMethod.isJSExposed + } else { + /* The default accessor is in a Scala type. It is a JS default + * param iff the attached method is a native JS def. This can + * only happen if the owner is a module class, which we test + * first as a fast way out. + */ + sym.owner.is(ModuleClass) && info.attachedMethod.hasAnnotation(jsdefn.JSNativeAnnot) + } + } + } + } + + tree.fun match { + case _ if isJSDefaultParam => + js.Transient(UndefinedParam) + + case Select(Super(_, _), _) => + genSuperCall(tree, isStat) + + case Select(New(_), nme.CONSTRUCTOR) => + genApplyNew(tree) + + case _ => + if (primitives.isPrimitive(tree)) { + genPrimitiveOp(tree, isStat) + } else if (Erasure.Boxing.isBox(sym)) { + // Box a primitive value (cannot be Unit) + val arg = args.head + makePrimitiveBox(genExpr(arg), arg.tpe) + } else if (Erasure.Boxing.isUnbox(sym)) { + // Unbox a primitive value (cannot be Unit) + val arg = args.head + makePrimitiveUnbox(genExpr(arg), tree.tpe) + } else { + genNormalApply(tree, isStat) + } + } + } + + /** Gen JS code for a super call, of the form Class.super[mix].fun(args). + * + * This does not include calls defined in mixin traits, as these are + * already desugared by the 'mixin' phase. Only calls to super classes + * remain. 
+ * + * Since a class has exactly one direct superclass, and calling a method + * two classes above the current one is invalid in Scala, the `mix` item is + * irrelevant. + */ + private def genSuperCall(tree: Apply, isStat: Boolean): js.Tree = { + implicit val pos = tree.span + val Apply(fun @ Select(sup @ Super(qual, _), _), args) = tree: @unchecked + val sym = fun.symbol + + if (sym == defn.Any_getClass) { + // The only primitive that is also callable as super call + js.GetClass(genThis()) + } else if (currentClassSym.isNonNativeJSClass) { + genJSSuperCall(tree, isStat) + } else { + /* #3013 `qual` can be `this.$outer()` in some cases since Scala 2.12, + * so we call `genExpr(qual)`, not just `genThis()`. + */ + val superCall = genApplyMethodStatically( + genExpr(qual), sym, genActualArgs(sym, args)) + + // Initialize the module instance just after the super constructor call. + if (isStaticModule(currentClassSym) && !isModuleInitialized.get.value && + currentMethodSym.get.isClassConstructor) { + isModuleInitialized.get.value = true + val className = encodeClassName(currentClassSym) + val thisType = jstpe.ClassType(className) + val initModule = js.StoreModule(className, js.This()(thisType)) + js.Block(superCall, initModule) + } else { + superCall + } + } + } + + /** Gen JS code for a constructor call (new). + * Further refined into: + * * new String(...) 
+ * * new of a hijacked boxed class + * * new of an anonymous function class that was recorded as JS function + * * new of a raw JS class + * * new Array + * * regular new + */ + private def genApplyNew(tree: Apply): js.Tree = { + implicit val pos: SourcePosition = tree.sourcePos + + val Apply(fun @ Select(New(tpt), nme.CONSTRUCTOR), args) = tree: @unchecked + val ctor = fun.symbol + val tpe = tpt.tpe + + assert(ctor.isClassConstructor, + "'new' call to non-constructor: " + ctor.name) + + val clsSym = tpe.typeSymbol + + if (isHijackedClass(clsSym)) { + genNewHijackedClass(clsSym, ctor, args.map(genExpr)) + } else /*if (translatedAnonFunctions contains tpe.typeSymbol) { + val functionMaker = translatedAnonFunctions(tpe.typeSymbol) + functionMaker(args map genExpr) + } else*/ if (clsSym.isJSType) { + genNewJSClass(tree) + } else { + toTypeRef(tpe) match { + case jstpe.ClassRef(className) => + js.New(className, encodeMethodSym(ctor), genActualArgs(ctor, args)) + + case other => + throw new FatalError(s"Non ClassRef cannot be instantiated: $other") + } + } + } + + /** Gen JS code for a call to a constructor of a hijacked class. + * Reroute them to the `new` method with the same signature in the + * companion object. + */ + private def genNewHijackedClass(clazz: Symbol, ctor: Symbol, + args: List[js.Tree])(implicit pos: SourcePosition): js.Tree = { + + val className = encodeClassName(clazz) + val initName = encodeMethodSym(ctor).name + val newName = MethodName(newSimpleMethodName, initName.paramTypeRefs, + jstpe.ClassRef(className)) + val newMethodIdent = js.MethodIdent(newName) + + js.ApplyStatic(js.ApplyFlags.empty, className, newMethodIdent, args)( + jstpe.ClassType(className)) + } + + /** Gen JS code for a new of a JS class (subclass of `js.Any`). 
*/ + private def genNewJSClass(tree: Apply): js.Tree = { + acquireContextualJSClassValue { jsClassValue => + implicit val pos: Position = tree.span + + val Apply(fun @ Select(New(tpt), _), args) = tree: @unchecked + val cls = tpt.tpe.typeSymbol + val ctor = fun.symbol + + val nestedJSClass = cls.isNestedJSClass + assert(jsClassValue.isDefined == nestedJSClass, + s"$cls at $pos: jsClassValue.isDefined = ${jsClassValue.isDefined} " + + s"but isInnerNonNativeJSClass = $nestedJSClass") + + def genArgs: List[js.TreeOrJSSpread] = genActualJSArgs(ctor, args) + def genArgsAsClassCaptures: List[js.Tree] = args.map(genExpr) + + jsClassValue.fold { + // Static JS class (by construction, it cannot be a module class, as their News do not reach the back-end) + if (cls == jsdefn.JSObjectClass && args.isEmpty) + js.JSObjectConstr(Nil) + else if (cls == jsdefn.JSArrayClass && args.isEmpty) + js.JSArrayConstr(Nil) + else + js.JSNew(genLoadJSConstructor(cls), genArgs) + } { jsClassVal => + // Nested JS class + if (cls.isAnonymousClass) + genNewAnonJSClass(cls, jsClassVal, genArgsAsClassCaptures)(fun.span) + else if (atPhase(erasurePhase)(cls.is(ModuleClass))) // LambdaLift removes the ModuleClass flag of lifted classes + js.JSNew(js.CreateJSClass(encodeClassName(cls), jsClassVal :: genArgsAsClassCaptures), Nil) + else + js.JSNew(jsClassVal, genArgs) + } + } + } + + /** Generate an instance of an anonymous (non-lambda) JS class inline + * + * @param sym Class to generate the instance of + * @param jsSuperClassValue JS class value of the super class + * @param args Arguments to the Scala constructor, which map to JS class captures + * @param pos Position of the original New tree + */ + private def genNewAnonJSClass(sym: Symbol, jsSuperClassValue: js.Tree, args: List[js.Tree])( + implicit pos: Position): js.Tree = { + assert(sym.isAnonymousClass, + s"Generating AnonJSClassNew of non anonymous JS class ${sym.fullName}") + + // Find the TypeDef for this anonymous class and generate it + 
val typeDef = consumeLazilyGeneratedAnonClass(sym) + val originalClassDef = resetAllScopedVars { + withScopedVars( + currentClassSym := sym + ) { + genNonNativeJSClass(typeDef) + } + } + + // Partition class members. + val privateFieldDefs = mutable.ListBuffer.empty[js.FieldDef] + val classDefMembers = mutable.ListBuffer.empty[js.MemberDef] + val instanceMembers = mutable.ListBuffer.empty[js.MemberDef] + var constructor: Option[js.JSConstructorDef] = None + + originalClassDef.memberDefs.foreach { + case fdef: js.FieldDef => + privateFieldDefs += fdef + + case fdef: js.JSFieldDef => + instanceMembers += fdef + + case mdef: js.MethodDef => + assert(mdef.flags.namespace.isStatic, + "Non-static, unexported method in non-native JS class") + classDefMembers += mdef + + case cdef: js.JSConstructorDef => + assert(constructor.isEmpty, "two ctors in class") + constructor = Some(cdef) + + case mdef: js.JSMethodDef => + assert(!mdef.flags.namespace.isStatic, "Exported static method") + instanceMembers += mdef + + case property: js.JSPropertyDef => + instanceMembers += property + + case nativeMemberDef: js.JSNativeMemberDef => + throw new FatalError("illegal native JS member in JS class at " + nativeMemberDef.pos) + } + + assert(originalClassDef.topLevelExportDefs.isEmpty, + "Found top-level exports in anonymous JS class at " + pos) + + // Make new class def with static members + val newClassDef = { + implicit val pos = originalClassDef.pos + val parent = js.ClassIdent(jsNames.ObjectClass) + js.ClassDef(originalClassDef.name, originalClassDef.originalName, + ClassKind.AbstractJSType, None, Some(parent), interfaces = Nil, + jsSuperClass = None, jsNativeLoadSpec = None, + classDefMembers.toList, Nil)( + originalClassDef.optimizerHints) + } + + generatedClasses += newClassDef + + // Construct inline class definition + + val jsClassCaptures = originalClassDef.jsClassCaptures.getOrElse { + throw new AssertionError(s"no class captures for anonymous JS class at $pos") + } + val 
js.JSConstructorDef(_, ctorParams, ctorRestParam, ctorBody) = constructor.getOrElse { + throw new AssertionError("No ctor found") + } + assert(ctorParams.isEmpty && ctorRestParam.isEmpty, + s"non-empty constructor params for anonymous JS class at $pos") + + /* The first class capture is always a reference to the super class. + * This is enforced by genJSClassCapturesAndConstructor. + */ + def jsSuperClassRef(implicit pos: ir.Position): js.VarRef = + jsClassCaptures.head.ref + + /* The `this` reference. + * FIXME This could clash with a local variable of the constructor or a JS + * class capture. It seems Scala 2 has the same vulnerability. How do we + * avoid this? + */ + val selfName = freshLocalIdent("this")(pos) + def selfRef(implicit pos: ir.Position) = + js.VarRef(selfName)(jstpe.AnyType) + + def memberLambda(params: List[js.ParamDef], restParam: Option[js.ParamDef], body: js.Tree)(implicit pos: ir.Position): js.Closure = + js.Closure(arrow = false, captureParams = Nil, params, restParam, body, captureValues = Nil) + + val memberDefinitions0 = instanceMembers.toList.map { + case fdef: js.FieldDef => + throw new AssertionError("unexpected FieldDef") + + case fdef: js.JSFieldDef => + implicit val pos = fdef.pos + js.Assign(js.JSSelect(selfRef, fdef.name), jstpe.zeroOf(fdef.ftpe)) + + case mdef: js.MethodDef => + throw new AssertionError("unexpected MethodDef") + + case cdef: js.JSConstructorDef => + throw new AssertionError("unexpected JSConstructorDef") + + case mdef: js.JSMethodDef => + implicit val pos = mdef.pos + val impl = memberLambda(mdef.args, mdef.restParam, mdef.body) + js.Assign(js.JSSelect(selfRef, mdef.name), impl) + + case pdef: js.JSPropertyDef => + implicit val pos = pdef.pos + val optGetter = pdef.getterBody.map { body => + js.StringLiteral("get") -> memberLambda(params = Nil, restParam = None, body) + } + val optSetter = pdef.setterArgAndBody.map { case (arg, body) => + js.StringLiteral("set") -> memberLambda(params = arg :: Nil, restParam = 
None, body) + } + val descriptor = js.JSObjectConstr( + optGetter.toList ::: + optSetter.toList ::: + List(js.StringLiteral("configurable") -> js.BooleanLiteral(true)) + ) + js.JSMethodApply(js.JSGlobalRef("Object"), + js.StringLiteral("defineProperty"), + List(selfRef, pdef.name, descriptor)) + + case nativeMemberDef: js.JSNativeMemberDef => + throw new FatalError("illegal native JS member in JS class at " + nativeMemberDef.pos) + } + + val memberDefinitions = if (privateFieldDefs.isEmpty) { + memberDefinitions0 + } else { + /* Private fields, declared in FieldDefs, are stored in a separate + * object, itself stored as a non-enumerable field of the `selfRef`. + * The name of that field is retrieved at + * `scala.scalajs.runtime.privateFieldsSymbol()`, and is a Symbol if + * supported, or a randomly generated string that has the same entropy + * as a UUID (i.e., 128 random bits). + * + * This encoding solves two issues: + * + * - Hide private fields in anonymous JS classes from `JSON.stringify` + * and other cursory inspections in JS (#2748). + * - Get around the fact that abstract JS types cannot declare + * FieldDefs (#3777). + */ + val fieldsObjValue = { + js.JSObjectConstr(privateFieldDefs.toList.map { fdef => + implicit val pos = fdef.pos + js.StringLiteral(fdef.name.name.nameString) -> jstpe.zeroOf(fdef.ftpe) + }) + } + val definePrivateFieldsObj = { + /* Object.defineProperty(selfRef, privateFieldsSymbol, { + * value: fieldsObjValue + * }); + * + * `writable`, `configurable` and `enumerable` are false by default. + */ + js.JSMethodApply( + js.JSGlobalRef("Object"), + js.StringLiteral("defineProperty"), + List( + selfRef, + genPrivateFieldsSymbol()(using sym.sourcePos), + js.JSObjectConstr(List( + js.StringLiteral("value") -> fieldsObjValue + )) + ) + ) + } + definePrivateFieldsObj :: memberDefinitions0 + } + + // Transform the constructor body.
+ val inlinedCtorStats: List[js.Tree] = { + val beforeSuper = ctorBody.beforeSuper + + val superCall = { + implicit val pos = ctorBody.superCall.pos + val js.JSSuperConstructorCall(args) = ctorBody.superCall + + val newTree = { + val ident = originalClassDef.superClass.getOrElse(throw new FatalError("No superclass")) + if (args.isEmpty && ident.name == JSObjectClassName) + js.JSObjectConstr(Nil) + else + js.JSNew(jsSuperClassRef, args) + } + + val selfVarDef = js.VarDef(selfName, thisOriginalName, jstpe.AnyType, mutable = false, newTree) + selfVarDef :: memberDefinitions + } + + // After the super call, substitute `selfRef` for `This()` + val afterSuper = new ir.Transformers.Transformer { + override def transform(tree: js.Tree, isStat: Boolean): js.Tree = tree match { + case js.This() => + selfRef(tree.pos) + + // Don't traverse closure boundaries + case closure: js.Closure => + val newCaptureValues = closure.captureValues.map(transformExpr) + closure.copy(captureValues = newCaptureValues)(closure.pos) + + case tree => + super.transform(tree, isStat) + } + }.transformStats(ctorBody.afterSuper) + + beforeSuper ::: superCall ::: afterSuper + } + + val closure = js.Closure(arrow = true, jsClassCaptures, Nil, None, + js.Block(inlinedCtorStats, selfRef), jsSuperClassValue :: args) + js.JSFunctionApply(closure, Nil) + } + + /** Gen JS code for a primitive method call. 
*/ + private def genPrimitiveOp(tree: Apply, isStat: Boolean): js.Tree = { + import dotty.tools.backend.ScalaPrimitivesOps._ + + implicit val pos = tree.span + + val Apply(fun, args) = tree + val receiver = qualifierOf(fun) + + val code = primitives.getPrimitive(tree, receiver.tpe) + + if (isArithmeticOp(code) || isLogicalOp(code) || isComparisonOp(code)) + genSimpleOp(tree, receiver :: args, code) + else if (code == CONCAT) + genStringConcat(tree, receiver, args) + else if (code == HASH) + genScalaHash(tree, receiver) + else if (isArrayOp(code)) + genArrayOp(tree, code) + else if (code == SYNCHRONIZED) + genSynchronized(tree, isStat) + else if (isCoercion(code)) + genCoercion(tree, receiver, code) + else if (code == JSPrimitives.THROW) + genThrow(tree, args) + else if (JSPrimitives.isJSPrimitive(code)) + genJSPrimitive(tree, args, code, isStat) + else + throw new FatalError(s"Unknown primitive: ${tree.symbol.fullName} at: $pos") + } + + /** Gen JS code for a simple operation (arithmetic, logical, or comparison) */ + private def genSimpleOp(tree: Apply, args: List[Tree], code: Int): js.Tree = { + args match { + case List(arg) => genSimpleUnaryOp(tree, arg, code) + case List(lhs, rhs) => genSimpleBinaryOp(tree, lhs, rhs, code) + case _ => throw new FatalError("Incorrect arity for primitive") + } + } + + /** Gen JS code for a simple unary operation. 
*/ + private def genSimpleUnaryOp(tree: Apply, arg: Tree, code: Int): js.Tree = { + import dotty.tools.backend.ScalaPrimitivesOps._ + + implicit val pos = tree.span + + val resultIRType = toIRType(tree.tpe) + val genArg = adaptPrimitive(genExpr(arg), resultIRType) + + (code: @switch) match { + case POS => + genArg + + case NEG => + (resultIRType: @unchecked) match { + case jstpe.IntType => + js.BinaryOp(js.BinaryOp.Int_-, js.IntLiteral(0), genArg) + case jstpe.LongType => + js.BinaryOp(js.BinaryOp.Long_-, js.LongLiteral(0), genArg) + case jstpe.FloatType => + js.BinaryOp(js.BinaryOp.Float_*, js.FloatLiteral(-1.0f), genArg) + case jstpe.DoubleType => + js.BinaryOp(js.BinaryOp.Double_*, js.DoubleLiteral(-1.0), genArg) + } + + case NOT => + (resultIRType: @unchecked) match { + case jstpe.IntType => + js.BinaryOp(js.BinaryOp.Int_^, js.IntLiteral(-1), genArg) + case jstpe.LongType => + js.BinaryOp(js.BinaryOp.Long_^, js.LongLiteral(-1), genArg) + } + + case ZNOT => + js.UnaryOp(js.UnaryOp.Boolean_!, genArg) + + case _ => + throw new FatalError("Unknown unary operation code: " + code) + } + } + + /** Gen JS code for a simple binary operation. 
*/ + private def genSimpleBinaryOp(tree: Apply, lhs: Tree, rhs: Tree, code: Int): js.Tree = { + import dotty.tools.backend.ScalaPrimitivesOps._ + + implicit val pos: SourcePosition = tree.sourcePos + + val lhsIRType = toIRType(lhs.tpe) + val rhsIRType = toIRType(rhs.tpe) + + val isShift = isShiftOp(code) + + val opType = { + if (isShift) { + if (lhsIRType == jstpe.LongType) jstpe.LongType + else jstpe.IntType + } else { + (lhsIRType, rhsIRType) match { + case (jstpe.DoubleType, _) | (_, jstpe.DoubleType) => jstpe.DoubleType + case (jstpe.FloatType, _) | (_, jstpe.FloatType) => jstpe.FloatType + case (jstpe.LongType, _) | (_, jstpe.LongType) => jstpe.LongType + case (jstpe.IntType | jstpe.ByteType | jstpe.ShortType | jstpe.CharType, _) => jstpe.IntType + case (_, jstpe.IntType | jstpe.ByteType | jstpe.ShortType | jstpe.CharType) => jstpe.IntType + case (jstpe.BooleanType, _) | (_, jstpe.BooleanType) => jstpe.BooleanType + case _ => jstpe.AnyType + } + } + } + + val lsrc = + if (opType == jstpe.AnyType) genExpr(lhs) + else adaptPrimitive(genExpr(lhs), opType) + val rsrc = + if (opType == jstpe.AnyType) genExpr(rhs) + else adaptPrimitive(genExpr(rhs), if (isShift) jstpe.IntType else opType) + + if (opType == jstpe.AnyType && isUniversalEqualityOp(code)) { + genUniversalEqualityOp(lhs.tpe, rhs.tpe, lsrc, rsrc, code) + } else if (code == ZOR) { + js.If(lsrc, js.BooleanLiteral(true), rsrc)(jstpe.BooleanType) + } else if (code == ZAND) { + js.If(lsrc, rsrc, js.BooleanLiteral(false))(jstpe.BooleanType) + } else { + import js.BinaryOp._ + + (opType: @unchecked) match { + case jstpe.IntType => + val op = (code: @switch) match { + case ADD => Int_+ + case SUB => Int_- + case MUL => Int_* + case DIV => Int_/ + case MOD => Int_% + case OR => Int_| + case AND => Int_& + case XOR => Int_^ + case LSL => Int_<< + case LSR => Int_>>> + case ASR => Int_>> + + case EQ => Int_== + case NE => Int_!= + case LT => Int_< + case LE => Int_<= + case GT => Int_> + case GE => Int_>= + } + 
js.BinaryOp(op, lsrc, rsrc) + + case jstpe.FloatType => + def withFloats(op: Int): js.Tree = + js.BinaryOp(op, lsrc, rsrc) + + def toDouble(value: js.Tree): js.Tree = + js.UnaryOp(js.UnaryOp.FloatToDouble, value) + + def withDoubles(op: Int): js.Tree = + js.BinaryOp(op, toDouble(lsrc), toDouble(rsrc)) + + (code: @switch) match { + case ADD => withFloats(Float_+) + case SUB => withFloats(Float_-) + case MUL => withFloats(Float_*) + case DIV => withFloats(Float_/) + case MOD => withFloats(Float_%) + + case EQ => withDoubles(Double_==) + case NE => withDoubles(Double_!=) + case LT => withDoubles(Double_<) + case LE => withDoubles(Double_<=) + case GT => withDoubles(Double_>) + case GE => withDoubles(Double_>=) + } + + case jstpe.DoubleType => + val op = (code: @switch) match { + case ADD => Double_+ + case SUB => Double_- + case MUL => Double_* + case DIV => Double_/ + case MOD => Double_% + + case EQ => Double_== + case NE => Double_!= + case LT => Double_< + case LE => Double_<= + case GT => Double_> + case GE => Double_>= + } + js.BinaryOp(op, lsrc, rsrc) + + case jstpe.LongType => + val op = (code: @switch) match { + case ADD => Long_+ + case SUB => Long_- + case MUL => Long_* + case DIV => Long_/ + case MOD => Long_% + case OR => Long_| + case XOR => Long_^ + case AND => Long_& + case LSL => Long_<< + case LSR => Long_>>> + case ASR => Long_>> + + case EQ => Long_== + case NE => Long_!= + case LT => Long_< + case LE => Long_<= + case GT => Long_> + case GE => Long_>= + } + js.BinaryOp(op, lsrc, rsrc) + + case jstpe.BooleanType => + val op = (code: @switch) match { + case EQ => Boolean_== + case NE => Boolean_!= + case OR => Boolean_| + case AND => Boolean_& + case XOR => Boolean_!= + } + js.BinaryOp(op, lsrc, rsrc) + + case jstpe.AnyType => + val op = code match { + case ID => === + case NI => !== + } + js.BinaryOp(op, lsrc, rsrc) + } + } + } + + private def adaptPrimitive(value: js.Tree, to: jstpe.Type)( + implicit pos: Position): js.Tree = { + 
genConversion(value.tpe, to, value) + } + + /* This method corresponds to the method of the same name in + * BCodeBodyBuilder of the JVM back-end. It ends up calling the method + * BCodeIdiomatic.emitT2T, whose logic we replicate here. + */ + private def genConversion(from: jstpe.Type, to: jstpe.Type, value: js.Tree)( + implicit pos: Position): js.Tree = { + import js.UnaryOp._ + + if (from == to || from == jstpe.NothingType) { + value + } else if (from == jstpe.BooleanType || to == jstpe.BooleanType) { + throw new AssertionError(s"Invalid genConversion from $from to $to") + } else { + def intValue = (from: @unchecked) match { + case jstpe.IntType => value + case jstpe.CharType => js.UnaryOp(CharToInt, value) + case jstpe.ByteType => js.UnaryOp(ByteToInt, value) + case jstpe.ShortType => js.UnaryOp(ShortToInt, value) + case jstpe.LongType => js.UnaryOp(LongToInt, value) + case jstpe.FloatType => js.UnaryOp(DoubleToInt, js.UnaryOp(FloatToDouble, value)) + case jstpe.DoubleType => js.UnaryOp(DoubleToInt, value) + } + + def doubleValue = from match { + case jstpe.DoubleType => value + case jstpe.FloatType => js.UnaryOp(FloatToDouble, value) + case jstpe.LongType => js.UnaryOp(LongToDouble, value) + case _ => js.UnaryOp(IntToDouble, intValue) + } + + (to: @unchecked) match { + case jstpe.CharType => + js.UnaryOp(IntToChar, intValue) + case jstpe.ByteType => + js.UnaryOp(IntToByte, intValue) + case jstpe.ShortType => + js.UnaryOp(IntToShort, intValue) + case jstpe.IntType => + intValue + case jstpe.LongType => + from match { + case jstpe.FloatType | jstpe.DoubleType => + js.UnaryOp(DoubleToLong, doubleValue) + case _ => + js.UnaryOp(IntToLong, intValue) + } + case jstpe.FloatType => + if (from == jstpe.LongType) + js.UnaryOp(js.UnaryOp.LongToFloat, value) + else + js.UnaryOp(js.UnaryOp.DoubleToFloat, doubleValue) + case jstpe.DoubleType => + doubleValue + } + } + } + + /** Gen JS code for a universal equality test. 
*/ + private def genUniversalEqualityOp(ltpe: Type, rtpe: Type, lhs: js.Tree, rhs: js.Tree, code: Int)( + implicit pos: SourcePosition): js.Tree = { + + import dotty.tools.backend.ScalaPrimitivesOps._ + + val bypassEqEq = { + // Do not call equals if we have a literal null at either side. + lhs.isInstanceOf[js.Null] || + rhs.isInstanceOf[js.Null] + } + + if (bypassEqEq) { + js.BinaryOp( + if (code == EQ) js.BinaryOp.=== else js.BinaryOp.!==, + lhs, rhs) + } else { + val body = genEqEqPrimitive(ltpe, rtpe, lhs, rhs) + if (code == EQ) body + else js.UnaryOp(js.UnaryOp.Boolean_!, body) + } + } + + private lazy val externalEqualsNumNum: Symbol = + defn.BoxesRunTimeModule.requiredMethod(nme.equalsNumNum) + private lazy val externalEqualsNumChar: Symbol = + NoSymbol // requiredMethod(BoxesRunTimeTypeRef, nme.equalsNumChar) // this method is private + private lazy val externalEqualsNumObject: Symbol = + defn.BoxesRunTimeModule.requiredMethod(nme.equalsNumObject) + private lazy val externalEquals: Symbol = + defn.BoxesRunTimeModule.info.decl(nme.equals_).suchThat(toDenot(_).info.firstParamTypes.size == 2).symbol + + /** Gen JS code for a call to Any.== */ + private def genEqEqPrimitive(ltpe: Type, rtpe: Type, lsrc: js.Tree, rsrc: js.Tree)( + implicit pos: SourcePosition): js.Tree = { + report.debuglog(s"$ltpe == $rtpe") + val lsym = ltpe.typeSymbol.asClass + val rsym = rtpe.typeSymbol.asClass + + /* True if the equality comparison is between values that require the + * use of the rich equality comparator + * (scala.runtime.BoxesRunTime.equals). + * This is the case when either side of the comparison might have a + * run-time type subtype of java.lang.Number or java.lang.Character, + * **which includes when either is a JS type**. 
+ * When it is statically known that both sides are equal and subtypes of + * Number or Character, not using the rich equality is possible (their + * own equals method will do ok), except for java.lang.Float and + * java.lang.Double: their `equals` have different behavior around `NaN` + * and `-0.0`, see Javadoc (scala-dev#329, scala-js#2799). + */ + val mustUseAnyComparator: Boolean = { + lsym.isJSType || rsym.isJSType || { + val p = ctx.platform + p.isMaybeBoxed(lsym) && p.isMaybeBoxed(rsym) && { + val areSameFinals = lsym.is(Final) && rsym.is(Final) && (ltpe =:= rtpe) + !areSameFinals || lsym == defn.BoxedFloatClass || lsym == defn.BoxedDoubleClass + } + } + } + + if (mustUseAnyComparator) { + val equalsMethod: Symbol = { + val ptfm = ctx.platform + if (lsym.derivesFrom(defn.BoxedNumberClass)) { + if (rsym.derivesFrom(defn.BoxedNumberClass)) externalEqualsNumNum + else if (rsym.derivesFrom(defn.BoxedCharClass)) externalEqualsNumObject // will be externalEqualsNumChar in 2.12, SI-9030 + else externalEqualsNumObject + } else externalEquals + } + genApplyStatic(equalsMethod, List(lsrc, rsrc)) + } else { + // if (lsrc eq null) rsrc eq null else lsrc.equals(rsrc) + if (lsym == defn.StringClass) { + // String.equals(that) === (this eq that) + js.BinaryOp(js.BinaryOp.===, lsrc, rsrc) + } else { + /* This requires to evaluate both operands in local values first. + * The optimizer will eliminate them if possible. + */ + val ltemp = js.VarDef(freshLocalIdent(), NoOriginalName, lsrc.tpe, mutable = false, lsrc) + val rtemp = js.VarDef(freshLocalIdent(), NoOriginalName, rsrc.tpe, mutable = false, rsrc) + js.Block( + ltemp, + rtemp, + js.If(js.BinaryOp(js.BinaryOp.===, ltemp.ref, js.Null()), + js.BinaryOp(js.BinaryOp.===, rtemp.ref, js.Null()), + genApplyMethod(ltemp.ref, defn.Any_equals, List(rtemp.ref)))( + jstpe.BooleanType)) + } + } + } + + /** Gen JS code for string concatenation. 
+ */ + private def genStringConcat(tree: Apply, receiver: Tree, + args: List[Tree]): js.Tree = { + implicit val pos = tree.span + + js.BinaryOp(js.BinaryOp.String_+, genExpr(receiver), genExpr(args.head)) + } + + /** Gen JS code for a call to Any.## */ + private def genScalaHash(tree: Apply, receiver: Tree): js.Tree = { + implicit val pos: SourcePosition = tree.sourcePos + + genModuleApplyMethod(defn.ScalaRuntimeModule.requiredMethod(nme.hash_), + List(genExpr(receiver))) + } + + /** Gen JS code for an array operation (get, set or length) */ + private def genArrayOp(tree: Tree, code: Int): js.Tree = { + import dotty.tools.backend.ScalaPrimitivesOps._ + + implicit val pos = tree.span + + val Apply(fun, args) = tree: @unchecked + val arrayObj = qualifierOf(fun) + + val genArray = genExpr(arrayObj) + val genArgs = args.map(genExpr) + + def elementType: Type = arrayObj.tpe.widenDealias match { + case defn.ArrayOf(el) => el + case JavaArrayType(el) => el + case tpe => + val msg = em"expected Array $tpe" + report.error(msg) + ErrorType(msg) + } + + def genSelect(): js.AssignLhs = + js.ArraySelect(genArray, genArgs(0))(toIRType(elementType)) + + if (isArrayGet(code)) { + // get an item of the array + assert(args.length == 1, + s"Array get requires 1 argument, found ${args.length} in $tree") + genSelect() + } else if (isArraySet(code)) { + // set an item of the array + assert(args.length == 2, + s"Array set requires 2 arguments, found ${args.length} in $tree") + js.Assign(genSelect(), genArgs(1)) + } else { + // length of the array + js.ArrayLength(genArray) + } + } + + /** Gen JS code for a call to AnyRef.synchronized */ + private def genSynchronized(tree: Apply, isStat: Boolean): js.Tree = { + /* JavaScript is single-threaded, so we can drop the + * synchronization altogether. 
+ */ + val Apply(fun, List(arg)) = tree + val receiver = qualifierOf(fun) + + val genReceiver = genExpr(receiver) + val genArg = genStatOrExpr(arg, isStat) + + genReceiver match { + case js.This() => + // common case for which there is no side-effect nor NPE + genArg + case _ => + implicit val pos = tree.span + js.Block( + js.If(js.BinaryOp(js.BinaryOp.===, genReceiver, js.Null()), + js.Throw(js.New(NullPointerExceptionClass, js.MethodIdent(jsNames.NoArgConstructorName), Nil)), + js.Skip())(jstpe.NoType), + genArg) + } + } + + /** Gen JS code for a coercion */ + private def genCoercion(tree: Apply, receiver: Tree, code: Int): js.Tree = { + implicit val pos = tree.span + + val source = genExpr(receiver) + val resultType = toIRType(tree.tpe) + adaptPrimitive(source, resultType) + } + + /** Gen a call to the special `throw` method. */ + private def genThrow(tree: Apply, args: List[Tree]): js.Tree = { + implicit val pos: SourcePosition = tree.sourcePos + val exception = args.head + val genException = genExpr(exception) + genException match { + case js.New(cls, _, _) if cls != JavaScriptExceptionClassName => + // Common case where ex is neither null nor a js.JavaScriptException + js.Throw(genException) + case _ => + js.Throw(js.UnwrapFromThrowable(genException)) + } + } + + /** Gen a "normal" apply (to a true method). + * + * But even these are further refined into: + * * Methods of java.lang.String, which are redirected to the + * RuntimeString trait implementation. + * * Calls to methods of raw JS types (Scala.js -> JS interop) + * * Calls to methods in impl classes of Scala2 traits. 
+ * * Regular method call + */ + private def genNormalApply(tree: Apply, isStat: Boolean): js.Tree = { + implicit val pos = tree.span + + val fun = tree.fun match { + case fun: Ident => desugarIdent(fun).get + case fun: Select => fun + } + val receiver = fun.qualifier + val args = tree.args + val sym = fun.symbol + + def isStringMethodFromObject: Boolean = sym.name match { + case nme.toString_ | nme.equals_ | nme.hashCode_ => true + case _ => false + } + + if (isMethodStaticInIR(sym)) { + genApplyStatic(sym, genActualArgs(sym, args)) + } else if (sym.owner.isJSType) { + if (!sym.owner.isNonNativeJSClass || sym.isJSExposed) + genApplyJSMethodGeneric(sym, genExprOrGlobalScope(receiver), genActualJSArgs(sym, args), isStat)(tree.sourcePos) + else + genApplyJSClassMethod(genExpr(receiver), sym, genActualArgs(sym, args)) + } else if (sym.hasAnnotation(jsdefn.JSNativeAnnot)) { + genJSNativeMemberCall(tree) + } else { + genApplyMethodMaybeStatically(genExpr(receiver), sym, genActualArgs(sym, args)) + } + } + + /** Gen JS code for a call to a JS method (of a subclass of `js.Any`). + * + * Basically it boils down to calling the method as a `JSBracketSelect`, + * without name mangling. 
But other aspects come into play: + * + * - Operator methods are translated to JS operators (not method calls) + * - `apply` is translated as a function call, i.e., `o()` instead of `o.apply()` + * - Scala varargs are turned into JS varargs (see `genPrimitiveJSArgs()`) + * - Getters and parameterless methods are translated as `JSBracketSelect` + * - Setters are translated to `Assign` to `JSBracketSelect` + */ + private def genApplyJSMethodGeneric(sym: Symbol, + receiver: MaybeGlobalScope, args: List[js.TreeOrJSSpread], isStat: Boolean, + jsSuperClassValue: Option[js.Tree] = None)( + implicit pos: SourcePosition): js.Tree = { + + def argsNoSpread: List[js.Tree] = { + assert(!args.exists(_.isInstanceOf[js.JSSpread]), s"Unexpected spread at $pos") + args.asInstanceOf[List[js.Tree]] + } + + val argc = args.size // meaningful only for methods that don't have varargs + + def requireNotSuper(): Unit = { + if (jsSuperClassValue.isDefined) + report.error("Illegal super call in Scala.js-defined JS class", pos) + } + + def requireNotSpread(arg: js.TreeOrJSSpread): js.Tree = + arg.asInstanceOf[js.Tree] + + def genSuperReference(propName: js.Tree): js.AssignLhs = { + jsSuperClassValue.fold[js.AssignLhs] { + genJSSelectOrGlobalRef(receiver, propName) + } { superClassValue => + js.JSSuperSelect(superClassValue, ruleOutGlobalScope(receiver), propName) + } + } + + def genSelectGet(propName: js.Tree): js.Tree = + genSuperReference(propName) + + def genSelectSet(propName: js.Tree, value: js.Tree): js.Tree = + js.Assign(genSuperReference(propName), value) + + def genCall(methodName: js.Tree, args: List[js.TreeOrJSSpread]): js.Tree = { + jsSuperClassValue.fold[js.Tree] { + genJSMethodApplyOrGlobalRefApply(receiver, methodName, args) + } { superClassValue => + js.JSSuperMethodCall(superClassValue, ruleOutGlobalScope(receiver), methodName, args) + } + } + + val boxedResult = sym.jsCallingConvention match { + case JSCallingConvention.UnaryOp(code) => + requireNotSuper() + assert(argc == 
0, s"bad argument count ($argc) for unary op at $pos") + js.JSUnaryOp(code, ruleOutGlobalScope(receiver)) + + case JSCallingConvention.BinaryOp(code) => + requireNotSuper() + assert(argc == 1, s"bad argument count ($argc) for binary op at $pos") + js.JSBinaryOp(code, ruleOutGlobalScope(receiver), requireNotSpread(args.head)) + + case JSCallingConvention.Call => + requireNotSuper() + if (sym.owner.isSubClass(jsdefn.JSThisFunctionClass)) + js.JSMethodApply(ruleOutGlobalScope(receiver), js.StringLiteral("call"), args) + else + js.JSFunctionApply(ruleOutGlobalScope(receiver), args) + + case JSCallingConvention.Property(jsName) => + argsNoSpread match { + case Nil => + genSelectGet(genExpr(jsName)) + case value :: Nil => + genSelectSet(genExpr(jsName), value) + case _ => + throw new AssertionError(s"property methods should have 0 or 1 non-varargs arguments at $pos") + } + + case JSCallingConvention.BracketAccess => + argsNoSpread match { + case keyArg :: Nil => + genSelectGet(keyArg) + case keyArg :: valueArg :: Nil => + genSelectSet(keyArg, valueArg) + case _ => + throw new AssertionError(s"@JSBracketAccess methods should have 1 or 2 non-varargs arguments at $pos") + } + + case JSCallingConvention.BracketCall => + val (methodName, actualArgs) = extractFirstArg(args) + genCall(methodName, actualArgs) + + case JSCallingConvention.Method(jsName) => + genCall(genExpr(jsName), args) + } + + if (isStat) { + boxedResult + } else { + val tpe = atPhase(elimErasedValueTypePhase) { + sym.info.finalResultType + } + if (tpe.isRef(defn.BoxedUnitClass) && sym.isGetter) { + /* Work around to reclaim Scala 2 erasure behavior, assumed by the test + * NonNativeJSTypeTest.defaultValuesForFields. + * Scala 2 erases getters of `Unit`-typed fields as returning `Unit` + * (not `BoxedUnit`). Therefore, when called in expression position, + * the call site introduces an explicit `BoxedUnit.UNIT`. 
Even if the + * field has not been initialized at all (with `= _`), this results in + * an actual `()` value. + * In Scala 3, the same pattern returns `null`, as a `BoxedUnit`, so we + * introduce here an explicit `()` value. + * TODO We should remove this branch if the upstream test is updated + * not to assume such a strict interpretation of erasure. + */ + js.Block(boxedResult, js.Undefined()) + } else { + unbox(boxedResult, tpe) + } + } + } + + /** Extract the first argument in a list of actual arguments. + * + * This is nothing else than decomposing into head and tail, except that + * we assert that the first element is not a JSSpread. + */ + private def extractFirstArg(args: List[js.TreeOrJSSpread]): (js.Tree, List[js.TreeOrJSSpread]) = { + assert(args.nonEmpty, + "Trying to extract the first argument of an empty argument list") + val firstArg = args.head + assert(!firstArg.isInstanceOf[js.JSSpread], + "Trying to extract the first argument of an argument list starting " + + "with a Spread argument: " + firstArg) + (firstArg.asInstanceOf[js.Tree], args.tail) + } + + /** Gen JS code for a call to a native JS def or val. */ + private def genJSNativeMemberSelect(tree: Tree): js.Tree = + genJSNativeMemberSelectOrCall(tree, Nil) + + /** Gen JS code for a call to a native JS def or val. */ + private def genJSNativeMemberCall(tree: Apply): js.Tree = + genJSNativeMemberSelectOrCall(tree, tree.args) + + /** Gen JS code for a call to a native JS def or val. 
*/ + private def genJSNativeMemberSelectOrCall(tree: Tree, args: List[Tree]): js.Tree = { + val sym = tree.symbol + + implicit val pos = tree.span + + val jsNativeMemberValue = + js.SelectJSNativeMember(encodeClassName(sym.owner), encodeJSNativeMemberSym(sym)) + + val boxedResult = + if (sym.isJSGetter) jsNativeMemberValue + else js.JSFunctionApply(jsNativeMemberValue, genActualJSArgs(sym, args)) + + unbox(boxedResult, atPhase(elimErasedValueTypePhase) { + sym.info.resultType + }) + } + + private def genJSSuperCall(tree: Apply, isStat: Boolean): js.Tree = { + acquireContextualJSClassValue { explicitJSSuperClassValue => + implicit val pos = tree.span + val Apply(fun @ Select(sup @ Super(qual, _), _), args) = tree: @unchecked + val sym = fun.symbol + + val genReceiver = genExpr(qual) + def genScalaArgs = genActualArgs(sym, args) + def genJSArgs = genActualJSArgs(sym, args) + + if (sym.owner == defn.ObjectClass) { + // Normal call anyway + assert(!sym.isClassConstructor, + s"Trying to call the super constructor of Object in a non-native JS class at $pos") + genApplyMethod(genReceiver, sym, genScalaArgs) + } else if (sym.isClassConstructor) { + throw new AssertionError( + s"calling a JS super constructor should have happened in genPrimaryJSClassCtor at $pos") + } else if (sym.owner.isNonNativeJSClass && !sym.isJSExposed) { + // Reroute to the static method + genApplyJSClassMethod(genReceiver, sym, genScalaArgs) + } else { + val jsSuperClassValue = explicitJSSuperClassValue.orElse { + Some(genLoadJSConstructor(currentClassSym.get.asClass.superClass)) + } + genApplyJSMethodGeneric(sym, MaybeGlobalScope.NotGlobalScope(genReceiver), + genJSArgs, isStat, jsSuperClassValue)(tree.sourcePos) + } + } + } + + /** Gen JS code for a call to a polymorphic method. + * + * The only methods that reach the back-end as polymorphic are + * `isInstanceOf` and `asInstanceOf`. + * + * (Well, in fact `DottyRunTime.newRefArray` too, but it is handled as a + * primitive instead.) 
+ */ + private def genTypeApply(tree: TypeApply): js.Tree = { + implicit val pos: SourcePosition = tree.sourcePos + + val TypeApply(fun, targs) = tree + + val sym = fun.symbol + val receiver = qualifierOf(fun) + + val to = targs.head.tpe + + assert(!isPrimitiveValueType(receiver.tpe), + s"Found receiver of type test with primitive type ${receiver.tpe} at $pos") + assert(!isPrimitiveValueType(to), + s"Found target type of type test with primitive type ${receiver.tpe} at $pos") + + val genReceiver = genExpr(receiver) + + if (sym == defn.Any_asInstanceOf) { + genAsInstanceOf(genReceiver, to) + } else if (sym == defn.Any_isInstanceOf) { + genIsInstanceOf(genReceiver, to) + } else { + throw new FatalError( + s"Unexpected type application $fun with symbol ${sym.fullName}") + } + } + + /** Gen JS code for a Java Seq literal. */ + private def genJavaSeqLiteral(tree: JavaSeqLiteral): js.Tree = { + implicit val pos = tree.span + + val genElems = tree.elems.map(genExpr) + val arrayTypeRef = toTypeRef(tree.tpe).asInstanceOf[jstpe.ArrayTypeRef] + js.ArrayValue(arrayTypeRef, genElems) + } + + /** Gen JS code for a switch-`Match`, which is translated into an IR `js.Match`. 
*/ + def genMatch(tree: Tree, isStat: Boolean): js.Tree = { + implicit val pos = tree.span + val Match(selector, cases) = tree: @unchecked + + def abortMatch(msg: String): Nothing = + throw new FatalError(s"$msg in switch-like pattern match at ${tree.span}: $tree") + + val genSelector = genExpr(selector) + + // Sanity check: we can handle Ints and Strings (including `null`s), but nothing else + genSelector.tpe match { + case jstpe.IntType | jstpe.ClassType(jsNames.BoxedStringClass) | jstpe.NullType | jstpe.NothingType => + // ok + case _ => + abortMatch(s"Invalid selector type ${genSelector.tpe}") + } + + val resultType = toIRType(tree.tpe) match { + case jstpe.NothingType => jstpe.NothingType // must take priority over NoType below + case _ if isStat => jstpe.NoType + case resType => resType + } + + var clauses: List[(List[js.MatchableLiteral], js.Tree)] = Nil + var optDefaultClause: Option[js.Tree] = None + + for (caze @ CaseDef(pat, guard, body) <- cases) { + if (guard != EmptyTree) + abortMatch("Found a case guard") + + val genBody = genStatOrExpr(body, isStat) + + def invalidCase(): Nothing = + abortMatch("Invalid case") + + def genMatchableLiteral(tree: Literal): js.MatchableLiteral = { + genExpr(tree) match { + case matchableLiteral: js.MatchableLiteral => matchableLiteral + case otherExpr => invalidCase() + } + } + + pat match { + case lit: Literal => + clauses = (List(genMatchableLiteral(lit)), genBody) :: clauses + case Ident(nme.WILDCARD) => + optDefaultClause = Some(genBody) + case Alternative(alts) => + val genAlts = alts.map { + case lit: Literal => genMatchableLiteral(lit) + case _ => invalidCase() + } + clauses = (genAlts, genBody) :: clauses + case _ => + invalidCase() + } + } + + clauses = clauses.reverse + val defaultClause = optDefaultClause.getOrElse { + throw new AssertionError("No elseClause in pattern match") + } + + /* Builds a `js.Match`, but simplifies it to a `js.If` if there is only + * one case with one alternative, and to a `js.Block` 
if there is no case + * at all. This happens in practice in the standard library. Having no + * case is a typical product of `match`es that are full of + * `case n if ... =>`, which are used instead of `if` chains for + * convenience and/or readability. + */ + def isInt(tree: js.Tree): Boolean = tree.tpe == jstpe.IntType + + clauses match { + case Nil => + // Completely remove the Match. Preserve the side-effects of `genSelector`. + js.Block(exprToStat(genSelector), defaultClause) + + case (uniqueAlt :: Nil, caseRhs) :: Nil => + /* Simplify the `match` as an `if`, so that the optimizer has less + * work to do, and we emit less code at the end of the day. + * Use `Int_==` instead of `===` if possible, since it is a common case. + */ + val op = + if (isInt(genSelector) && isInt(uniqueAlt)) js.BinaryOp.Int_== + else js.BinaryOp.=== + js.If(js.BinaryOp(op, genSelector, uniqueAlt), caseRhs, defaultClause)(resultType) + + case _ => + // We have more than one case: use a js.Match + js.Match(genSelector, clauses, defaultClause)(resultType) + } + } + + /** Gen JS code for a closure. + * + * Input: a `Closure` tree of the form + * {{{ + * Closure(env, call, functionalInterface) + * }}} + * representing the pseudo-syntax + * {{{ + * { (p1, ..., pm) => call(env1, ..., envn, p1, ..., pm) }: functionInterface + * }}} + * where `envi` are identifiers in the local scope. The qualifier of `call` + * is also implicitly captured. + * + * Output: a `js.Closure` tree of the form + * {{{ + * js.Closure(formalCaptures, formalParams, body, actualCaptures) + * }}} + * representing the pseudo-syntax + * {{{ + * lambda( + * formalParam1, ..., formalParamM) = body + * }}} + * where the `actualCaptures` and `body` are, in general, arbitrary + * expressions. 
But in this case, `actualCaptures` will be identifiers from + * `env`, and the `body` will be of the form + * {{{ + * call(formalCapture1.ref, ..., formalCaptureN.ref, + * formalParam1.ref, ...formalParamM.ref) + * }}} + * + * When the `js.Closure` node is evaluated, i.e., when the closure value is + * created, the expressions of the `actualCaptures` are evaluated, and the + * results of those evaluations is "stored" in the environment of the + * closure as the corresponding `formalCapture`. + * + * When we later *call* the closure, the `formalCaptures` already have their + * values from the environment, and they are available in the `body`. The + * `formalParams` of the created closure receive their values from the + * actual arguments at the call-site of the closure, and they are also + * available in the `body`. + */ + private def genClosure(tree: Closure): js.Tree = { + implicit val pos = tree.span + val Closure(env, call, functionalInterface) = tree + + val envSize = env.size + + val (fun, args) = call match { + // case Apply(fun, args) => (fun, args) // Conjectured not to happen + case t @ Select(_, _) => (t, Nil) + case t @ Ident(_) => (t, Nil) + } + val sym = fun.symbol + val isStaticCall = isMethodStaticInIR(sym) + + val qualifier = qualifierOf(fun) + val allCaptureValues = + if (isStaticCall) env + else qualifier :: env + + val formalAndActualCaptures = allCaptureValues.map { value => + implicit val pos = value.span + val (formalIdent, originalName) = value match { + case Ident(name) => (freshLocalIdent(name.toTermName), OriginalName(name.toString)) + case This(_) => (freshLocalIdent("this"), thisOriginalName) + case _ => (freshLocalIdent(), NoOriginalName) + } + val formalCapture = js.ParamDef(formalIdent, originalName, + toIRType(value.tpe), mutable = false) + val actualCapture = genExpr(value) + (formalCapture, actualCapture) + } + val (formalCaptures, actualCaptures) = formalAndActualCaptures.unzip + + val funInterfaceSym = 
functionalInterface.tpe.typeSymbol + val hasRepeatedParam = { + funInterfaceSym.exists && { + val Seq(samMethodDenot) = funInterfaceSym.info.possibleSamMethods + val samMethod = samMethodDenot.symbol + atPhase(elimRepeatedPhase)(samMethod.info.paramInfoss.flatten.exists(_.isRepeatedParam)) + } + } + + val formalParamNames = sym.info.paramNamess.flatten.drop(envSize) + val formalParamTypes = sym.info.paramInfoss.flatten.drop(envSize) + val formalParamRepeateds = + if (hasRepeatedParam) (0 until (formalParamTypes.size - 1)).map(_ => false) :+ true + else (0 until formalParamTypes.size).map(_ => false) + + val formalAndActualParams = formalParamNames.lazyZip(formalParamTypes).lazyZip(formalParamRepeateds).map { + (name, tpe, repeated) => + val formalParam = js.ParamDef(freshLocalIdent(name), + OriginalName(name.toString), jstpe.AnyType, mutable = false) + val actualParam = + if (repeated) genJSArrayToVarArgs(formalParam.ref)(tree.sourcePos) + else unbox(formalParam.ref, tpe) + (formalParam, actualParam) + } + val (formalAndRestParams, actualParams) = formalAndActualParams.unzip + + val (formalParams, restParam) = + if (hasRepeatedParam) (formalAndRestParams.init, Some(formalAndRestParams.last)) + else (formalAndRestParams, None) + + val genBody = { + val call = if (isStaticCall) { + genApplyStatic(sym, formalCaptures.map(_.ref) ::: actualParams) + } else { + val thisCaptureRef :: argCaptureRefs = formalCaptures.map(_.ref): @unchecked + if (!sym.owner.isNonNativeJSClass || sym.isJSExposed) + genApplyMethodMaybeStatically(thisCaptureRef, sym, argCaptureRefs ::: actualParams) + else + genApplyJSClassMethod(thisCaptureRef, sym, argCaptureRefs ::: actualParams) + } + box(call, sym.info.finalResultType) + } + + val isThisFunction = funInterfaceSym.isSubClass(jsdefn.JSThisFunctionClass) && { + val ok = formalParams.nonEmpty + if (!ok) + report.error("The SAM or apply method for a js.ThisFunction must have a leading non-varargs parameter", tree) + ok + } + + if 
(isThisFunction) { + val thisParam :: otherParams = formalParams: @unchecked + js.Closure( + arrow = false, + formalCaptures, + otherParams, + restParam, + js.Block( + js.VarDef(thisParam.name, thisParam.originalName, + thisParam.ptpe, mutable = false, + js.This()(thisParam.ptpe)(thisParam.pos))(thisParam.pos), + genBody), + actualCaptures) + } else { + val closure = js.Closure(arrow = true, formalCaptures, formalParams, restParam, genBody, actualCaptures) + + if (!funInterfaceSym.exists || defn.isFunctionClass(funInterfaceSym)) { + assert(!funInterfaceSym.exists || defn.isFunctionClass(funInterfaceSym), + s"Invalid functional interface $funInterfaceSym reached the back-end") + val formalCount = formalParams.size + val cls = ClassName("scala.scalajs.runtime.AnonFunction" + formalCount) + val ctorName = MethodName.constructor( + jstpe.ClassRef(ClassName("scala.scalajs.js.Function" + formalCount)) :: Nil) + js.New(cls, js.MethodIdent(ctorName), List(closure)) + } else { + assert(funInterfaceSym.isJSType, + s"Invalid functional interface $funInterfaceSym reached the back-end") + closure + } + } + } + + /** Generates a static method instantiating and calling this + * DynamicImportThunk's `apply`: + * + * {{{ + * static def dynamicImport$;;Ljava.lang.Object(): any = { + * new .;:V().apply;Ljava.lang.Object() + * } + * }}} + */ + private def genDynamicImportForwarder(clsSym: Symbol)(using Position): js.MethodDef = { + withNewLocalNameScope { + val ctor = clsSym.primaryConstructor + val paramSyms = ctor.paramSymss.flatten + val paramDefs = paramSyms.map(genParamDef(_)) + + val body = { + val inst = js.New(encodeClassName(clsSym), encodeMethodSym(ctor), paramDefs.map(_.ref)) + genApplyMethod(inst, jsdefn.DynamicImportThunkClass_apply, Nil) + } + + js.MethodDef( + js.MemberFlags.empty.withNamespace(js.MemberNamespace.PublicStatic), + encodeDynamicImportForwarderIdent(paramSyms), + NoOriginalName, + paramDefs, + jstpe.AnyType, + Some(body))(OptimizerHints.empty, None) + } + 
} + + /** Boxes a value of the given type before `elimErasedValueType`. + * + * This should be used when sending values to a JavaScript context, which + * is erased/boxed at the IR level, although it is not erased at the + * dotty/JVM level. + * + * @param expr Tree to be boxed if needed. + * @param tpeEnteringElimErasedValueType The type of `expr` as it was + * entering the `elimErasedValueType` phase. + */ + def box(expr: js.Tree, tpeEnteringElimErasedValueType: Type)(implicit pos: Position): js.Tree = { + tpeEnteringElimErasedValueType match { + case tpe if isPrimitiveValueType(tpe) => + makePrimitiveBox(expr, tpe) + + case tpe: ErasedValueType => + val boxedClass = tpe.tycon.typeSymbol + val ctor = boxedClass.primaryConstructor + js.New(encodeClassName(boxedClass), encodeMethodSym(ctor), List(expr)) + + case _ => + expr + } + } + + /** Unboxes a value typed as Any to the given type before `elimErasedValueType`. + * + * This should be used when receiving values from a JavaScript context, + * which is erased/boxed at the IR level, although it is not erased at the + * dotty/JVM level. + * + * @param expr Tree to be extracted. + * @param tpeEnteringElimErasedValueType The type of `expr` as it was + * entering the `elimErasedValueType` phase. 
+ */ + def unbox(expr: js.Tree, tpeEnteringElimErasedValueType: Type)(implicit pos: Position): js.Tree = { + tpeEnteringElimErasedValueType match { + case tpe if isPrimitiveValueType(tpe) => + makePrimitiveUnbox(expr, tpe) + + case tpe: ErasedValueType => + val boxedClass = tpe.tycon.typeSymbol.asClass + val unboxMethod = ValueClasses.valueClassUnbox(boxedClass) + val content = genApplyMethod( + js.AsInstanceOf(expr, encodeClassType(boxedClass)), unboxMethod, Nil) + if (unboxMethod.info.resultType <:< tpe.erasedUnderlying) + content + else + unbox(content, tpe.erasedUnderlying) + + case tpe => + genAsInstanceOf(expr, tpe) + } + } + + /** Gen JS code for an asInstanceOf cast (for reference types only) */ + private def genAsInstanceOf(value: js.Tree, to: Type)(implicit pos: Position): js.Tree = + genAsInstanceOf(value, toIRType(to)) + + /** Gen JS code for an asInstanceOf cast (for reference types only) */ + private def genAsInstanceOf(value: js.Tree, to: jstpe.Type)(implicit pos: Position): js.Tree = { + to match { + case jstpe.AnyType => + value + case jstpe.NullType => + js.If( + js.BinaryOp(js.BinaryOp.===, value, js.Null()), + js.Null(), + genThrowClassCastException())( + jstpe.NullType) + case jstpe.NothingType => + js.Block(value, genThrowClassCastException()) + case _ => + js.AsInstanceOf(value, to) + } + } + + private def genThrowClassCastException()(implicit pos: Position): js.Tree = { + js.Throw(js.New(jsNames.ClassCastExceptionClass, + js.MethodIdent(jsNames.NoArgConstructorName), Nil)) + } + + /** Gen JS code for an isInstanceOf test (for reference types only) */ + def genIsInstanceOf(value: js.Tree, to: Type)( + implicit pos: SourcePosition): js.Tree = { + val sym = to.typeSymbol + + if (sym == defn.ObjectClass) { + js.BinaryOp(js.BinaryOp.!==, value, js.Null()) + } else if (sym.isJSType) { + if (sym.is(Trait)) { + report.error( + em"isInstanceOf[${sym.fullName}] not supported because it is a JS trait", + pos) + js.BooleanLiteral(true) + } else { + 
js.AsInstanceOf(js.JSBinaryOp( + js.JSBinaryOp.instanceof, value, genLoadJSConstructor(sym)), + jstpe.BooleanType) + } + } else { + // The Scala type system prevents x.isInstanceOf[Null] and ...[Nothing] + assert(sym != defn.NullClass && sym != defn.NothingClass, + s"Found a .isInstanceOf[$sym] at $pos") + js.IsInstanceOf(value, toIRType(to)) + } + } + + /** Gen a statically linked call to an instance method. */ + def genApplyMethodMaybeStatically(receiver: js.Tree, method: Symbol, + arguments: List[js.Tree])(implicit pos: Position): js.Tree = { + if (method.isPrivate || method.isClassConstructor) + genApplyMethodStatically(receiver, method, arguments) + else + genApplyMethod(receiver, method, arguments) + } + + /** Gen a dynamically linked call to a Scala method. */ + def genApplyMethod(receiver: js.Tree, method: Symbol, arguments: List[js.Tree])( + implicit pos: Position): js.Tree = { + assert(!method.isPrivate, + s"Cannot generate a dynamic call to private method $method at $pos") + js.Apply(js.ApplyFlags.empty, receiver, encodeMethodSym(method), arguments)( + toIRType(patchedResultType(method))) + } + + /** Gen a statically linked call to an instance method. */ + def genApplyMethodStatically(receiver: js.Tree, method: Symbol, arguments: List[js.Tree])( + implicit pos: Position): js.Tree = { + val flags = js.ApplyFlags.empty + .withPrivate(method.isPrivate && !method.isClassConstructor) + .withConstructor(method.isClassConstructor) + js.ApplyStatically(flags, receiver, encodeClassName(method.owner), + encodeMethodSym(method), arguments)( + toIRType(patchedResultType(method))) + } + + /** Gen a call to a static method. 
*/ + private def genApplyStatic(method: Symbol, arguments: List[js.Tree])( + implicit pos: Position): js.Tree = { + js.ApplyStatic(js.ApplyFlags.empty.withPrivate(method.isPrivate), + encodeClassName(method.owner), encodeMethodSym(method), arguments)( + toIRType(patchedResultType(method))) + } + + /** Gen a call to a non-exposed method of a non-native JS class. */ + def genApplyJSClassMethod(receiver: js.Tree, method: Symbol, arguments: List[js.Tree])( + implicit pos: Position): js.Tree = { + genApplyStatic(method, receiver :: arguments) + } + + /** Gen a call to a method of a Scala top-level module. */ + private def genModuleApplyMethod(methodSym: Symbol, arguments: List[js.Tree])( + implicit pos: SourcePosition): js.Tree = { + genApplyMethod(genLoadModule(methodSym.owner), methodSym, arguments) + } + + /** Gen a boxing operation (tpe is the primitive type) */ + private def makePrimitiveBox(expr: js.Tree, tpe: Type)( + implicit pos: Position): js.Tree = { + toIRType(tpe) match { + case jstpe.NoType => // for JS interop cases + js.Block(expr, js.Undefined()) + case jstpe.BooleanType | jstpe.CharType | jstpe.ByteType | + jstpe.ShortType | jstpe.IntType | jstpe.LongType | jstpe.FloatType | + jstpe.DoubleType => + expr // box is identity for all those primitive types + case typeRef => + throw new FatalError( + s"makePrimitiveBox requires a primitive type, found $typeRef for $tpe at $pos") + } + } + + /** Gen an unboxing operation (tpe is the primitive type) */ + private def makePrimitiveUnbox(expr: js.Tree, tpe: Type)( + implicit pos: Position): js.Tree = { + toIRType(tpe) match { + case jstpe.NoType => expr // for JS interop cases + case irTpe => js.AsInstanceOf(expr, irTpe) + } + } + + /** Gen JS code for a Scala.js-specific primitive method */ + private def genJSPrimitive(tree: Apply, args: List[Tree], code: Int, + isStat: Boolean): js.Tree = { + + import JSPrimitives._ + + implicit val pos = tree.span + + def genArgs1: js.Tree = { + assert(args.size == 1, + 
s"Expected exactly 1 argument for JS primitive $code but got " + + s"${args.size} at $pos") + genExpr(args.head) + } + + def genArgs2: (js.Tree, js.Tree) = { + assert(args.size == 2, + s"Expected exactly 2 arguments for JS primitive $code but got " + + s"${args.size} at $pos") + (genExpr(args.head), genExpr(args.tail.head)) + } + + def genArgsVarLength: List[js.TreeOrJSSpread] = + genActualJSArgs(tree.symbol, args) + + def resolveReifiedJSClassSym(arg: Tree): Symbol = { + def fail(): Symbol = { + report.error( + tree.symbol.name.toString + " must be called with a constant " + + "classOf[T] representing a class extending js.Any " + + "(not a trait nor an object)", + tree.sourcePos) + NoSymbol + } + arg match { + case Literal(value) if value.tag == Constants.ClazzTag => + val classSym = value.typeValue.typeSymbol + if (classSym.isJSType && !classSym.is(Trait) && !classSym.is(ModuleClass)) + classSym + else + fail() + case _ => + fail() + } + } + + (code: @switch) match { + case DYNNEW => + // js.Dynamic.newInstance(clazz)(actualArgs: _*) + val (jsClass, actualArgs) = extractFirstArg(genArgsVarLength) + js.JSNew(jsClass, actualArgs) + + case ARR_CREATE => + // js.Array(elements: _*) + js.JSArrayConstr(genArgsVarLength) + + case CONSTRUCTOROF => + // runtime.constructorOf(clazz) + val classSym = resolveReifiedJSClassSym(args.head) + if (classSym == NoSymbol) + js.Undefined() // compile error emitted by resolveReifiedJSClassSym + else + genLoadJSConstructor(classSym) + + case CREATE_INNER_JS_CLASS | CREATE_LOCAL_JS_CLASS => + // runtime.createInnerJSClass(clazz, superClass) + // runtime.createLocalJSClass(clazz, superClass, fakeNewInstances) + val classSym = resolveReifiedJSClassSym(args(0)) + val superClassValue = genExpr(args(1)) + if (classSym == NoSymbol) { + js.Undefined() // compile error emitted by resolveReifiedJSClassSym + } else { + val captureValues = { + if (code == CREATE_INNER_JS_CLASS) { + /* Private inner classes that do not actually access their outer + 
* pointer do not receive an outer argument. We therefore count + * the number of constructors that have non-empty param list to + * know how many times we need to pass `this`. + */ + val requiredThisParams = + classSym.info.decls.lookupAll(nme.CONSTRUCTOR).count(_.info.paramInfoss.head.nonEmpty) + val outer = genThis() + List.fill(requiredThisParams)(outer) + } else { + val fakeNewInstances = args(2).asInstanceOf[JavaSeqLiteral].elems + fakeNewInstances.flatMap(genCaptureValuesFromFakeNewInstance(_)) + } + } + js.CreateJSClass(encodeClassName(classSym), superClassValue :: captureValues) + } + + case WITH_CONTEXTUAL_JS_CLASS_VALUE => + // withContextualJSClassValue(jsclass, inner) + val jsClassValue = genExpr(args(0)) + withScopedVars( + contextualJSClassValue := Some(jsClassValue) + ) { + genStatOrExpr(args(1), isStat) + } + + case LINKING_INFO => + // runtime.linkingInfo + js.JSLinkingInfo() + + case DEBUGGER => + // js.special.debugger() + js.Debugger() + + case UNITVAL => + // BoxedUnit.UNIT, which is the boxed version of () + js.Undefined() + + case JS_NEW_TARGET => + // js.new.target + val valid = currentMethodSym.get.isClassConstructor && currentClassSym.isNonNativeJSClass + if (!valid) { + report.error( + "Illegal use of js.`new`.target.\n" + + "It can only be used in the constructor of a JS class, " + + "as a statement or in the rhs of a val or var.\n" + + "It cannot be used inside a lambda or by-name parameter, nor in any other location.", + tree.sourcePos) + } + js.JSNewTarget() + + case JS_IMPORT => + // js.import(arg) + val arg = genArgs1 + js.JSImportCall(arg) + + case JS_IMPORT_META => + // js.import.meta + js.JSImportMeta() + + case DYNAMIC_IMPORT => + // runtime.dynamicImport + assert(args.size == 1, + s"Expected exactly 1 argument for JS primitive $code but got " + + s"${args.size} at $pos") + + args.head match { + case Block(stats, expr @ Typed(Apply(fun @ Select(New(tpt), _), args), _)) => + /* stats is always empty if no other compiler plugin is 
present. + * However, code instrumentation (notably scoverage) might add + * statements here. If this is the case, the thunk anonymous class + * has already been created when the other plugin runs (i.e. the + * plugin ran after jsinterop). + * + * Therefore, it is OK to leave the statements on our side of the + * dynamic loading boundary. + */ + + val clsSym = tpt.symbol + val ctor = fun.symbol + + assert(clsSym.isSubClass(jsdefn.DynamicImportThunkClass), + s"expected subclass of DynamicImportThunk, got: $clsSym at: ${expr.sourcePos}") + assert(ctor.isPrimaryConstructor, + s"expected primary constructor, got: $ctor at: ${expr.sourcePos}") + + js.Block( + stats.map(genStat(_)), + js.ApplyDynamicImport( + js.ApplyFlags.empty, + encodeClassName(clsSym), + encodeDynamicImportForwarderIdent(ctor.paramSymss.flatten), + genActualArgs(ctor, args)) + ) + + case tree => + throw new FatalError( + s"Unexpected argument tree in dynamicImport: $tree/${tree.getClass} at: $pos") + } + + case JS_NATIVE => + // js.native + report.error( + "js.native may only be used as stub implementation in facade types", + tree.sourcePos) + js.Undefined() + + case TYPEOF => + // js.typeOf(arg) + val arg = genArgs1 + val typeofExpr = arg match { + case arg: js.JSGlobalRef => js.JSTypeOfGlobalRef(arg) + case _ => js.JSUnaryOp(js.JSUnaryOp.typeof, arg) + } + js.AsInstanceOf(typeofExpr, jstpe.ClassType(jsNames.BoxedStringClass)) + + case STRICT_EQ => + // js.special.strictEquals(arg1, arg2) + val (arg1, arg2) = genArgs2 + js.JSBinaryOp(js.JSBinaryOp.===, arg1, arg2) + + case IN => + // js.special.in(arg1, arg2) + val (arg1, arg2) = genArgs2 + js.AsInstanceOf(js.JSBinaryOp(js.JSBinaryOp.in, arg1, arg2), + jstpe.BooleanType) + + case INSTANCEOF => + // js.special.instanceof(arg1, arg2) + val (arg1, arg2) = genArgs2 + js.AsInstanceOf(js.JSBinaryOp(js.JSBinaryOp.instanceof, arg1, arg2), + jstpe.BooleanType) + + case DELETE => + // js.special.delete(arg1, arg2) + val (arg1, arg2) = genArgs2 + 
js.JSDelete(arg1, arg2) + + case FORIN => + /* js.special.forin(arg1, arg2) + * + * We must generate: + * + * val obj = arg1 + * val f = arg2 + * for (val key in obj) { + * f(key) + * } + * + * with temporary vals, because `arg2` must be evaluated only + * once, and after `arg1`. + */ + val (arg1, arg2) = genArgs2 + val objVarDef = js.VarDef(freshLocalIdent("obj"), NoOriginalName, + jstpe.AnyType, mutable = false, arg1) + val fVarDef = js.VarDef(freshLocalIdent("f"), NoOriginalName, + jstpe.AnyType, mutable = false, arg2) + val keyVarIdent = freshLocalIdent("key") + val keyVarRef = js.VarRef(keyVarIdent)(jstpe.AnyType) + js.Block( + objVarDef, + fVarDef, + js.ForIn(objVarDef.ref, keyVarIdent, NoOriginalName, { + js.JSFunctionApply(fVarDef.ref, List(keyVarRef)) + })) + + case JS_THROW => + // js.special.throw(arg) + js.Throw(genArgs1) + + case JS_TRY_CATCH => + /* js.special.tryCatch(arg1, arg2) + * + * We must generate: + * + * val body = arg1 + * val handler = arg2 + * try { + * body() + * } catch (e) { + * handler(e) + * } + * + * with temporary vals, because `arg2` must be evaluated before + * `body` executes. Moreover, exceptions thrown while evaluating + * the function values `arg1` and `arg2` must not be caught. 
+ */ + val (arg1, arg2) = genArgs2 + val bodyVarDef = js.VarDef(freshLocalIdent("body"), NoOriginalName, + jstpe.AnyType, mutable = false, arg1) + val handlerVarDef = js.VarDef(freshLocalIdent("handler"), NoOriginalName, + jstpe.AnyType, mutable = false, arg2) + val exceptionVarIdent = freshLocalIdent("e") + val exceptionVarRef = js.VarRef(exceptionVarIdent)(jstpe.AnyType) + js.Block( + bodyVarDef, + handlerVarDef, + js.TryCatch( + js.JSFunctionApply(bodyVarDef.ref, Nil), + exceptionVarIdent, + NoOriginalName, + js.JSFunctionApply(handlerVarDef.ref, List(exceptionVarRef)) + )(jstpe.AnyType) + ) + + case WRAP_AS_THROWABLE => + // js.special.wrapAsThrowable(arg) + js.WrapAsThrowable(genArgs1) + + case UNWRAP_FROM_THROWABLE => + // js.special.unwrapFromThrowable(arg) + js.UnwrapFromThrowable(genArgs1) + + case UNION_FROM | UNION_FROM_TYPE_CONSTRUCTOR => + /* js.|.from and js.|.fromTypeConstructor + * We should not have to deal with those. They have a perfectly valid + * user-space implementation. However, the Dotty type checker inserts + * way too many of those, even when they are completely unnecessary. + * That still wouldn't be an issue ... if only it did not insert them + * around the default getters to their parameters! But even there it + * does it (although the types are, by construction, *equivalent*!), + * and that kills our `UndefinedParam` treatment. So we have to handle + * those two methods as primitives to completely eliminate them. + * + * Hopefully this will become unnecessary when/if we manage to + * reinterpret js.| as a true Dotty union type. + */ + genArgs2._1 + + case REFLECT_SELECTABLE_SELECTDYN => + // scala.reflect.Selectable.selectDynamic + genReflectiveCall(tree, isSelectDynamic = true) + case REFLECT_SELECTABLE_APPLYDYN => + // scala.reflect.Selectable.applyDynamic + genReflectiveCall(tree, isSelectDynamic = false) + } + } + + /** Gen the SJSIR for a reflective call. 
+ * + * Reflective calls are calls to a structural type field or method that + * involve a reflective Selectable. They look like the following in source + * code: + * {{{ + * import scala.reflect.Selectable.reflectiveSelectable + * + * type Structural = { + * val foo: Int + * def bar(x: Int, y: String): String + * } + * + * val structural: Structural = new { + * val foo: Int = 5 + * def bar(x: Int, y: String): String = x.toString + y + * } + * + * structural.foo + * structural.bar(6, "hello") + * }}} + * + * After expansion by the Scala 3 rules for structural member selections and + * calls, they look like + * + * {{{ + * reflectiveSelectable(structural).selectDynamic("foo") + * reflectiveSelectable(structural).applyDynamic("bar", + * classOf[Int], classOf[String] + * )( + * 6, "hello" + * ) + * }}} + * + * When the original `structural` value is already of a subtype of + * `scala.reflect.Selectable`, there is no conversion involved. There could + * also be any other arbitrary conversion, such as the deprecated bridge for + * Scala 2's `import scala.language.reflectiveCalls`. 
In general, the shape + * is therefore the following, for some `selectable: reflect.Selectable`: + * + * {{{ + * selectable.selectDynamic("foo") + * selectable.applyDynamic("bar", + * classOf[Int], classOf[String] + * )( + * 6, "hello" + * ) + * }}} + * + * and eventually reaches the back-end as + * + * {{{ + * selectable.selectDynamic("foo") // same as above + * selectable.applyDynamic("bar", + * wrapRefArray([ classOf[Int], classOf[String] : jl.Class ] + * )( + * genericWrapArray([ Int.box(6), "hello" : Object ]) + * ) + * }}} + * + * In SJSIR, they must be encoded as follows: + * + * {{{ + * selectable.selectedValue;O().foo;R() + * selectable.selectedValue;O().bar;I;Ljava.lang.String;R( + * Int.box(6).asInstanceOf[int], + * "hello".asInstanceOf[java.lang.String] + * ) + * }}} + * + * where `selectedValue;O()` is declared in `scala.reflect.Selectable` and + * holds the actual instance on which to perform the reflective operations. + * For the typical use case from the first snippet, it returns `structural`. + * + * This means that we must deconstruct the elaborated calls to recover: + * + * - the method name as a compile-time string `foo` or `bar` + * - the `tp: Type`s that have been wrapped in `classOf[tp]`, as a + * compile-time List[Type], from which we'll derive `jstpe.Type`s for the + * `asInstanceOf`s and `jstpe.TypeRef`s for the `MethodName.reflectiveProxy` + * - the actual arguments as a compile-time `List[Tree]` + * + * Virtually all of the code in `genReflectiveCall` deals with recovering + * those elements. Constructing the IR Tree is the easy part after that. 
+ */ + private def genReflectiveCall(tree: Apply, isSelectDynamic: Boolean): js.Tree = { + implicit val pos = tree.span + val Apply(fun @ Select(receiver, _), args) = tree: @unchecked + + val selectedValueTree = js.Apply(js.ApplyFlags.empty, genExpr(receiver), + js.MethodIdent(selectedValueMethodName), Nil)(jstpe.AnyType) + + // Extract the method name as a String + val methodNameStr = args.head match { + case Literal(Constants.Constant(name: String)) => + name + case _ => + report.error( + "The method name given to Selectable.selectDynamic or Selectable.applyDynamic " + + "must be a literal string. " + + "Other uses are not supported in Scala.js.", + args.head.sourcePos) + "erroneous" + } + + val (formalParamTypeRefs, actualArgs) = if (isSelectDynamic) { + (Nil, Nil) + } else { + // Extract the param type refs and actual args from the 2nd and 3rd argument to applyDynamic + args.tail match { + case WrapArray(classOfsArray: JavaSeqLiteral) :: WrapArray(actualArgsAnyArray: JavaSeqLiteral) :: Nil => + // Extract jstpe.Type's and jstpe.TypeRef's from the classOf[_] trees + val formalParamTypesAndTypeRefs = classOfsArray.elems.map { + // classOf[tp] -> tp + case Literal(const) if const.tag == Constants.ClazzTag => + toIRTypeAndTypeRef(const.typeValue) + // Anything else is invalid + case otherTree => + report.error( + "The java.lang.Class[_] arguments passed to Selectable.applyDynamic must be " + + "literal classOf[T] expressions (typically compiler-generated). 
" + + "Other uses are not supported in Scala.js.", + otherTree.sourcePos) + (jstpe.AnyType, jstpe.ClassRef(jsNames.ObjectClass)) + } + + // Gen the actual args, downcasting them to the formal param types + val actualArgs = actualArgsAnyArray.elems.zip(formalParamTypesAndTypeRefs).map { + (actualArgAny, formalParamTypeAndTypeRef) => + val genActualArgAny = genExpr(actualArgAny) + genAsInstanceOf(genActualArgAny, formalParamTypeAndTypeRef._1)(genActualArgAny.pos) + } + + (formalParamTypesAndTypeRefs.map(pair => toParamOrResultTypeRef(pair._2)), actualArgs) + + case _ => + report.error( + "Passing the varargs of Selectable.applyDynamic with `: _*` " + + "is not supported in Scala.js.", + tree.sourcePos) + (Nil, Nil) + } + } + + val methodName = MethodName.reflectiveProxy(methodNameStr, formalParamTypeRefs) + + js.Apply(js.ApplyFlags.empty, selectedValueTree, js.MethodIdent(methodName), actualArgs)(jstpe.AnyType) + } + + /** Gen actual actual arguments to Scala method call. + * Returns a list of the transformed arguments. + * + * This tries to optimize repeated arguments (varargs) by turning them + * into js.WrappedArray instead of Scala wrapped arrays. + */ + private def genActualArgs(sym: Symbol, args: List[Tree])( + implicit pos: Position): List[js.Tree] = { + args.map(genExpr) + /*val wereRepeated = exitingPhase(currentRun.typerPhase) { + sym.tpe.params.map(p => isScalaRepeatedParamType(p.tpe)) + } + + if (wereRepeated.size > args.size) { + // Should not happen, but let's not crash + args.map(genExpr) + } else { + /* Arguments that are in excess compared to the type signature after + * erasure are lambda-lifted arguments. They cannot be repeated, hence + * the extension to `false`. 
+ */ + for ((arg, wasRepeated) <- args.zipAll(wereRepeated, EmptyTree, false)) yield { + if (wasRepeated) { + tryGenRepeatedParamAsJSArray(arg, handleNil = false).fold { + genExpr(arg) + } { genArgs => + genNew(WrappedArrayClass, WrappedArray_ctor, + List(js.JSArrayConstr(genArgs))) + } + } else { + genExpr(arg) + } + } + }*/ + } + + /** Gen actual actual arguments to a JS method call. + * Returns a list of the transformed arguments. + * + * - TODO Repeated arguments (varargs) are expanded + * - Default arguments are omitted or replaced by undefined + * - All arguments are boxed + * + * Repeated arguments that cannot be expanded at compile time (i.e., if a + * Seq is passed to a varargs parameter with the syntax `seq: _*`) will be + * wrapped in a [[js.JSSpread]] node to be expanded at runtime. + */ + private def genActualJSArgs(sym: Symbol, args: List[Tree])( + implicit pos: Position): List[js.TreeOrJSSpread] = { + + var reversedArgs: List[js.TreeOrJSSpread] = Nil + + for ((arg, info) <- args.zip(sym.jsParamInfos)) { + if (info.repeated) { + reversedArgs = genJSRepeatedParam(arg) reverse_::: reversedArgs + } else if (info.capture) { + // Ignore captures + assert(sym.isClassConstructor, + i"Found a capture param in method ${sym.fullName}, which is not a class constructor, at $pos") + } else { + val unboxedArg = genExpr(arg) + val boxedArg = unboxedArg match { + case js.Transient(UndefinedParam) => + unboxedArg + case _ => + box(unboxedArg, info.info) + } + reversedArgs ::= boxedArg + } + } + + /* Remove all consecutive UndefinedParam's at the end of the argument + * list. No check is performed whether they may be there, since they will + * only be placed where default arguments can be anyway. + */ + reversedArgs = reversedArgs.dropWhile(_.isInstanceOf[js.Transient]) + + /* Find remaining UndefinedParam and replace by js.Undefined. This can + * happen with named arguments or with multiple argument lists. 
+ */ + reversedArgs = reversedArgs map { + case js.Transient(UndefinedParam) => js.Undefined() + case arg => arg + } + + reversedArgs.reverse + } + + /** Gen JS code for a repeated param of a JS method. + * + * In this case `arg` has type `Seq[T]` for some `T`, but the result should + * be an expanded list of the elements in the sequence. So this method + * takes care of the conversion. + * + * It is specialized for the shapes of tree generated by the desugaring + * of repeated params in Scala, so that these are actually expanded at + * compile-time. + * + * Otherwise, it returns a `JSSpread` with the `Seq` converted to a + * `js.Array`. + */ + private def genJSRepeatedParam(arg: Tree): List[js.TreeOrJSSpread] = { + tryGenRepeatedParamAsJSArray(arg, handleNil = true).getOrElse { + /* Fall back to calling runtime.genTraversableOnce2jsArray + * to perform the conversion to js.Array, then wrap in a Spread + * operator. + */ + implicit val pos: SourcePosition = arg.sourcePos + val jsArrayArg = genModuleApplyMethod( + jsdefn.Runtime_toJSVarArgs, + List(genExpr(arg))) + List(js.JSSpread(jsArrayArg)) + } + } + + /** Try and expand an actual argument to a repeated param `(xs: T*)`. + * + * This method recognizes the shapes of tree generated by the desugaring + * of repeated params in Scala, and expands them. + * If `arg` does not have the shape of a generated repeated param, this + * method returns `None`. + */ + private def tryGenRepeatedParamAsJSArray(arg: Tree, + handleNil: Boolean): Option[List[js.Tree]] = { + implicit val pos = arg.span + + // Given a method `def foo(args: T*)` + arg match { + // foo(arg1, arg2, ..., argN) where N > 0 + case MaybeAsInstanceOf(WrapArray(MaybeAsInstanceOf(array: JavaSeqLiteral))) => + /* Value classes in arrays are already boxed, so no need to use + * the type before erasure. + * TODO Is this true in dotty? 
+ */ + Some(array.elems.map(e => box(genExpr(e), e.tpe))) + + // foo() + case Ident(_) if handleNil && arg.symbol == defn.NilModule => + Some(Nil) + + // foo(argSeq: _*) - cannot be optimized + case _ => + None + } + } + + private object MaybeAsInstanceOf { + def unapply(tree: Tree): Some[Tree] = tree match { + case TypeApply(asInstanceOf_? @ Select(base, _), _) + if asInstanceOf_?.symbol == defn.Any_asInstanceOf => + Some(base) + case _ => + Some(tree) + } + } + + private object WrapArray { + lazy val isWrapArray: Set[Symbol] = { + val names0 = defn.ScalaValueClasses().map(sym => nme.wrapXArray(sym.name)) + val names1 = names0 ++ Set(nme.wrapRefArray, nme.genericWrapArray) + val symsInPredef = names1.map(defn.ScalaPredefModule.requiredMethod(_)) + val symsInScalaRunTime = names1.map(defn.ScalaRuntimeModule.requiredMethod(_)) + (symsInPredef ++ symsInScalaRunTime).toSet + } + + def unapply(tree: Apply): Option[Tree] = tree match { + case Apply(wrapArray_?, List(wrapped)) if isWrapArray(wrapArray_?.symbol) => + Some(wrapped) + case _ => + None + } + } + + /** Wraps a `js.Array` to use as varargs. */ + def genJSArrayToVarArgs(arrayRef: js.Tree)(implicit pos: SourcePosition): js.Tree = + genModuleApplyMethod(jsdefn.Runtime_toScalaVarArgs, List(arrayRef)) + + /** Gen the actual capture values for a JS constructor based on its fake `new` invocation. */ + private def genCaptureValuesFromFakeNewInstance(tree: Tree): List[js.Tree] = { + implicit val pos: Position = tree.span + + val Apply(fun @ Select(New(_), _), args) = tree: @unchecked + val sym = fun.symbol + + /* We use the same strategy as genActualJSArgs to detect which parameters were + * introduced by explicitouter or lambdalift (but reversed, of course). 
+ */ + + val existedBeforeUncurry = atPhase(elimRepeatedPhase) { + sym.info.paramNamess.flatten.toSet + } + + for { + (arg, paramName) <- args.zip(sym.info.paramNamess.flatten) + if !existedBeforeUncurry(paramName) + } yield { + genExpr(arg) + } + } + + private def genVarRef(sym: Symbol)(implicit pos: Position): js.VarRef = + js.VarRef(encodeLocalSym(sym))(toIRType(sym.info)) + + private def genAssignableField(sym: Symbol, qualifier: Tree)(implicit pos: SourcePosition): (js.AssignLhs, Boolean) = { + def qual = genExpr(qualifier) + + if (sym.owner.isNonNativeJSClass) { + val f = if (sym.isJSExposed) { + js.JSSelect(qual, genExpr(sym.jsName)) + } else if (sym.owner.isAnonymousClass) { + js.JSSelect( + js.JSSelect(qual, genPrivateFieldsSymbol()), + encodeFieldSymAsStringLiteral(sym)) + } else { + js.JSPrivateSelect(qual, encodeClassName(sym.owner), + encodeFieldSym(sym)) + } + + (f, true) + } else if (sym.hasAnnotation(jsdefn.JSExportTopLevelAnnot)) { + val f = js.SelectStatic(encodeClassName(sym.owner), encodeFieldSym(sym))(jstpe.AnyType) + (f, true) + } else if (sym.hasAnnotation(jsdefn.JSExportStaticAnnot)) { + val jsName = sym.getAnnotation(jsdefn.JSExportStaticAnnot).get.argumentConstantString(0).getOrElse { + sym.defaultJSName + } + val companionClass = sym.owner.linkedClass + val f = js.JSSelect(genLoadJSConstructor(companionClass), js.StringLiteral(jsName)) + (f, true) + } else { + val className = encodeClassName(sym.owner) + val fieldIdent = encodeFieldSym(sym) + + /* #4370 Fields cannot have type NothingType, so we box them as + * scala.runtime.Nothing$ instead. They will be initialized with + * `null`, and any attempt to access them will throw a + * `ClassCastException` (generated in the unboxing code). 
+ */ + val (irType, boxed) = toIRType(sym.info) match + case jstpe.NothingType => + (encodeClassType(defn.NothingClass), true) + case ftpe => + (ftpe, false) + + val f = + if sym.is(JavaStatic) then + js.SelectStatic(className, fieldIdent)(irType) + else + js.Select(qual, className, fieldIdent)(irType) + + (f, boxed) + } + } + + /** Gen JS code for loading a Java static field. + */ + private def genLoadStaticField(sym: Symbol)(implicit pos: SourcePosition): js.Tree = { + /* Actually, there is no static member in Scala.js. If we come here, that + * is because we found the symbol in a Java-emitted .class in the + * classpath. But the corresponding implementation in Scala.js will + * actually be a val in the companion module. + */ + + if (sym == defn.BoxedUnit_UNIT) { + js.Undefined() + } else if (sym == defn.BoxedUnit_TYPE) { + js.ClassOf(jstpe.VoidRef) + } else { + val className = encodeClassName(sym.owner) + val method = encodeStaticMemberSym(sym) + js.ApplyStatic(js.ApplyFlags.empty, className, method, Nil)(toIRType(sym.info)) + } + } + + /** Generates a call to `runtime.privateFieldsSymbol()` */ + private def genPrivateFieldsSymbol()(implicit pos: SourcePosition): js.Tree = + genModuleApplyMethod(jsdefn.Runtime_privateFieldsSymbol, Nil) + + /** Generate loading of a module value. + * + * Can be given either the module symbol or its module class symbol. + * + * If the module we load refers to the global scope (i.e., it is + * annotated with `@JSGlobalScope`), report a compile error specifying + * that a global scope object should only be used as the qualifier of a + * `.`-selection. + */ + def genLoadModule(sym: Symbol)(implicit pos: SourcePosition): js.Tree = + ruleOutGlobalScope(genLoadModuleOrGlobalScope(sym)) + + /** Generate loading of a module value or the global scope. + * + * Can be given either the module symbol of its module class symbol. + * + * Unlike `genLoadModule`, this method does not fail if the module we load + * refers to the global scope. 
+ */ + def genLoadModuleOrGlobalScope(sym0: Symbol)( + implicit pos: SourcePosition): MaybeGlobalScope = { + + require(sym0.is(Module), + "genLoadModule called with non-module symbol: " + sym0) + val sym = if (sym0.isTerm) sym0.moduleClass else sym0 + + // Does that module refer to the global scope? + if (sym.hasAnnotation(jsdefn.JSGlobalScopeAnnot)) { + MaybeGlobalScope.GlobalScope(pos) + } else { + val cls = encodeClassName(sym) + val tree = + if (sym.isJSType) js.LoadJSModule(cls) + else js.LoadModule(cls) + MaybeGlobalScope.NotGlobalScope(tree) + } + } + + /** Gen JS code representing the constructor of a JS class. */ + private def genLoadJSConstructor(sym: Symbol)( + implicit pos: Position): js.Tree = { + assert(!isStaticModule(sym) && !sym.is(Trait), + s"genLoadJSConstructor called with non-class $sym") + js.LoadJSConstructor(encodeClassName(sym)) + } + + private inline val GenericGlobalObjectInformationMsg = { + "\n " + + "See https://www.scala-js.org/doc/interoperability/global-scope.html " + + "for further information." + } + + /** Rule out the `GlobalScope` case of a `MaybeGlobalScope` and extract the + * value tree. + * + * If `tree` represents the global scope, report a compile error. + */ + private def ruleOutGlobalScope(tree: MaybeGlobalScope): js.Tree = { + tree match { + case MaybeGlobalScope.NotGlobalScope(t) => + t + case MaybeGlobalScope.GlobalScope(pos) => + reportErrorLoadGlobalScope()(pos) + } + } + + /** Report a compile error specifying that the global scope cannot be + * loaded as a value. + */ + private def reportErrorLoadGlobalScope()(implicit pos: SourcePosition): js.Tree = { + report.error( + "Loading the global scope as a value (anywhere but as the " + + "left-hand-side of a `.`-selection) is not allowed." + + GenericGlobalObjectInformationMsg, + pos) + js.Undefined() + } + + /** Gen a JS bracket select or a `JSGlobalRef`. + * + * If the receiver is a normal value, i.e., not the global scope, then + * emit a `JSSelect`. 
+ * + * Otherwise, if the `item` is a constant string that is a valid + * JavaScript identifier, emit a `JSGlobalRef`. + * + * Otherwise, report a compile error. + */ + private def genJSSelectOrGlobalRef(qual: MaybeGlobalScope, item: js.Tree)( + implicit pos: SourcePosition): js.AssignLhs = { + qual match { + case MaybeGlobalScope.NotGlobalScope(qualTree) => + js.JSSelect(qualTree, item) + + case MaybeGlobalScope.GlobalScope(_) => + item match { + case js.StringLiteral(value) => + if (js.JSGlobalRef.isValidJSGlobalRefName(value)) { + js.JSGlobalRef(value) + } else if (js.JSGlobalRef.ReservedJSIdentifierNames.contains(value)) { + report.error( + "Invalid selection in the global scope of the reserved " + + s"identifier name `$value`." + + GenericGlobalObjectInformationMsg, + pos) + js.JSGlobalRef("erroneous") + } else { + report.error( + "Selecting a field of the global scope whose name is " + + "not a valid JavaScript identifier is not allowed." + + GenericGlobalObjectInformationMsg, + pos) + js.JSGlobalRef("erroneous") + } + + case _ => + report.error( + "Selecting a field of the global scope with a dynamic " + + "name is not allowed." + + GenericGlobalObjectInformationMsg, + pos) + js.JSGlobalRef("erroneous") + } + } + } + + /** Gen a JS bracket method apply or an apply of a `GlobalRef`. + * + * If the receiver is a normal value, i.e., not the global scope, then + * emit a `JSMethodApply`. + * + * Otherwise, if the `method` is a constant string that is a valid + * JavaScript identifier, emit a `JSFunctionApply(JSGlobalRef(...), ...)`. + * + * Otherwise, report a compile error. 
+ */ + private def genJSMethodApplyOrGlobalRefApply( + receiver: MaybeGlobalScope, method: js.Tree, args: List[js.TreeOrJSSpread])( + implicit pos: SourcePosition): js.Tree = { + receiver match { + case MaybeGlobalScope.NotGlobalScope(receiverTree) => + js.JSMethodApply(receiverTree, method, args) + + case MaybeGlobalScope.GlobalScope(_) => + method match { + case js.StringLiteral(value) => + if (js.JSGlobalRef.isValidJSGlobalRefName(value)) { + js.JSFunctionApply(js.JSGlobalRef(value), args) + } else if (js.JSGlobalRef.ReservedJSIdentifierNames.contains(value)) { + report.error( + "Invalid call in the global scope of the reserved " + + s"identifier name `$value`." + + GenericGlobalObjectInformationMsg, + pos) + js.Undefined() + } else { + report.error( + "Calling a method of the global scope whose name is not " + + "a valid JavaScript identifier is not allowed." + + GenericGlobalObjectInformationMsg, + pos) + js.Undefined() + } + + case _ => + report.error( + "Calling a method of the global scope with a dynamic " + + "name is not allowed." + + GenericGlobalObjectInformationMsg, + pos) + js.Undefined() + } + } + } + + private def computeJSNativeLoadSpecOfValDef(sym: Symbol): js.JSNativeLoadSpec = { + atPhaseBeforeTransforms { + computeJSNativeLoadSpecOfInPhase(sym) + } + } + + private def computeJSNativeLoadSpecOfClass(sym: Symbol): Option[js.JSNativeLoadSpec] = { + if (sym.is(Trait) || sym.hasAnnotation(jsdefn.JSGlobalScopeAnnot)) { + None + } else { + atPhaseBeforeTransforms { + if (sym.owner.isStaticOwner) + Some(computeJSNativeLoadSpecOfInPhase(sym)) + else + None + } + } + } + + private def computeJSNativeLoadSpecOfInPhase(sym: Symbol)(using Context): js.JSNativeLoadSpec = { + import js.JSNativeLoadSpec._ + + val symOwner = sym.owner + + // Marks a code path as unexpected because it should have been reported as an error in `PrepJSInterop`. 
+ def unexpected(msg: String): Nothing = + throw new FatalError(i"$msg for ${sym.fullName} at ${sym.srcPos}") + + if (symOwner.hasAnnotation(jsdefn.JSNativeAnnot)) { + val jsName = sym.jsName match { + case JSName.Literal(jsName) => jsName + case JSName.Computed(_) => unexpected("could not read the simple JS name as a string literal") + } + + if (symOwner.hasAnnotation(jsdefn.JSGlobalScopeAnnot)) { + Global(jsName, Nil) + } else { + val ownerLoadSpec = computeJSNativeLoadSpecOfInPhase(symOwner) + ownerLoadSpec match { + case Global(globalRef, path) => + Global(globalRef, path :+ jsName) + case Import(module, path) => + Import(module, path :+ jsName) + case ImportWithGlobalFallback(Import(module, modulePath), Global(globalRef, globalPath)) => + ImportWithGlobalFallback( + Import(module, modulePath :+ jsName), + Global(globalRef, globalPath :+ jsName)) + } + } + } else { + def parsePath(pathName: String): List[String] = + pathName.split('.').toList + + def parseGlobalPath(pathName: String): Global = { + val globalRef :: path = parsePath(pathName): @unchecked + Global(globalRef, path) + } + + val annot = sym.annotations.find { annot => + annot.symbol == jsdefn.JSGlobalAnnot || annot.symbol == jsdefn.JSImportAnnot + }.getOrElse { + unexpected("could not find the JS native load spec annotation") + } + + if (annot.symbol == jsdefn.JSGlobalAnnot) { + val pathName = annot.argumentConstantString(0).getOrElse { + sym.defaultJSName + } + parseGlobalPath(pathName) + } else { // annot.symbol == jsdefn.JSImportAnnot + val module = annot.argumentConstantString(0).getOrElse { + unexpected("could not read the module argument as a string literal") + } + val path = annot.argumentConstantString(1).fold { + if (annot.arguments.sizeIs < 2) + parsePath(sym.defaultJSName) + else + Nil + } { pathName => + parsePath(pathName) + } + val importSpec = Import(module, path) + annot.argumentConstantString(2).fold[js.JSNativeLoadSpec] { + importSpec + } { globalPathName => + 
ImportWithGlobalFallback(importSpec, parseGlobalPath(globalPathName)) + } + } + } + } + + private def isMethodStaticInIR(sym: Symbol): Boolean = + sym.is(JavaStatic) + + /** Generate a Class[_] value (e.g. coming from classOf[T]) */ + private def genClassConstant(tpe: Type)(implicit pos: Position): js.Tree = + js.ClassOf(toTypeRef(tpe)) + + private def isStaticModule(sym: Symbol): Boolean = + sym.is(Module) && sym.isStatic + + private def isPrimitiveValueType(tpe: Type): Boolean = { + tpe.widenDealias match { + case JavaArrayType(_) => false + case _: ErasedValueType => false + case t => t.typeSymbol.asClass.isPrimitiveValueClass + } + } + + protected lazy val isHijackedClass: Set[Symbol] = { + /* This list is a duplicate of ir.Definitions.HijackedClasses, but + * with global.Symbol's instead of IR encoded names as Strings. + * We also add java.lang.Void, which BoxedUnit "erases" to. + */ + Set[Symbol]( + defn.BoxedUnitClass, defn.BoxedBooleanClass, defn.BoxedCharClass, defn.BoxedByteClass, + defn.BoxedShortClass, defn.BoxedIntClass, defn.BoxedLongClass, defn.BoxedFloatClass, + defn.BoxedDoubleClass, defn.StringClass, jsdefn.JavaLangVoidClass + ) + } + + private def isMaybeJavaScriptException(tpe: Type): Boolean = + jsdefn.JavaScriptExceptionClass.isSubClass(tpe.typeSymbol) + + private def hasDefaultCtorArgsAndJSModule(classSym: Symbol): Boolean = { + def hasNativeCompanion = + classSym.companionModule.moduleClass.hasAnnotation(jsdefn.JSNativeAnnot) + def hasDefaultParameters = + classSym.info.decls.exists(sym => sym.isClassConstructor && sym.hasDefaultParams) + + hasNativeCompanion && hasDefaultParameters + } + + // Copied from DottyBackendInterface + + private val desugared = new java.util.IdentityHashMap[Type, tpd.Select] + + def desugarIdent(i: Ident): Option[tpd.Select] = { + var found = desugared.get(i.tpe) + if (found == null) { + tpd.desugarIdent(i) match { + case sel: tpd.Select => + desugared.put(i.tpe, sel) + found = sel + case _ => + } + } + if (found 
== null) None else Some(found) + } +} + +object JSCodeGen { + + private val NullPointerExceptionClass = ClassName("java.lang.NullPointerException") + private val JSObjectClassName = ClassName("scala.scalajs.js.Object") + private val JavaScriptExceptionClassName = ClassName("scala.scalajs.js.JavaScriptException") + + private val ObjectClassRef = jstpe.ClassRef(ir.Names.ObjectClass) + + private val newSimpleMethodName = SimpleMethodName("new") + + private val selectedValueMethodName = MethodName("selectedValue", Nil, ObjectClassRef) + + private val ObjectArgConstructorName = MethodName.constructor(List(ObjectClassRef)) + + private val thisOriginalName = OriginalName("this") + + sealed abstract class MaybeGlobalScope + + object MaybeGlobalScope { + final case class NotGlobalScope(tree: js.Tree) extends MaybeGlobalScope + + final case class GlobalScope(pos: SourcePosition) extends MaybeGlobalScope + } + + /** Marker object for undefined parameters in JavaScript semantic calls. + * + * To be used inside a `js.Transient` node. + */ + case object UndefinedParam extends js.Transient.Value { + val tpe: jstpe.Type = jstpe.UndefType + + def traverse(traverser: ir.Traversers.Traverser): Unit = () + + def transform(transformer: ir.Transformers.Transformer, isStat: Boolean)( + implicit pos: ir.Position): js.Tree = { + js.Transient(this) + } + + def printIR(out: ir.Printers.IRTreePrinter): Unit = + out.print("") + } + + /** Info about a default param accessor. + * + * The method must have a default getter name for this class to make sense. + */ + private class DefaultParamInfo(sym: Symbol)(using Context) { + private val methodName = sym.name.exclude(DefaultGetterName) + + def isForConstructor: Boolean = methodName == nme.CONSTRUCTOR + + /** When `isForConstructor` is true, returns the owner of the attached + * constructor. 
+ */ + def constructorOwner: Symbol = sym.owner.linkedClass + + /** When `isForConstructor` is false, returns the method attached to the + * specified default accessor. + */ + def attachedMethod: Symbol = { + // If there are overloads, we need to find the one that has default params. + val overloads = sym.owner.info.decl(methodName) + if (!overloads.isOverloaded) + overloads.symbol + else + overloads.suchThat(_.is(HasDefaultParams, butNot = Bridge)).symbol + } + } + +} diff --git a/tests/pos-with-compiler-cc/backend/sjs/JSDefinitions.scala b/tests/pos-with-compiler-cc/backend/sjs/JSDefinitions.scala new file mode 100644 index 000000000000..964811c69e19 --- /dev/null +++ b/tests/pos-with-compiler-cc/backend/sjs/JSDefinitions.scala @@ -0,0 +1,340 @@ +package dotty.tools.backend.sjs + +import scala.language.unsafeNulls + +import scala.annotation.threadUnsafe + +import dotty.tools.dotc.core._ +import Names._ +import Types._ +import Contexts._ +import Symbols._ +import StdNames._ + +import dotty.tools.dotc.config.SJSPlatform + +object JSDefinitions { + /** The Scala.js-specific definitions for the current context. 
*/ + def jsdefn(using Context): JSDefinitions = + ctx.platform.asInstanceOf[SJSPlatform].jsDefinitions +} + +final class JSDefinitions()(using DetachedContext) { + + @threadUnsafe lazy val InlineAnnotType: TypeRef = requiredClassRef("scala.inline") + def InlineAnnot(using Context) = InlineAnnotType.symbol.asClass + @threadUnsafe lazy val NoinlineAnnotType: TypeRef = requiredClassRef("scala.noinline") + def NoinlineAnnot(using Context) = NoinlineAnnotType.symbol.asClass + + @threadUnsafe lazy val JavaLangVoidType: TypeRef = requiredClassRef("java.lang.Void") + def JavaLangVoidClass(using Context) = JavaLangVoidType.symbol.asClass + + @threadUnsafe lazy val ScalaJSJSPackageVal = requiredPackage("scala.scalajs.js") + @threadUnsafe lazy val ScalaJSJSPackageClass = ScalaJSJSPackageVal.moduleClass.asClass + @threadUnsafe lazy val JSPackage_typeOfR = ScalaJSJSPackageClass.requiredMethodRef("typeOf") + def JSPackage_typeOf(using Context) = JSPackage_typeOfR.symbol + @threadUnsafe lazy val JSPackage_constructorOfR = ScalaJSJSPackageClass.requiredMethodRef("constructorOf") + def JSPackage_constructorOf(using Context) = JSPackage_constructorOfR.symbol + @threadUnsafe lazy val JSPackage_nativeR = ScalaJSJSPackageClass.requiredMethodRef("native") + def JSPackage_native(using Context) = JSPackage_nativeR.symbol + @threadUnsafe lazy val JSPackage_undefinedR = ScalaJSJSPackageClass.requiredMethodRef("undefined") + def JSPackage_undefined(using Context) = JSPackage_undefinedR.symbol + @threadUnsafe lazy val JSPackage_dynamicImportR = ScalaJSJSPackageClass.requiredMethodRef("dynamicImport") + def JSPackage_dynamicImport(using Context) = JSPackage_dynamicImportR.symbol + + @threadUnsafe lazy val JSNativeAnnotType: TypeRef = requiredClassRef("scala.scalajs.js.native") + def JSNativeAnnot(using Context) = JSNativeAnnotType.symbol.asClass + + @threadUnsafe lazy val JSAnyType: TypeRef = requiredClassRef("scala.scalajs.js.Any") + def JSAnyClass(using Context) = JSAnyType.symbol.asClass + 
@threadUnsafe lazy val JSObjectType: TypeRef = requiredClassRef("scala.scalajs.js.Object") + def JSObjectClass(using Context) = JSObjectType.symbol.asClass + @threadUnsafe lazy val JSFunctionType: TypeRef = requiredClassRef("scala.scalajs.js.Function") + def JSFunctionClass(using Context) = JSFunctionType.symbol.asClass + @threadUnsafe lazy val JSThisFunctionType: TypeRef = requiredClassRef("scala.scalajs.js.ThisFunction") + def JSThisFunctionClass(using Context) = JSThisFunctionType.symbol.asClass + + @threadUnsafe lazy val PseudoUnionType: TypeRef = requiredClassRef("scala.scalajs.js.|") + def PseudoUnionClass(using Context) = PseudoUnionType.symbol.asClass + + @threadUnsafe lazy val PseudoUnionModuleRef = requiredModuleRef("scala.scalajs.js.|") + def PseudoUnionModule(using Context) = PseudoUnionModuleRef.symbol + @threadUnsafe lazy val PseudoUnion_fromR = PseudoUnionModule.requiredMethodRef("from") + def PseudoUnion_from(using Context) = PseudoUnion_fromR.symbol + @threadUnsafe lazy val PseudoUnion_fromTypeConstructorR = PseudoUnionModule.requiredMethodRef("fromTypeConstructor") + def PseudoUnion_fromTypeConstructor(using Context) = PseudoUnion_fromTypeConstructorR.symbol + + @threadUnsafe lazy val UnionOpsModuleRef = requiredModuleRef("scala.scalajs.js.internal.UnitOps") + + @threadUnsafe lazy val JSArrayType: TypeRef = requiredClassRef("scala.scalajs.js.Array") + def JSArrayClass(using Context) = JSArrayType.symbol.asClass + @threadUnsafe lazy val JSDynamicType: TypeRef = requiredClassRef("scala.scalajs.js.Dynamic") + def JSDynamicClass(using Context) = JSDynamicType.symbol.asClass + + @threadUnsafe lazy val RuntimeExceptionType: TypeRef = requiredClassRef("java.lang.RuntimeException") + def RuntimeExceptionClass(using Context) = RuntimeExceptionType.symbol.asClass + @threadUnsafe lazy val JavaScriptExceptionType: TypeRef = requiredClassRef("scala.scalajs.js.JavaScriptException") + def JavaScriptExceptionClass(using Context) = 
JavaScriptExceptionType.symbol.asClass + + @threadUnsafe lazy val JSGlobalAnnotType: TypeRef = requiredClassRef("scala.scalajs.js.annotation.JSGlobal") + def JSGlobalAnnot(using Context) = JSGlobalAnnotType.symbol.asClass + @threadUnsafe lazy val JSImportAnnotType: TypeRef = requiredClassRef("scala.scalajs.js.annotation.JSImport") + def JSImportAnnot(using Context) = JSImportAnnotType.symbol.asClass + @threadUnsafe lazy val JSGlobalScopeAnnotType: TypeRef = requiredClassRef("scala.scalajs.js.annotation.JSGlobalScope") + def JSGlobalScopeAnnot(using Context) = JSGlobalScopeAnnotType.symbol.asClass + @threadUnsafe lazy val JSNameAnnotType: TypeRef = requiredClassRef("scala.scalajs.js.annotation.JSName") + def JSNameAnnot(using Context) = JSNameAnnotType.symbol.asClass + @threadUnsafe lazy val JSFullNameAnnotType: TypeRef = requiredClassRef("scala.scalajs.js.annotation.JSFullName") + def JSFullNameAnnot(using Context) = JSFullNameAnnotType.symbol.asClass + @threadUnsafe lazy val JSBracketAccessAnnotType: TypeRef = requiredClassRef("scala.scalajs.js.annotation.JSBracketAccess") + def JSBracketAccessAnnot(using Context) = JSBracketAccessAnnotType.symbol.asClass + @threadUnsafe lazy val JSBracketCallAnnotType: TypeRef = requiredClassRef("scala.scalajs.js.annotation.JSBracketCall") + def JSBracketCallAnnot(using Context) = JSBracketCallAnnotType.symbol.asClass + @threadUnsafe lazy val JSExportTopLevelAnnotType: TypeRef = requiredClassRef("scala.scalajs.js.annotation.JSExportTopLevel") + def JSExportTopLevelAnnot(using Context) = JSExportTopLevelAnnotType.symbol.asClass + @threadUnsafe lazy val JSExportAnnotType: TypeRef = requiredClassRef("scala.scalajs.js.annotation.JSExport") + def JSExportAnnot(using Context) = JSExportAnnotType.symbol.asClass + @threadUnsafe lazy val JSExportStaticAnnotType: TypeRef = requiredClassRef("scala.scalajs.js.annotation.JSExportStatic") + def JSExportStaticAnnot(using Context) = JSExportStaticAnnotType.symbol.asClass + @threadUnsafe lazy val 
JSExportAllAnnotType: TypeRef = requiredClassRef("scala.scalajs.js.annotation.JSExportAll") + def JSExportAllAnnot(using Context) = JSExportAllAnnotType.symbol.asClass + + def JSAnnotPackage(using Context) = JSGlobalAnnot.owner.asClass + + @threadUnsafe lazy val JSTypeAnnotType: TypeRef = requiredClassRef("scala.scalajs.js.annotation.internal.JSType") + def JSTypeAnnot(using Context) = JSTypeAnnotType.symbol.asClass + @threadUnsafe lazy val JSOptionalAnnotType: TypeRef = requiredClassRef("scala.scalajs.js.annotation.internal.JSOptional") + def JSOptionalAnnot(using Context) = JSOptionalAnnotType.symbol.asClass + @threadUnsafe lazy val ExposedJSMemberAnnotType: TypeRef = requiredClassRef("scala.scalajs.js.annotation.internal.ExposedJSMember") + def ExposedJSMemberAnnot(using Context) = ExposedJSMemberAnnotType.symbol.asClass + + @threadUnsafe lazy val JSImportNamespaceModuleRef = requiredModuleRef("scala.scalajs.js.annotation.JSImport.Namespace") + def JSImportNamespaceModule(using Context) = JSImportNamespaceModuleRef.symbol + + @threadUnsafe lazy val JSAnyModuleRef = requiredModuleRef("scala.scalajs.js.Any") + def JSAnyModule(using Context) = JSAnyModuleRef.symbol + @threadUnsafe lazy val JSAny_fromFunctionR = (0 to 22).map(n => JSAnyModule.requiredMethodRef("fromFunction" + n)).toArray + def JSAny_fromFunction(n: Int)(using Context) = JSAny_fromFunctionR(n).symbol + + @threadUnsafe lazy val JSDynamicModuleRef = requiredModuleRef("scala.scalajs.js.Dynamic") + def JSDynamicModule(using Context) = JSDynamicModuleRef.symbol + @threadUnsafe lazy val JSDynamic_globalR = JSDynamicModule.requiredMethodRef("global") + def JSDynamic_global(using Context) = JSDynamic_globalR.symbol + @threadUnsafe lazy val JSDynamic_newInstanceR = JSDynamicModule.requiredMethodRef("newInstance") + def JSDynamic_newInstance(using Context) = JSDynamic_newInstanceR.symbol + + @threadUnsafe lazy val JSDynamicLiteralModuleRef = JSDynamicModule.moduleClass.requiredValueRef("literal") + def 
JSDynamicLiteralModule(using Context) = JSDynamicLiteralModuleRef.symbol + @threadUnsafe lazy val JSDynamicLiteral_applyDynamicNamedR = JSDynamicLiteralModule.requiredMethodRef("applyDynamicNamed") + def JSDynamicLiteral_applyDynamicNamed(using Context) = JSDynamicLiteral_applyDynamicNamedR.symbol + @threadUnsafe lazy val JSDynamicLiteral_applyDynamicR = JSDynamicLiteralModule.requiredMethodRef("applyDynamic") + def JSDynamicLiteral_applyDynamic(using Context) = JSDynamicLiteral_applyDynamicR.symbol + + @threadUnsafe lazy val JSObjectModuleRef = requiredModuleRef("scala.scalajs.js.Object") + def JSObjectModule(using Context) = JSObjectModuleRef.symbol + + @threadUnsafe lazy val JSArrayModuleRef = requiredModuleRef("scala.scalajs.js.Array") + def JSArrayModule(using Context) = JSArrayModuleRef.symbol + @threadUnsafe lazy val JSArray_applyR = JSArrayModule.requiredMethodRef(nme.apply) + def JSArray_apply(using Context) = JSArray_applyR.symbol + + @threadUnsafe lazy val JSThisFunctionModuleRef = requiredModuleRef("scala.scalajs.js.ThisFunction") + def JSThisFunctionModule(using Context) = JSThisFunctionModuleRef.symbol + @threadUnsafe lazy val JSThisFunction_fromFunctionR = (1 to 22).map(n => JSThisFunctionModule.requiredMethodRef("fromFunction" + n)).toArray + def JSThisFunction_fromFunction(n: Int)(using Context) = JSThisFunction_fromFunctionR(n - 1).symbol + + @threadUnsafe lazy val JSConstructorTagModuleRef = requiredModuleRef("scala.scalajs.js.ConstructorTag") + def JSConstructorTagModule(using Context) = JSConstructorTagModuleRef.symbol + @threadUnsafe lazy val JSConstructorTag_materializeR = JSConstructorTagModule.requiredMethodRef("materialize") + def JSConstructorTag_materialize(using Context) = JSConstructorTag_materializeR.symbol + + @threadUnsafe lazy val JSNewModuleRef = requiredModuleRef("scala.scalajs.js.new") + def JSNewModule(using Context) = JSNewModuleRef.symbol + @threadUnsafe lazy val JSNew_targetR = JSNewModule.requiredMethodRef("target") + def 
JSNew_target(using Context) = JSNew_targetR.symbol + + @threadUnsafe lazy val JSImportModuleRef = requiredModuleRef("scala.scalajs.js.import") + def JSImportModule(using Context) = JSImportModuleRef.symbol + @threadUnsafe lazy val JSImport_applyR = JSImportModule.requiredMethodRef(nme.apply) + def JSImport_apply(using Context) = JSImport_applyR.symbol + @threadUnsafe lazy val JSImport_metaR = JSImportModule.requiredMethodRef("meta") + def JSImport_meta(using Context) = JSImport_metaR.symbol + + @threadUnsafe lazy val RuntimePackageVal = requiredPackage("scala.scalajs.runtime") + @threadUnsafe lazy val RuntimePackageClass = RuntimePackageVal.moduleClass.asClass + @threadUnsafe lazy val Runtime_toScalaVarArgsR = RuntimePackageClass.requiredMethodRef("toScalaVarArgs") + def Runtime_toScalaVarArgs(using Context) = Runtime_toScalaVarArgsR.symbol + @threadUnsafe lazy val Runtime_toJSVarArgsR = RuntimePackageClass.requiredMethodRef("toJSVarArgs") + def Runtime_toJSVarArgs(using Context) = Runtime_toJSVarArgsR.symbol + @threadUnsafe lazy val Runtime_privateFieldsSymbolR = RuntimePackageClass.requiredMethodRef("privateFieldsSymbol") + def Runtime_privateFieldsSymbol(using Context) = Runtime_privateFieldsSymbolR.symbol + @threadUnsafe lazy val Runtime_constructorOfR = RuntimePackageClass.requiredMethodRef("constructorOf") + def Runtime_constructorOf(using Context) = Runtime_constructorOfR.symbol + @threadUnsafe lazy val Runtime_newConstructorTagR = RuntimePackageClass.requiredMethodRef("newConstructorTag") + def Runtime_newConstructorTag(using Context) = Runtime_newConstructorTagR.symbol + @threadUnsafe lazy val Runtime_createInnerJSClassR = RuntimePackageClass.requiredMethodRef("createInnerJSClass") + def Runtime_createInnerJSClass(using Context) = Runtime_createInnerJSClassR.symbol + @threadUnsafe lazy val Runtime_createLocalJSClassR = RuntimePackageClass.requiredMethodRef("createLocalJSClass") + def Runtime_createLocalJSClass(using Context) = 
Runtime_createLocalJSClassR.symbol + @threadUnsafe lazy val Runtime_withContextualJSClassValueR = RuntimePackageClass.requiredMethodRef("withContextualJSClassValue") + def Runtime_withContextualJSClassValue(using Context) = Runtime_withContextualJSClassValueR.symbol + @threadUnsafe lazy val Runtime_linkingInfoR = RuntimePackageClass.requiredMethodRef("linkingInfo") + def Runtime_linkingInfo(using Context) = Runtime_linkingInfoR.symbol + @threadUnsafe lazy val Runtime_dynamicImportR = RuntimePackageClass.requiredMethodRef("dynamicImport") + def Runtime_dynamicImport(using Context) = Runtime_dynamicImportR.symbol + + @threadUnsafe lazy val DynamicImportThunkType: TypeRef = requiredClassRef("scala.scalajs.runtime.DynamicImportThunk") + def DynamicImportThunkClass(using Context) = DynamicImportThunkType.symbol.asClass + @threadUnsafe lazy val DynamicImportThunkClass_applyR = DynamicImportThunkClass.requiredMethodRef(nme.apply) + def DynamicImportThunkClass_apply(using Context) = DynamicImportThunkClass_applyR.symbol + + @threadUnsafe lazy val SpecialPackageVal = requiredPackage("scala.scalajs.js.special") + @threadUnsafe lazy val SpecialPackageClass = SpecialPackageVal.moduleClass.asClass + @threadUnsafe lazy val Special_debuggerR = SpecialPackageClass.requiredMethodRef("debugger") + def Special_debugger(using Context) = Special_debuggerR.symbol + @threadUnsafe lazy val Special_deleteR = SpecialPackageClass.requiredMethodRef("delete") + def Special_delete(using Context) = Special_deleteR.symbol + @threadUnsafe lazy val Special_forinR = SpecialPackageClass.requiredMethodRef("forin") + def Special_forin(using Context) = Special_forinR.symbol + @threadUnsafe lazy val Special_inR = SpecialPackageClass.requiredMethodRef("in") + def Special_in(using Context) = Special_inR.symbol + @threadUnsafe lazy val Special_instanceofR = SpecialPackageClass.requiredMethodRef("instanceof") + def Special_instanceof(using Context) = Special_instanceofR.symbol + @threadUnsafe lazy val 
Special_strictEqualsR = SpecialPackageClass.requiredMethodRef("strictEquals") + def Special_strictEquals(using Context) = Special_strictEqualsR.symbol + @threadUnsafe lazy val Special_throwR = SpecialPackageClass.requiredMethodRef("throw") + def Special_throw(using Context) = Special_throwR.symbol + @threadUnsafe lazy val Special_tryCatchR = SpecialPackageClass.requiredMethodRef("tryCatch") + def Special_tryCatch(using Context) = Special_tryCatchR.symbol + @threadUnsafe lazy val Special_wrapAsThrowableR = SpecialPackageClass.requiredMethodRef("wrapAsThrowable") + def Special_wrapAsThrowable(using Context) = Special_wrapAsThrowableR.symbol + @threadUnsafe lazy val Special_unwrapFromThrowableR = SpecialPackageClass.requiredMethodRef("unwrapFromThrowable") + def Special_unwrapFromThrowable(using Context) = Special_unwrapFromThrowableR.symbol + + @threadUnsafe lazy val WrappedArrayType: TypeRef = requiredClassRef("scala.scalajs.js.WrappedArray") + def WrappedArrayClass(using Context) = WrappedArrayType.symbol.asClass + + @threadUnsafe lazy val ScalaRunTime_isArrayR = defn.ScalaRuntimeModule.requiredMethodRef("isArray", List(???, ???)) + def ScalaRunTime_isArray(using Context): Symbol = ScalaRunTime_isArrayR.symbol + + @threadUnsafe lazy val BoxesRunTime_boxToCharacterR = defn.BoxesRunTimeModule.requiredMethodRef("boxToCharacter") + def BoxesRunTime_boxToCharacter(using Context): Symbol = BoxesRunTime_boxToCharacterR.symbol + @threadUnsafe lazy val BoxesRunTime_unboxToCharR = defn.BoxesRunTimeModule.requiredMethodRef("unboxToChar") + def BoxesRunTime_unboxToChar(using Context): Symbol = BoxesRunTime_unboxToCharR.symbol + + @threadUnsafe lazy val EnableReflectiveInstantiationAnnotType: TypeRef = requiredClassRef("scala.scalajs.reflect.annotation.EnableReflectiveInstantiation") + def EnableReflectiveInstantiationAnnot(using Context) = EnableReflectiveInstantiationAnnotType.symbol.asClass + + @threadUnsafe lazy val ReflectModuleRef = 
requiredModuleRef("scala.scalajs.reflect.Reflect") + def ReflectModule(using Context) = ReflectModuleRef.symbol + @threadUnsafe lazy val Reflect_registerLoadableModuleClassR = ReflectModule.requiredMethodRef("registerLoadableModuleClass") + def Reflect_registerLoadableModuleClass(using Context) = Reflect_registerLoadableModuleClassR.symbol + @threadUnsafe lazy val Reflect_registerInstantiatableClassR = ReflectModule.requiredMethodRef("registerInstantiatableClass") + def Reflect_registerInstantiatableClass(using Context) = Reflect_registerInstantiatableClassR.symbol + + @threadUnsafe lazy val ReflectSelectableType: TypeRef = requiredClassRef("scala.reflect.Selectable") + def ReflectSelectableClass(using Context) = ReflectSelectableType.symbol.asClass + @threadUnsafe lazy val ReflectSelectable_selectDynamicR = ReflectSelectableClass.requiredMethodRef("selectDynamic") + def ReflectSelectable_selectDynamic(using Context) = ReflectSelectable_selectDynamicR.symbol + @threadUnsafe lazy val ReflectSelectable_applyDynamicR = ReflectSelectableClass.requiredMethodRef("applyDynamic") + def ReflectSelectable_applyDynamic(using Context) = ReflectSelectable_applyDynamicR.symbol + + @threadUnsafe lazy val ReflectSelectableModuleRef = requiredModuleRef("scala.reflect.Selectable") + def ReflectSelectableModule(using Context) = ReflectSelectableModuleRef.symbol + @threadUnsafe lazy val ReflectSelectable_reflectiveSelectableR = ReflectSelectableModule.requiredMethodRef("reflectiveSelectable") + def ReflectSelectable_reflectiveSelectable(using Context) = ReflectSelectable_reflectiveSelectableR.symbol + + @threadUnsafe lazy val SelectableModuleRef = requiredModuleRef("scala.Selectable") + def SelectableModule(using Context) = SelectableModuleRef.symbol + @threadUnsafe lazy val Selectable_reflectiveSelectableFromLangReflectiveCallsR = SelectableModule.requiredMethodRef("reflectiveSelectableFromLangReflectiveCalls") + def Selectable_reflectiveSelectableFromLangReflectiveCalls(using 
Context) = Selectable_reflectiveSelectableFromLangReflectiveCallsR.symbol + + private var allRefClassesCache: Set[Symbol] = _ + def allRefClasses(using Context): Set[Symbol] = { + if (allRefClassesCache == null) { + val baseNames = List("Object", "Boolean", "Character", "Byte", "Short", + "Int", "Long", "Float", "Double") + val fullNames = baseNames.flatMap { base => + List(s"scala.runtime.${base}Ref", s"scala.runtime.Volatile${base}Ref") + } + allRefClassesCache = fullNames.map(name => requiredClass(name)).toSet + } + allRefClassesCache + } + + /** Definitions related to scala.Enumeration. */ + object scalaEnumeration { + val nmeValue = termName("Value") + val nmeVal = termName("Val") + val hasNext = termName("hasNext") + val next = termName("next") + + @threadUnsafe lazy val EnumerationClass = requiredClass("scala.Enumeration") + @threadUnsafe lazy val Enumeration_Value_NoArg = EnumerationClass.requiredValue(nmeValue) + @threadUnsafe lazy val Enumeration_Value_IntArg = EnumerationClass.requiredMethod(nmeValue, List(defn.IntType)) + @threadUnsafe lazy val Enumeration_Value_StringArg = EnumerationClass.requiredMethod(nmeValue, List(defn.StringType)) + @threadUnsafe lazy val Enumeration_Value_IntStringArg = EnumerationClass.requiredMethod(nmeValue, List(defn.IntType, defn.StringType)) + @threadUnsafe lazy val Enumeration_nextName = EnumerationClass.requiredMethod(termName("nextName")) + + @threadUnsafe lazy val EnumerationValClass = EnumerationClass.requiredClass("Val") + @threadUnsafe lazy val Enumeration_Val_NoArg = EnumerationValClass.requiredMethod(nme.CONSTRUCTOR, Nil) + @threadUnsafe lazy val Enumeration_Val_IntArg = EnumerationValClass.requiredMethod(nme.CONSTRUCTOR, List(defn.IntType)) + @threadUnsafe lazy val Enumeration_Val_StringArg = EnumerationValClass.requiredMethod(nme.CONSTRUCTOR, List(defn.StringType)) + @threadUnsafe lazy val Enumeration_Val_IntStringArg = EnumerationValClass.requiredMethod(nme.CONSTRUCTOR, List(defn.IntType, defn.StringType)) + + 
def isValueMethod(sym: Symbol)(using Context): Boolean = + sym.name == nmeValue && sym.owner == EnumerationClass + + def isValueMethodNoName(sym: Symbol)(using Context): Boolean = + isValueMethod(sym) && (sym == Enumeration_Value_NoArg || sym == Enumeration_Value_IntArg) + + def isValueMethodName(sym: Symbol)(using Context): Boolean = + isValueMethod(sym) && (sym == Enumeration_Value_StringArg || sym == Enumeration_Value_IntStringArg) + + def isValCtor(sym: Symbol)(using Context): Boolean = + sym.isClassConstructor && sym.owner == EnumerationValClass + + def isValCtorNoName(sym: Symbol)(using Context): Boolean = + isValCtor(sym) && (sym == Enumeration_Val_NoArg || sym == Enumeration_Val_IntArg) + + def isValCtorName(sym: Symbol)(using Context): Boolean = + isValCtor(sym) && (sym == Enumeration_Val_StringArg || sym == Enumeration_Val_IntStringArg) + } + + /** Definitions related to the treatment of JUnit bootstrappers. */ + object junit { + @threadUnsafe lazy val TestAnnotType: TypeRef = requiredClassRef("org.junit.Test") + def TestAnnotClass(using Context): ClassSymbol = TestAnnotType.symbol.asClass + + @threadUnsafe lazy val BeforeAnnotType: TypeRef = requiredClassRef("org.junit.Before") + def BeforeAnnotClass(using Context): ClassSymbol = BeforeAnnotType.symbol.asClass + + @threadUnsafe lazy val AfterAnnotType: TypeRef = requiredClassRef("org.junit.After") + def AfterAnnotClass(using Context): ClassSymbol = AfterAnnotType.symbol.asClass + + @threadUnsafe lazy val BeforeClassAnnotType: TypeRef = requiredClassRef("org.junit.BeforeClass") + def BeforeClassAnnotClass(using Context): ClassSymbol = BeforeClassAnnotType.symbol.asClass + + @threadUnsafe lazy val AfterClassAnnotType: TypeRef = requiredClassRef("org.junit.AfterClass") + def AfterClassAnnotClass(using Context): ClassSymbol = AfterClassAnnotType.symbol.asClass + + @threadUnsafe lazy val IgnoreAnnotType: TypeRef = requiredClassRef("org.junit.Ignore") + def IgnoreAnnotClass(using Context): ClassSymbol = 
IgnoreAnnotType.symbol.asClass + + @threadUnsafe lazy val BootstrapperType: TypeRef = requiredClassRef("org.scalajs.junit.Bootstrapper") + + @threadUnsafe lazy val TestMetadataType: TypeRef = requiredClassRef("org.scalajs.junit.TestMetadata") + + @threadUnsafe lazy val NoSuchMethodExceptionType: TypeRef = requiredClassRef("java.lang.NoSuchMethodException") + + @threadUnsafe lazy val FutureType: TypeRef = requiredClassRef("scala.concurrent.Future") + def FutureClass(using Context): ClassSymbol = FutureType.symbol.asClass + + @threadUnsafe private lazy val FutureModule_successfulR = requiredModule("scala.concurrent.Future").requiredMethodRef("successful") + def FutureModule_successful(using Context): Symbol = FutureModule_successfulR.symbol + + @threadUnsafe private lazy val SuccessModule_applyR = requiredModule("scala.util.Success").requiredMethodRef(nme.apply) + def SuccessModule_apply(using Context): Symbol = SuccessModule_applyR.symbol + } + +} diff --git a/tests/pos-with-compiler-cc/backend/sjs/JSEncoding.scala b/tests/pos-with-compiler-cc/backend/sjs/JSEncoding.scala new file mode 100644 index 000000000000..73a150c60290 --- /dev/null +++ b/tests/pos-with-compiler-cc/backend/sjs/JSEncoding.scala @@ -0,0 +1,428 @@ +package dotty.tools.backend.sjs + +import scala.language.unsafeNulls + +import scala.collection.mutable + +import dotty.tools.dotc.core._ +import Contexts._ +import Flags._ +import Types._ +import Symbols._ +import NameOps._ +import Names._ +import StdNames._ + +import dotty.tools.dotc.transform.sjs.JSSymUtils._ + +import org.scalajs.ir +import org.scalajs.ir.{Trees => js, Types => jstpe} +import org.scalajs.ir.Names.{LocalName, LabelName, FieldName, SimpleMethodName, MethodName, ClassName} +import org.scalajs.ir.OriginalName +import org.scalajs.ir.OriginalName.NoOriginalName +import org.scalajs.ir.UTF8String + +import dotty.tools.backend.jvm.DottyBackendInterface.symExtensions + +import JSDefinitions.jsdefn + +/** Encoding of symbol names for 
JavaScript + * + * Some issues that this encoding solves: + * * Overloading: encode the full signature in the JS name + * * Same scope for fields and methods of a class + * * Global access to classes and modules (by their full name) + * + * @author Sébastien Doeraene + */ +object JSEncoding { + + /** Name of the capture param storing the JS super class. + * + * This is used by the dispatchers of exposed JS methods and properties of + * nested JS classes when they need to perform a super call. Other super + * calls (in the actual bodies of the methods, not in the dispatchers) do + * not use this value, since they are implemented as static methods that do + * not have access to it. Instead, they get the JS super class value through + * the magic method inserted by `ExplicitLocalJS`, leveraging `lambdalift` + * to ensure that it is properly captured. + * + * Using this identifier is only allowed if it was reserved in the current + * local name scope using [[reserveLocalName]]. Otherwise, this name can + * clash with another local identifier. 
+ */ + final val JSSuperClassParamName = LocalName("superClass$") + + private val ScalaRuntimeNothingClassName = ClassName("scala.runtime.Nothing$") + private val ScalaRuntimeNullClassName = ClassName("scala.runtime.Null$") + + private val dynamicImportForwarderSimpleName = SimpleMethodName("dynamicImport$") + + // Fresh local name generator ---------------------------------------------- + + class LocalNameGenerator { + import LocalNameGenerator._ + + private val usedLocalNames = mutable.Set.empty[LocalName] + private val localSymbolNames = mutable.Map.empty[Symbol, LocalName] + private val usedLabelNames = mutable.Set.empty[LabelName] + private val labelSymbolNames = mutable.Map.empty[Symbol, LabelName] + private var returnLabelName: Option[LabelName] = None + + def reserveLocalName(name: LocalName): Unit = { + require(usedLocalNames.isEmpty, + s"Trying to reserve the name '$name' but names have already been allocated") + usedLocalNames += name + } + + private def freshNameGeneric[N <: ir.Names.Name](base: N, usedNamesSet: mutable.Set[N])( + withSuffix: (N, String) => N): N = { + + var suffix = 1 + var result = base + while (usedNamesSet(result)) { + suffix += 1 + result = withSuffix(base, "$" + suffix) + } + usedNamesSet += result + result + } + + def freshName(base: LocalName): LocalName = + freshNameGeneric(base, usedLocalNames)(_.withSuffix(_)) + + def freshName(base: String): LocalName = + freshName(LocalName(base)) + + def freshLocalIdent()(implicit pos: ir.Position): js.LocalIdent = + js.LocalIdent(freshName(xLocalName)) + + def freshLocalIdent(base: LocalName)(implicit pos: ir.Position): js.LocalIdent = + js.LocalIdent(freshName(base)) + + def freshLocalIdent(base: String)(implicit pos: ir.Position): js.LocalIdent = + freshLocalIdent(LocalName(base)) + + def freshLocalIdent(base: TermName)(implicit pos: ir.Position): js.LocalIdent = + freshLocalIdent(base.mangledString) + + def localSymbolName(sym: Symbol)(using Context): LocalName = { + 
localSymbolNames.getOrElseUpdate(sym, { + /* The emitter does not like local variables that start with a '$', + * because it needs to encode them not to clash with emitter-generated + * names. There are two common cases, caused by scalac-generated names: + * - the `$this` parameter of tailrec methods and "extension" methods of + * AnyVals, which scalac knows as `nme.SELF`, and + * - the `$outer` parameter of inner class constructors, which scalac + * knows as `nme.OUTER`. + * We choose different base names for those two cases instead, so that + * the avoidance mechanism of the emitter doesn't happen as a common + * case. It can still happen for user-defined variables, but in that case + * the emitter will deal with it. + */ + val base = sym.name match { + case nme.SELF => "this$" // instead of $this + case nme.OUTER => "outer" // instead of $outer + case name => name.mangledString + } + freshName(base) + }) + } + + def freshLabelName(base: LabelName): LabelName = + freshNameGeneric(base, usedLabelNames)(_.withSuffix(_)) + + def freshLabelName(base: String): LabelName = + freshLabelName(LabelName(base)) + + def freshLabelIdent(base: String)(implicit pos: ir.Position): js.LabelIdent = + js.LabelIdent(freshLabelName(base)) + + def labelSymbolName(sym: Symbol)(using Context): LabelName = + labelSymbolNames.getOrElseUpdate(sym, freshLabelName(sym.javaSimpleName)) + + def getEnclosingReturnLabel()(implicit pos: ir.Position): js.LabelIdent = { + if (returnLabelName.isEmpty) + returnLabelName = Some(freshLabelName("_return")) + js.LabelIdent(returnLabelName.get) + } + + /* If this `LocalNameGenerator` has a `returnLabelName` (often added in the + * construction of the `body` argument), wrap the resulting js.Tree to use that label. 
+ */ + def makeLabeledIfRequiresEnclosingReturn(tpe: jstpe.Type)(body: js.Tree)(implicit pos: ir.Position): js.Tree = { + returnLabelName match { + case None => + body + case Some(labelName) => + js.Labeled(js.LabelIdent(labelName), tpe, body) + } + } + } + + private object LocalNameGenerator { + private val xLocalName = LocalName("x") + } + + // Encoding methods ---------------------------------------------------------- + + def encodeLabelSym(sym: Symbol)( + implicit ctx: Context, pos: ir.Position, localNames: LocalNameGenerator): js.LabelIdent = { + require(sym.is(Flags.Label), "encodeLabelSym called with non-label symbol: " + sym) + js.LabelIdent(localNames.labelSymbolName(sym)) + } + + def encodeFieldSym(sym: Symbol)(implicit ctx: Context, pos: ir.Position): js.FieldIdent = + js.FieldIdent(FieldName(encodeFieldSymAsString(sym))) + + def encodeFieldSymAsStringLiteral(sym: Symbol)(implicit ctx: Context, pos: ir.Position): js.StringLiteral = + js.StringLiteral(encodeFieldSymAsString(sym)) + + private def encodeFieldSymAsString(sym: Symbol)(using Context): String = { + require(sym.owner.isClass && sym.isTerm && !sym.isOneOf(MethodOrModule), + "encodeFieldSym called with non-field symbol: " + sym) + + val name0 = sym.javaSimpleName + if (name0.charAt(name0.length() - 1) != ' ') name0 + else name0.substring(0, name0.length() - 1) + } + + def encodeMethodSym(sym: Symbol, reflProxy: Boolean = false)( + implicit ctx: Context, pos: ir.Position): js.MethodIdent = { + require(sym.is(Flags.Method), "encodeMethodSym called with non-method symbol: " + sym) + + val tpe = sym.info + + val paramTypeRefs0 = tpe.firstParamTypes.map(paramOrResultTypeRef(_)) + + val hasExplicitThisParameter = !sym.is(JavaStatic) && sym.owner.isNonNativeJSClass + val paramTypeRefs = + if (!hasExplicitThisParameter) paramTypeRefs0 + else encodeClassRef(sym.owner) :: paramTypeRefs0 + + val name = sym.name + val simpleName = SimpleMethodName(name.mangledString) + + val methodName = { + if 
(sym.isClassConstructor) + MethodName.constructor(paramTypeRefs) + else if (reflProxy) + MethodName.reflectiveProxy(simpleName, paramTypeRefs) + else + MethodName(simpleName, paramTypeRefs, paramOrResultTypeRef(patchedResultType(sym))) + } + + js.MethodIdent(methodName) + } + + def encodeJSNativeMemberSym(sym: Symbol)(using Context, ir.Position): js.MethodIdent = { + require(sym.hasAnnotation(jsdefn.JSNativeAnnot), + "encodeJSNativeMemberSym called with non-native symbol: " + sym) + if (sym.is(Method)) + encodeMethodSym(sym) + else + encodeFieldSymAsMethod(sym) + } + + def encodeStaticMemberSym(sym: Symbol)(using Context, ir.Position): js.MethodIdent = { + require(sym.is(Flags.JavaStaticTerm), + "encodeStaticMemberSym called with non-static symbol: " + sym) + encodeFieldSymAsMethod(sym) + } + + private def encodeFieldSymAsMethod(sym: Symbol)(using Context, ir.Position): js.MethodIdent = { + val name = sym.name + val resultTypeRef = paramOrResultTypeRef(sym.info) + val methodName = MethodName(name.mangledString, Nil, resultTypeRef) + js.MethodIdent(methodName) + } + + def encodeDynamicImportForwarderIdent(params: List[Symbol])(using Context, ir.Position): js.MethodIdent = { + val paramTypeRefs = params.map(sym => paramOrResultTypeRef(sym.info)) + val resultTypeRef = jstpe.ClassRef(ir.Names.ObjectClass) + val methodName = MethodName(dynamicImportForwarderSimpleName, paramTypeRefs, resultTypeRef) + js.MethodIdent(methodName) + } + + /** Computes the type ref for a type, to be used in a method signature. 
*/ + private def paramOrResultTypeRef(tpe: Type)(using Context): jstpe.TypeRef = + toParamOrResultTypeRef(toTypeRef(tpe)) + + def encodeLocalSym(sym: Symbol)( + implicit ctx: Context, pos: ir.Position, localNames: LocalNameGenerator): js.LocalIdent = { + require(!sym.owner.isClass && sym.isTerm && !sym.is(Flags.Method) && !sym.is(Flags.Module), + "encodeLocalSym called with non-local symbol: " + sym) + js.LocalIdent(localNames.localSymbolName(sym)) + } + + def encodeClassType(sym: Symbol)(using Context): jstpe.Type = { + if (sym == defn.ObjectClass) jstpe.AnyType + else if (sym.isJSType) jstpe.AnyType + else { + assert(sym != defn.ArrayClass, + "encodeClassType() cannot be called with ArrayClass") + jstpe.ClassType(encodeClassName(sym)) + } + } + + def encodeClassRef(sym: Symbol)(using Context): jstpe.ClassRef = + jstpe.ClassRef(encodeClassName(sym)) + + def encodeClassNameIdent(sym: Symbol)( + implicit ctx: Context, pos: ir.Position): js.ClassIdent = + js.ClassIdent(encodeClassName(sym)) + + def encodeClassName(sym: Symbol)(using Context): ClassName = { + val sym1 = + if (sym.isAllOf(ModuleClass | JavaDefined)) sym.linkedClass + else sym + + /* Some rewirings: + * - scala.runtime.BoxedUnit to java.lang.Void, as the IR expects. + * BoxedUnit$ is a JVM artifact. + * - scala.Nothing to scala.runtime.Nothing$. + * - scala.Null to scala.runtime.Null$. + */ + if (sym1 == defn.BoxedUnitClass) + ir.Names.BoxedUnitClass + else if (sym1 == defn.NothingClass) + ScalaRuntimeNothingClassName + else if (sym1 == defn.NullClass) + ScalaRuntimeNullClassName + else + ClassName(sym1.javaClassName) + } + + /** Converts a general TypeRef to a TypeRef to be used in a method signature. 
*/ + def toParamOrResultTypeRef(typeRef: jstpe.TypeRef): jstpe.TypeRef = { + typeRef match { + case jstpe.ClassRef(ScalaRuntimeNullClassName) => jstpe.NullRef + case jstpe.ClassRef(ScalaRuntimeNothingClassName) => jstpe.NothingRef + case _ => typeRef + } + } + + def toIRTypeAndTypeRef(tp: Type)(using Context): (jstpe.Type, jstpe.TypeRef) = { + val typeRefInternal = toTypeRefInternal(tp) + (toIRTypeInternal(typeRefInternal), typeRefInternal._1) + } + + def toIRType(tp: Type)(using Context): jstpe.Type = + toIRTypeInternal(toTypeRefInternal(tp)) + + private def toIRTypeInternal(typeRefInternal: (jstpe.TypeRef, Symbol))(using Context): jstpe.Type = { + typeRefInternal._1 match { + case jstpe.PrimRef(irTpe) => + irTpe + + case typeRef: jstpe.ClassRef => + val sym = typeRefInternal._2 + if (sym == defn.ObjectClass || sym.isJSType) + jstpe.AnyType + else if (sym == defn.NothingClass) + jstpe.NothingType + else if (sym == defn.NullClass) + jstpe.NullType + else + jstpe.ClassType(typeRef.className) + + case typeRef: jstpe.ArrayTypeRef => + jstpe.ArrayType(typeRef) + } + } + + def toTypeRef(tp: Type)(using Context): jstpe.TypeRef = + toTypeRefInternal(tp)._1 + + private def toTypeRefInternal(tp: Type)(using Context): (jstpe.TypeRef, Symbol) = { + def primitiveOrClassToTypeRef(sym: Symbol): (jstpe.TypeRef, Symbol) = { + assert(sym.isClass, sym) + //assert(sym != defn.ArrayClass || isCompilingArray, sym) + val typeRef = if (sym.isPrimitiveValueClass) { + if (sym == defn.UnitClass) jstpe.VoidRef + else if (sym == defn.BooleanClass) jstpe.BooleanRef + else if (sym == defn.CharClass) jstpe.CharRef + else if (sym == defn.ByteClass) jstpe.ByteRef + else if (sym == defn.ShortClass) jstpe.ShortRef + else if (sym == defn.IntClass) jstpe.IntRef + else if (sym == defn.LongClass) jstpe.LongRef + else if (sym == defn.FloatClass) jstpe.FloatRef + else if (sym == defn.DoubleClass) jstpe.DoubleRef + else throw new Exception(s"unknown primitive value class $sym") + } else { + 
encodeClassRef(sym) + } + (typeRef, sym) + } + + /** + * When compiling Array.scala, the type parameter T is not erased and shows up in method + * signatures, e.g. `def apply(i: Int): T`. A TyperRef to T is replaced by ObjectReference. + */ + def nonClassTypeRefToTypeRef(sym: Symbol): (jstpe.TypeRef, Symbol) = { + //assert(sym.isType && isCompilingArray, sym) + (jstpe.ClassRef(ir.Names.ObjectClass), defn.ObjectClass) + } + + tp.widenDealias match { + // Array type such as Array[Int] (kept by erasure) + case JavaArrayType(el) => + val elTypeRef = toTypeRefInternal(el) + (jstpe.ArrayTypeRef.of(elTypeRef._1), elTypeRef._2) + + case t: TypeRef => + if (!t.symbol.isClass) nonClassTypeRefToTypeRef(t.symbol) // See comment on nonClassTypeRefToBType + else primitiveOrClassToTypeRef(t.symbol) // Common reference to a type such as scala.Int or java.lang.String + + case Types.ClassInfo(_, sym, _, _, _) => + /* We get here, for example, for genLoadModule, which invokes + * toTypeKind(moduleClassSymbol.info) + */ + primitiveOrClassToTypeRef(sym) + + /* AnnotatedType should (probably) be eliminated by erasure. However we know it happens for + * meta-annotated annotations (@(ann @getter) val x = 0), so we don't emit a warning. + * The type in the AnnotationInfo is an AnnotatedTpe. Tested in jvm/annotations.scala. + */ + case a @ AnnotatedType(t, _) => + //debuglog(s"typeKind of annotated type $a") + toTypeRefInternal(t) + } + } + + /** Patches the result type of a method symbol to sanitize it. + * + * For some reason, dotc thinks that the `info.resultType`of an + * `isConstructor` method (for classes or traits) is the enclosing class + * or trait, but the bodies and usages act as if the result type was `Unit`. + * + * This method returns `UnitType` for constructor methods, and otherwise + * `sym.info.resultType`. 
+ */ + def patchedResultType(sym: Symbol)(using Context): Type = + if (sym.isConstructor) defn.UnitType + else sym.info.resultType + + def originalNameOfLocal(sym: Symbol)( + implicit ctx: Context, localNames: LocalNameGenerator): OriginalName = { + val irName = localNames.localSymbolName(sym) + val originalName = UTF8String(sym.name.unexpandedName.toString) + if (UTF8String.equals(originalName, irName.encoded)) NoOriginalName + else OriginalName(originalName) + } + + def originalNameOfField(sym: Symbol)(using Context): OriginalName = + originalNameOf(sym.name) + + def originalNameOfMethod(sym: Symbol)(using Context): OriginalName = + originalNameOf(sym.name) + + def originalNameOfClass(sym: Symbol)(using Context): OriginalName = + originalNameOf(sym.fullName) + + private def originalNameOf(name: Name): OriginalName = { + val originalName = name.unexpandedName.toString + if (originalName == name.mangledString) NoOriginalName + else OriginalName(originalName) + } +} diff --git a/tests/pos-with-compiler-cc/backend/sjs/JSExportsGen.scala b/tests/pos-with-compiler-cc/backend/sjs/JSExportsGen.scala new file mode 100644 index 000000000000..78412999bb34 --- /dev/null +++ b/tests/pos-with-compiler-cc/backend/sjs/JSExportsGen.scala @@ -0,0 +1,1025 @@ +package dotty.tools.backend.sjs + +import scala.language.unsafeNulls + +import scala.annotation.tailrec +import scala.collection.mutable + +import dotty.tools.dotc.core._ + +import Contexts._ +import Decorators._ +import Denotations._ +import Flags._ +import Names._ +import NameKinds.DefaultGetterName +import NameOps._ +import Phases._ +import Symbols._ +import Types._ +import TypeErasure.ErasedValueType + +import dotty.tools.dotc.util.{SourcePosition, SrcPos} +import dotty.tools.dotc.report + +import org.scalajs.ir.{Position, Names => jsNames, Trees => js, Types => jstpe} +import org.scalajs.ir.Names.DefaultModuleID +import org.scalajs.ir.OriginalName.NoOriginalName +import org.scalajs.ir.Position.NoPosition +import 
org.scalajs.ir.Trees.OptimizerHints + +import dotty.tools.dotc.transform.sjs.JSExportUtils._ +import dotty.tools.dotc.transform.sjs.JSSymUtils._ + +import JSEncoding._ + +final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { + import jsCodeGen._ + import positionConversions._ + + /** Info for a non-member export. */ + sealed trait ExportInfo { + val pos: SourcePosition + } + + final case class TopLevelExportInfo(moduleID: String, jsName: String)(val pos: SourcePosition) extends ExportInfo + final case class StaticExportInfo(jsName: String)(val pos: SourcePosition) extends ExportInfo + + private sealed trait ExportKind + + private object ExportKind { + case object Module extends ExportKind + case object JSClass extends ExportKind + case object Constructor extends ExportKind + case object Method extends ExportKind + case object Property extends ExportKind + case object Field extends ExportKind + + def apply(sym: Symbol): ExportKind = { + if (sym.is(Flags.Module) && sym.isStatic) Module + else if (sym.isClass) JSClass + else if (sym.isConstructor) Constructor + else if (!sym.is(Flags.Method)) Field + else if (sym.isJSProperty) Property + else Method + } + } + + private def topLevelExportsOf(sym: Symbol): List[TopLevelExportInfo] = { + def isScalaClass(sym: Symbol): Boolean = + sym.isClass && !sym.isOneOf(Module | Trait) && !sym.isJSType + + if (isScalaClass(sym)) { + // Scala classes are never exported; their constructors are + Nil + } else if (sym.is(Accessor) || sym.is(Module, butNot = ModuleClass)) { + /* - Accessors receive the `@JSExportTopLevel` annotation of their associated field, + * but only the field is really exported. + * - Module values are not exported; their module class takes care of the export. 
+ */ + Nil + } else { + val symForAnnot = + if (sym.isConstructor && isScalaClass(sym.owner)) sym.owner + else sym + + symForAnnot.annotations.collect { + case annot if annot.symbol == jsdefn.JSExportTopLevelAnnot => + val jsName = annot.argumentConstantString(0).get + val moduleID = annot.argumentConstantString(1).getOrElse(DefaultModuleID) + TopLevelExportInfo(moduleID, jsName)(annot.tree.sourcePos) + } + } + } + + private def staticExportsOf(sym: Symbol): List[StaticExportInfo] = { + if (sym.is(Accessor)) { + Nil + } else { + sym.annotations.collect { + case annot if annot.symbol == jsdefn.JSExportStaticAnnot => + val jsName = annot.argumentConstantString(0).getOrElse { + sym.defaultJSName + } + StaticExportInfo(jsName)(annot.tree.sourcePos) + } + } + } + + private def checkSameKind(tups: List[(ExportInfo, Symbol)]): Option[ExportKind] = { + assert(tups.nonEmpty, "must have at least one export") + + val firstSym = tups.head._2 + val overallKind = ExportKind(firstSym) + var bad = false + + for ((info, sym) <- tups.tail) { + val kind = ExportKind(sym) + + if (kind != overallKind) { + bad = true + report.error( + em"export overload conflicts with export of $firstSym: they are of different types (${kind.tryToShow} / ${overallKind.tryToShow})", + info.pos) + } + } + + if (bad) None + else Some(overallKind) + } + + private def checkSingleField(tups: List[(ExportInfo, Symbol)]): Symbol = { + assert(tups.nonEmpty, "must have at least one export") + + val firstSym = tups.head._2 + + for ((info, _) <- tups.tail) { + report.error( + em"export overload conflicts with export of $firstSym: a field may not share its exported name with another export", + info.pos) + } + + firstSym + } + + def genTopLevelExports(classSym: ClassSymbol): List[js.TopLevelExportDef] = { + val exports = for { + sym <- classSym :: classSym.info.decls.toList + info <- topLevelExportsOf(sym) + } yield { + (info, sym) + } + + (for { + (info, tups) <- exports.groupBy(_._1) + kind <- checkSameKind(tups) + 
} yield { + import ExportKind._ + + implicit val pos = info.pos + + kind match { + case Module => + js.TopLevelModuleExportDef(info.moduleID, info.jsName) + + case JSClass => + assert(classSym.isNonNativeJSClass, "found export on non-JS class") + js.TopLevelJSClassExportDef(info.moduleID, info.jsName) + + case Constructor | Method => + val exported = tups.map(_._2) + + val methodDef = withNewLocalNameScope { + genExportMethod(exported, JSName.Literal(info.jsName), static = true) + } + + js.TopLevelMethodExportDef(info.moduleID, methodDef) + + case Property => + throw new AssertionError("found top-level exported property") + + case Field => + val sym = checkSingleField(tups) + js.TopLevelFieldExportDef(info.moduleID, info.jsName, encodeFieldSym(sym)) + } + }).toList + } + + def genStaticExports(classSym: Symbol): List[js.MemberDef] = { + val exports = for { + sym <- classSym.info.decls.toList + info <- staticExportsOf(sym) + } yield { + (info, sym) + } + + (for { + (info, tups) <- exports.groupBy(_._1) + kind <- checkSameKind(tups) + } yield { + def alts = tups.map(_._2) + + implicit val pos = info.pos + + import ExportKind._ + + kind match { + case Method => + genMemberExportOrDispatcher(JSName.Literal(info.jsName), isProp = false, alts, static = true) + + case Property => + genMemberExportOrDispatcher(JSName.Literal(info.jsName), isProp = true, alts, static = true) + + case Field => + val sym = checkSingleField(tups) + + // static fields must always be mutable + val flags = js.MemberFlags.empty + .withNamespace(js.MemberNamespace.PublicStatic) + .withMutable(true) + val name = js.StringLiteral(info.jsName) + val irTpe = genExposedFieldIRType(sym) + js.JSFieldDef(flags, name, irTpe) + + case kind => + throw new AssertionError(s"unexpected static export kind: $kind") + } + }).toList + } + + /** Generates exported methods and properties for a class. 
+ * + * @param classSym symbol of the class we export for + */ + def genMemberExports(classSym: ClassSymbol): List[js.MemberDef] = { + val classInfo = classSym.info + val allExports = classInfo.memberDenots(takeAllFilter, { (name, buf) => + if (isExportName(name)) + buf ++= classInfo.member(name).alternatives + }) + + val newlyDeclaredExports = if (classSym.superClass == NoSymbol) { + allExports + } else { + allExports.filterNot { denot => + classSym.superClass.info.member(denot.name).hasAltWith(_.info =:= denot.info) + } + } + + val newlyDeclaredExportNames = newlyDeclaredExports.map(_.name.toTermName).toList.distinct + + newlyDeclaredExportNames.map(genMemberExport(classSym, _)) + } + + private def genMemberExport(classSym: ClassSymbol, name: TermName): js.MemberDef = { + /* This used to be `.member(name)`, but it caused #3538, since we were + * sometimes selecting mixin forwarders, whose type history does not go + * far enough back in time to see varargs. We now explicitly exclude + * mixed-in members in addition to bridge methods (the latter are always + * excluded by `.member(name)`). + */ + val alts = classSym + .findMemberNoShadowingBasedOnFlags(name, classSym.appliedRef, required = Method, excluded = Bridge | MixedIn) + .alternatives + + assert(!alts.isEmpty, + em"""Ended up with no alternatives for ${classSym.fullName}::$name. 
+ |Original set was ${alts} with types ${alts.map(_.info)}""") + + val (jsName, isProp) = exportNameInfo(name) + + // Check if we have a conflicting export of the other kind + val conflicting = classSym.info.member(makeExportName(jsName, !isProp)) + + if (conflicting.exists) { + val kind = if (isProp) "property" else "method" + val conflictingMember = conflicting.alternatives.head.symbol.fullName + val errorPos: SrcPos = alts.map(_.symbol).filter(_.owner == classSym) match { + case Nil => classSym + case altsInClass => altsInClass.minBy(_.span.point) + } + report.error(em"Exported $kind $jsName conflicts with $conflictingMember", errorPos) + } + + genMemberExportOrDispatcher(JSName.Literal(jsName), isProp, alts.map(_.symbol), static = false) + } + + def genJSClassDispatchers(classSym: Symbol, dispatchMethodsNames: List[JSName]): List[js.MemberDef] = { + dispatchMethodsNames.map(genJSClassDispatcher(classSym, _)) + } + + private def genJSClassDispatcher(classSym: Symbol, name: JSName): js.MemberDef = { + val alts = classSym.info.membersBasedOnFlags(required = Method, excluded = Bridge) + .map(_.symbol) + .filter { sym => + /* scala-js#3939: Object is not a "real" superclass of JS types. + * as such, its methods do not participate in overload resolution. + * An exception is toString, which is handled specially in genExportMethod. 
+ */ + sym.owner != defn.ObjectClass && sym.jsName == name + } + .toList + + assert(!alts.isEmpty, s"Ended up with no alternatives for ${classSym.fullName}::$name.") + + val (propSyms, methodSyms) = alts.partition(_.isJSProperty) + val isProp = propSyms.nonEmpty + + if (isProp && methodSyms.nonEmpty) { + val firstAlt = alts.head + report.error( + em"Conflicting properties and methods for ${classSym.fullName}::$name.", + firstAlt.srcPos) + implicit val pos = firstAlt.span + js.JSPropertyDef(js.MemberFlags.empty, genExpr(name)(firstAlt.sourcePos), None, None) + } else { + genMemberExportOrDispatcher(name, isProp, alts, static = false) + } + } + + private def genMemberExportOrDispatcher(jsName: JSName, isProp: Boolean, + alts: List[Symbol], static: Boolean): js.MemberDef = { + withNewLocalNameScope { + if (isProp) + genExportProperty(alts, jsName, static) + else + genExportMethod(alts, jsName, static) + } + } + + private def genExportProperty(alts: List[Symbol], jsName: JSName, static: Boolean): js.JSPropertyDef = { + assert(!alts.isEmpty, s"genExportProperty with empty alternatives for $jsName") + + implicit val pos: Position = alts.head.span + + val namespace = + if (static) js.MemberNamespace.PublicStatic + else js.MemberNamespace.Public + val flags = js.MemberFlags.empty.withNamespace(namespace) + + /* Separate getters and setters. Since we only have getters and setters, we + * simply test the param list size, which is faster than using the full isJSGetter. 
+ */ + val (getter, setters) = alts.partition(_.info.paramInfoss.head.isEmpty) + + // We can have at most one getter + if (getter.sizeIs > 1) + reportCannotDisambiguateError(jsName, alts) + + val getterBody = getter.headOption.map { getterSym => + genApplyForSingleExported(new FormalArgsRegistry(0, false), new ExportedSymbol(getterSym, static), static) + } + + val setterArgAndBody = { + if (setters.isEmpty) { + None + } else { + val formalArgsRegistry = new FormalArgsRegistry(1, false) + val (List(arg), None) = formalArgsRegistry.genFormalArgs(): @unchecked + val body = genOverloadDispatchSameArgc(jsName, formalArgsRegistry, + setters.map(new ExportedSymbol(_, static)), jstpe.AnyType, None) + Some((arg, body)) + } + } + + js.JSPropertyDef(flags, genExpr(jsName)(alts.head.sourcePos), getterBody, setterArgAndBody) + } + + private def genExportMethod(alts0: List[Symbol], jsName: JSName, static: Boolean)(using Context): js.JSMethodDef = { + assert(alts0.nonEmpty, "need at least one alternative to generate exporter method") + + implicit val pos: SourcePosition = alts0.head.sourcePos + + val namespace = + if (static) js.MemberNamespace.PublicStatic + else js.MemberNamespace.Public + val flags = js.MemberFlags.empty.withNamespace(namespace) + + // toString() is always exported. We might need to add it here to get correct overloading. 
+ val alts = jsName match { + case JSName.Literal("toString") if alts0.forall(_.info.paramInfoss.exists(_.nonEmpty)) => + defn.Any_toString :: alts0 + case _ => + alts0 + } + + val overloads = alts.map(new ExportedSymbol(_, static)) + + val (formalArgs, restParam, body) = + genOverloadDispatch(jsName, overloads, jstpe.AnyType) + + js.JSMethodDef(flags, genExpr(jsName), formalArgs, restParam, body)( + OptimizerHints.empty, None) + } + + def genOverloadDispatch(jsName: JSName, alts: List[Exported], tpe: jstpe.Type)( + using pos: SourcePosition): (List[js.ParamDef], Option[js.ParamDef], js.Tree) = { + + // Create the formal args registry + val hasVarArg = alts.exists(_.hasRepeatedParam) + val minArgc = alts.map(_.minArgc).min + val maxNonRepeatedArgc = alts.map(_.maxNonRepeatedArgc).max + val needsRestParam = maxNonRepeatedArgc != minArgc || hasVarArg + val formalArgsRegistry = new FormalArgsRegistry(minArgc, needsRestParam) + + // Generate the list of formal parameters + val (formalArgs, restParam) = formalArgsRegistry.genFormalArgs() + + /* Generate the body + * We have a fast-path for methods that are not overloaded. In addition to + * being a fast path, it does a better job than `genExportMethodMultiAlts` + * when the only alternative has default parameters, because it avoids a + * spurious dispatch. + * In scalac, the spurious dispatch was avoided by a more elaborate case + * generation in `genExportMethod`, which was very convoluted and was not + * ported to dotc. 
+ */ + val body = + if (alts.tail.isEmpty) alts.head.genBody(formalArgsRegistry) + else genExportMethodMultiAlts(formalArgsRegistry, maxNonRepeatedArgc, alts, tpe, jsName) + + (formalArgs, restParam, body) + } + + private def genExportMethodMultiAlts(formalArgsRegistry: FormalArgsRegistry, + maxNonRepeatedArgc: Int, alts: List[Exported], tpe: jstpe.Type, jsName: JSName)( + implicit pos: SourcePosition): js.Tree = { + + // Generate tuples (argc, method) + val methodArgCounts = for { + alt <- alts + argc <- alt.minArgc to (if (alt.hasRepeatedParam) maxNonRepeatedArgc else alt.maxNonRepeatedArgc) + } yield { + (argc, alt) + } + + // Create a list of (argCount -> methods), sorted by argCount (methods may appear multiple times) + val methodsByArgCount: List[(Int, List[Exported])] = + methodArgCounts.groupMap(_._1)(_._2).toList.sortBy(_._1) // sort for determinism + + val altsWithVarArgs = alts.filter(_.hasRepeatedParam) + + // Generate a case block for each (argCount, methods) tuple + // TODO? We could optimize this a bit by putting together all the `argCount`s that have the same methods + // (Scala.js for scalac does that, but the code is very convoluted and it's not clear that it is worth it). 
+ val cases = for { + (argc, methods) <- methodsByArgCount + if methods != altsWithVarArgs // exclude default case we're generating anyways for varargs + } yield { + // body of case to disambiguates methods with current count + val caseBody = genOverloadDispatchSameArgc(jsName, formalArgsRegistry, methods, tpe, Some(argc)) + List(js.IntLiteral(argc - formalArgsRegistry.minArgc)) -> caseBody + } + + def defaultCase = { + if (altsWithVarArgs.isEmpty) + genThrowTypeError() + else + genOverloadDispatchSameArgc(jsName, formalArgsRegistry, altsWithVarArgs, tpe, None) + } + + val body = { + if (cases.isEmpty) { + defaultCase + } else if (cases.tail.isEmpty && altsWithVarArgs.isEmpty) { + cases.head._2 + } else { + val restArgRef = formalArgsRegistry.genRestArgRef() + js.Match( + js.AsInstanceOf(js.JSSelect(restArgRef, js.StringLiteral("length")), jstpe.IntType), + cases, + defaultCase)( + tpe) + } + } + + body + } + + /** Resolves method calls to [[alts]] while assuming they have the same parameter count. + * + * @param jsName + * The JS name of the method, for error reporting + * @param formalArgsRegistry + * The registry of all the formal arguments + * @param alts + * Alternative methods + * @param tpe + * Result type + * @param maxArgc + * Maximum number of arguments to use for disambiguation + */ + private def genOverloadDispatchSameArgc(jsName: JSName, formalArgsRegistry: FormalArgsRegistry, + alts: List[Exported], tpe: jstpe.Type, maxArgc: Option[Int]): js.Tree = { + genOverloadDispatchSameArgcRec(jsName, formalArgsRegistry, alts, tpe, paramIndex = 0, maxArgc) + } + + /** Resolves method calls to [[alts]] while assuming they have the same parameter count. 
+ * + * @param jsName + * The JS name of the method, for error reporting + * @param formalArgsRegistry + * The registry of all the formal arguments + * @param alts + * Alternative methods + * @param tpe + * Result type + * @param paramIndex + * Index where to start disambiguation (starts at 0, increases through recursion) + * @param maxArgc + * Maximum number of arguments to use for disambiguation + */ + private def genOverloadDispatchSameArgcRec(jsName: JSName, formalArgsRegistry: FormalArgsRegistry, + alts: List[Exported], tpe: jstpe.Type, paramIndex: Int, maxArgc: Option[Int]): js.Tree = { + + implicit val pos = alts.head.pos + + if (alts.sizeIs == 1) { + alts.head.genBody(formalArgsRegistry) + } else if (maxArgc.exists(_ <= paramIndex) || !alts.exists(_.params.size > paramIndex)) { + // We reach here in three cases: + // 1. The parameter list has been exhausted + // 2. The optional argument count restriction has triggered + // 3. We only have (more than once) repeated parameters left + // Therefore, we should fail + reportCannotDisambiguateError(jsName, alts.map(_.sym)) + js.Undefined() + } else { + val altsByTypeTest = groupByWithoutHashCode(alts) { exported => + typeTestForTpe(exported.exportArgTypeAt(paramIndex)) + } + + if (altsByTypeTest.size == 1) { + // Testing this parameter is not doing any us good + genOverloadDispatchSameArgcRec(jsName, formalArgsRegistry, alts, tpe, paramIndex + 1, maxArgc) + } else { + // Sort them so that, e.g., isInstanceOf[String] comes before isInstanceOf[Object] + val sortedAltsByTypeTest = topoSortDistinctsWith(altsByTypeTest) { (lhs, rhs) => + (lhs._1, rhs._1) match { + // NoTypeTest is always last + case (_, NoTypeTest) => true + case (NoTypeTest, _) => false + + case (PrimitiveTypeTest(_, rank1), PrimitiveTypeTest(_, rank2)) => + rank1 <= rank2 + + case (InstanceOfTypeTest(t1), InstanceOfTypeTest(t2)) => + t1 <:< t2 + + case (_: PrimitiveTypeTest, _: InstanceOfTypeTest) => true + case (_: InstanceOfTypeTest, _: 
PrimitiveTypeTest) => false + } + } + + val defaultCase = genThrowTypeError() + + sortedAltsByTypeTest.foldRight[js.Tree](defaultCase) { (elem, elsep) => + val (typeTest, subAlts) = elem + implicit val pos = subAlts.head.pos + + val paramRef = formalArgsRegistry.genArgRef(paramIndex) + val genSubAlts = genOverloadDispatchSameArgcRec(jsName, formalArgsRegistry, + subAlts, tpe, paramIndex + 1, maxArgc) + + def hasDefaultParam = subAlts.exists(_.hasDefaultAt(paramIndex)) + + val optCond = typeTest match { + case PrimitiveTypeTest(tpe, _) => Some(js.IsInstanceOf(paramRef, tpe)) + case InstanceOfTypeTest(tpe) => Some(genIsInstanceOf(paramRef, tpe)) + case NoTypeTest => None + } + + optCond.fold[js.Tree] { + genSubAlts // note: elsep is discarded, obviously + } { cond => + val condOrUndef = if (!hasDefaultParam) cond else { + js.If(cond, js.BooleanLiteral(true), + js.BinaryOp(js.BinaryOp.===, paramRef, js.Undefined()))( + jstpe.BooleanType) + } + js.If(condOrUndef, genSubAlts, elsep)(tpe) + } + } + } + } + } + + private def reportCannotDisambiguateError(jsName: JSName, alts: List[Symbol]): Unit = { + val currentClass = currentClassSym.get + + /* Find a position that is in the current class for decent error reporting. + * If there are more than one, always use the "highest" one (i.e., the + * one coming last in the source text) so that we reliably display the + * same error in all compilers. 
+ */ + val validPositions = alts.collect { + case alt if alt.owner == currentClass => alt.sourcePos + } + val pos: SourcePosition = + if (validPositions.isEmpty) currentClass.sourcePos + else validPositions.maxBy(_.point) + + val kind = + if (alts.head.isJSGetter) "getter" + else if (alts.head.isJSSetter) "setter" + else "method" + + val fullKind = + if (currentClass.isJSType) kind + else "exported " + kind + + val displayName = jsName.displayName + val altsTypesInfo = alts.map(_.info.show).sorted.mkString("\n ") + + report.error( + em"Cannot disambiguate overloads for $fullKind $displayName with types\n $altsTypesInfo", + pos) + } + + /** Generates a call to the method represented by the given `exported` while using the formalArguments + * and potentially the argument array. + * + * Also inserts default parameters if required. + */ + private def genApplyForSingleExported(formalArgsRegistry: FormalArgsRegistry, + exported: Exported, static: Boolean): js.Tree = { + if (currentClassSym.isJSType && exported.sym.owner != currentClassSym.get) { + assert(!static, s"nonsensical JS super call in static export of ${exported.sym}") + genApplyForSingleExportedJSSuperCall(formalArgsRegistry, exported) + } else { + genApplyForSingleExportedNonJSSuperCall(formalArgsRegistry, exported, static) + } + } + + private def genApplyForSingleExportedJSSuperCall( + formalArgsRegistry: FormalArgsRegistry, exported: Exported): js.Tree = { + implicit val pos = exported.pos + + val sym = exported.sym + assert(!sym.isClassConstructor, + s"Trying to genApplyForSingleExportedJSSuperCall for the constructor ${sym.fullName}") + + val allArgs = formalArgsRegistry.genAllArgsRefsForForwarder() + + val superClass = { + val superClassSym = currentClassSym.asClass.superClass + if (superClassSym.isNestedJSClass) + js.VarRef(js.LocalIdent(JSSuperClassParamName))(jstpe.AnyType) + else + js.LoadJSConstructor(encodeClassName(superClassSym)) + } + + val receiver = js.This()(currentThisType) + val nameTree = 
genExpr(sym.jsName) + + if (sym.isJSGetter) { + assert(allArgs.isEmpty, + s"getter symbol $sym does not have a getter signature") + js.JSSuperSelect(superClass, receiver, nameTree) + } else if (sym.isJSSetter) { + assert(allArgs.size == 1 && allArgs.head.isInstanceOf[js.Tree], + s"setter symbol $sym does not have a setter signature") + js.Assign(js.JSSuperSelect(superClass, receiver, nameTree), + allArgs.head.asInstanceOf[js.Tree]) + } else { + js.JSSuperMethodCall(superClass, receiver, nameTree, allArgs) + } + } + + private def genApplyForSingleExportedNonJSSuperCall( + formalArgsRegistry: FormalArgsRegistry, exported: Exported, static: Boolean): js.Tree = { + + implicit val pos = exported.pos + + val varDefs = new mutable.ListBuffer[js.VarDef] + + for ((param, i) <- exported.params.zipWithIndex) { + val rhs = genScalaArg(exported, i, formalArgsRegistry, param, static, captures = Nil)( + prevArgsCount => varDefs.take(prevArgsCount).toList.map(_.ref)) + + varDefs += js.VarDef(freshLocalIdent("prep" + i), NoOriginalName, rhs.tpe, mutable = false, rhs) + } + + val builtVarDefs = varDefs.result() + + val jsResult = genResult(exported, builtVarDefs.map(_.ref), static) + + js.Block(builtVarDefs :+ jsResult) + } + + /** Generates a Scala argument from dispatched JavaScript arguments + * (unboxing and default parameter handling). 
+ */ + def genScalaArg(exported: Exported, paramIndex: Int, formalArgsRegistry: FormalArgsRegistry, + param: JSParamInfo, static: Boolean, captures: List[js.Tree])( + previousArgsValues: Int => List[js.Tree])( + implicit pos: SourcePosition): js.Tree = { + + if (param.repeated) { + genJSArrayToVarArgs(formalArgsRegistry.genVarargRef(paramIndex)) + } else { + val jsArg = formalArgsRegistry.genArgRef(paramIndex) + + // Unboxed argument (if it is defined) + val unboxedArg = unbox(jsArg, param.info) + + if (exported.hasDefaultAt(paramIndex)) { + // If argument is undefined and there is a default getter, call it + js.If(js.BinaryOp(js.BinaryOp.===, jsArg, js.Undefined()), { + genCallDefaultGetter(exported.sym, paramIndex, static, captures)(previousArgsValues) + }, { + unboxedArg + })(unboxedArg.tpe) + } else { + // Otherwise, it is always the unboxed argument + unboxedArg + } + } + } + + def genCallDefaultGetter(sym: Symbol, paramIndex: Int, + static: Boolean, captures: List[js.Tree])( + previousArgsValues: Int => List[js.Tree])( + implicit pos: SourcePosition): js.Tree = { + + val targetSym = targetSymForDefaultGetter(sym) + val defaultGetterDenot = this.defaultGetterDenot(targetSym, sym, paramIndex) + + assert(defaultGetterDenot.exists, s"need default getter for method ${sym.fullName}") + assert(!defaultGetterDenot.isOverloaded, i"found overloaded default getter $defaultGetterDenot") + val defaultGetter = defaultGetterDenot.symbol + + val targetTree = { + if (sym.isClassConstructor || static) { + if (targetSym.isStatic) { + assert(captures.isEmpty, i"expected empty captures for ${targetSym.fullName} at $pos") + genLoadModule(targetSym) + } else { + assert(captures.sizeIs == 1, "expected exactly one capture") + + // Find the module accessor. We cannot use memberBasedOnFlags because of scala-js/scala-js#4526. 
+ val outer = targetSym.originalOwner + val name = atPhase(typerPhase)(targetSym.name.unexpandedName).sourceModuleName + val modAccessor = outer.info.allMembers.find { denot => + denot.symbol.is(Module) && denot.name.unexpandedName == name + }.getOrElse { + throw new AssertionError(i"could not find module accessor for ${targetSym.fullName} at $pos") + }.symbol + + val receiver = captures.head + if (outer.isJSType) + genApplyJSClassMethod(receiver, modAccessor, Nil) + else + genApplyMethodMaybeStatically(receiver, modAccessor, Nil) + } + } else { + js.This()(currentThisType) + } + } + + // Pass previous arguments to defaultGetter + val defaultGetterArgs = previousArgsValues(defaultGetter.info.paramInfoss.head.size) + + val callGetter = if (targetSym.isJSType) { + if (defaultGetter.owner.isNonNativeJSClass) { + if (defaultGetter.hasAnnotation(jsdefn.JSOptionalAnnot)) + js.Undefined() + else + genApplyJSClassMethod(targetTree, defaultGetter, defaultGetterArgs) + } else if (defaultGetter.owner == targetSym) { + /* We get here if a non-native constructor has a native companion. + * This is reported on a per-class level. 
+ */ + assert(sym.isClassConstructor, + s"got non-constructor method $sym with default method in JS native companion") + js.Undefined() + } else { + report.error( + "When overriding a native method with default arguments, " + + "the overriding method must explicitly repeat the default arguments.", + sym.srcPos) + js.Undefined() + } + } else { + genApplyMethod(targetTree, defaultGetter, defaultGetterArgs) + } + + // #15419 If the getter returns void, we must "box" it by returning undefined + if (callGetter.tpe == jstpe.NoType) + js.Block(callGetter, js.Undefined()) + else + callGetter + } + + private def targetSymForDefaultGetter(sym: Symbol): Symbol = + if (sym.isClassConstructor) sym.owner.companionModule.moduleClass + else sym.owner + + private def defaultGetterDenot(targetSym: Symbol, sym: Symbol, paramIndex: Int): Denotation = + targetSym.info.memberBasedOnFlags(DefaultGetterName(sym.name.asTermName, paramIndex), excluded = Bridge) + + private def defaultGetterDenot(sym: Symbol, paramIndex: Int): Denotation = + defaultGetterDenot(targetSymForDefaultGetter(sym), sym, paramIndex) + + /** Generate the final forwarding call to the exported method. 
*/ + private def genResult(exported: Exported, args: List[js.Tree], static: Boolean)( + implicit pos: SourcePosition): js.Tree = { + + val sym = exported.sym + val currentClass = currentClassSym.get + + def receiver = + if (static) genLoadModule(sym.owner) + else js.This()(currentThisType) + + def boxIfNeeded(call: js.Tree): js.Tree = + box(call, atPhase(elimErasedValueTypePhase)(sym.info.resultType)) + + if (currentClass.isNonNativeJSClass) { + assert(sym.owner == currentClass, sym.fullName) + boxIfNeeded(genApplyJSClassMethod(receiver, sym, args)) + } else { + if (sym.isClassConstructor) + js.New(encodeClassName(currentClass), encodeMethodSym(sym), args) + else if (sym.isPrivate) + boxIfNeeded(genApplyMethodStatically(receiver, sym, args)) + else + boxIfNeeded(genApplyMethod(receiver, sym, args)) + } + } + + private def genThrowTypeError(msg: String = "No matching overload")(implicit pos: Position): js.Tree = + js.Throw(js.JSNew(js.JSGlobalRef("TypeError"), js.StringLiteral(msg) :: Nil)) + + abstract class Exported( + val sym: Symbol, + // Parameters participating in overload resolution. 
+ val params: scala.collection.immutable.IndexedSeq[JSParamInfo] + ) { + assert(!params.exists(_.capture), "illegal capture params in Exported") + + private val paramsHasDefault = { + if (!atPhase(elimRepeatedPhase)(sym.hasDefaultParams)) { + Vector.empty + } else { + val targetSym = targetSymForDefaultGetter(sym) + params.indices.map(i => defaultGetterDenot(targetSym, sym, i).exists) + } + } + + def hasDefaultAt(paramIndex: Int): Boolean = + paramIndex < paramsHasDefault.size && paramsHasDefault(paramIndex) + + val hasRepeatedParam = params.nonEmpty && params.last.repeated + + val minArgc = { + // Find the first default param or repeated param + params + .indices + .find(i => hasDefaultAt(i) || params(i).repeated) + .getOrElse(params.size) + } + + val maxNonRepeatedArgc = if (hasRepeatedParam) params.size - 1 else params.size + + def pos: SourcePosition = sym.sourcePos + + def exportArgTypeAt(paramIndex: Int): Type = { + if (paramIndex < params.length) { + params(paramIndex).info + } else { + assert(hasRepeatedParam, i"$sym does not have varargs nor enough params for $paramIndex") + params.last.info + } + } + + def typeInfo: String = sym.info.toString + + def genBody(formalArgsRegistry: FormalArgsRegistry): js.Tree + } + + private class ExportedSymbol(sym: Symbol, static: Boolean) + extends Exported(sym, sym.jsParamInfos.toIndexedSeq) { + + def genBody(formalArgsRegistry: FormalArgsRegistry): js.Tree = + genApplyForSingleExported(formalArgsRegistry, this, static) + } + + // !!! Hash codes of RTTypeTest are meaningless because of InstanceOfTypeTest + private sealed abstract class RTTypeTest + + private case class PrimitiveTypeTest(tpe: jstpe.Type, rank: Int) extends RTTypeTest + + // !!! 
This class does not have a meaningful hash code + private case class InstanceOfTypeTest(tpe: Type) extends RTTypeTest { + override def equals(that: Any): Boolean = { + that match { + case InstanceOfTypeTest(thatTpe) => tpe =:= thatTpe + case _ => false + } + } + } + + private case object NoTypeTest extends RTTypeTest + + /** Very simple O(n²) topological sort for elements assumed to be distinct. */ + private def topoSortDistinctsWith[A <: AnyRef](coll: List[A])(lteq: (A, A) => Boolean): List[A] = { + @tailrec + def loop(coll: List[A], acc: List[A]): List[A] = { + if (coll.isEmpty) acc + else if (coll.tail.isEmpty) coll.head :: acc + else { + val (lhs, rhs) = coll.span(x => !coll.forall(y => (x eq y) || !lteq(x, y))) + assert(!rhs.isEmpty, s"cycle while ordering $coll") + loop(lhs ::: rhs.tail, rhs.head :: acc) + } + } + + loop(coll, Nil) + } + + private def typeTestForTpe(tpe: Type): RTTypeTest = { + tpe match { + case tpe: ErasedValueType => + InstanceOfTypeTest(tpe.tycon.typeSymbol.typeRef) + + case _ => + import org.scalajs.ir.Names + + (toIRType(tpe): @unchecked) match { + case jstpe.AnyType => NoTypeTest + + case jstpe.NoType => PrimitiveTypeTest(jstpe.UndefType, 0) + case jstpe.BooleanType => PrimitiveTypeTest(jstpe.BooleanType, 1) + case jstpe.CharType => PrimitiveTypeTest(jstpe.CharType, 2) + case jstpe.ByteType => PrimitiveTypeTest(jstpe.ByteType, 3) + case jstpe.ShortType => PrimitiveTypeTest(jstpe.ShortType, 4) + case jstpe.IntType => PrimitiveTypeTest(jstpe.IntType, 5) + case jstpe.LongType => PrimitiveTypeTest(jstpe.LongType, 6) + case jstpe.FloatType => PrimitiveTypeTest(jstpe.FloatType, 7) + case jstpe.DoubleType => PrimitiveTypeTest(jstpe.DoubleType, 8) + + case jstpe.ClassType(Names.BoxedUnitClass) => PrimitiveTypeTest(jstpe.UndefType, 0) + case jstpe.ClassType(Names.BoxedStringClass) => PrimitiveTypeTest(jstpe.StringType, 9) + case jstpe.ClassType(_) => InstanceOfTypeTest(tpe) + + case jstpe.ArrayType(_) => InstanceOfTypeTest(tpe) + } + } + } + + 
// Group-by that does not rely on hashCode(), only equals() - O(n²) + private def groupByWithoutHashCode[A, B](coll: List[A])(f: A => B): List[(B, List[A])] = { + val m = new mutable.ArrayBuffer[(B, List[A])] + m.sizeHint(coll.length) + + for (elem <- coll) { + val key = f(elem) + val index = m.indexWhere(_._1 == key) + if (index < 0) + m += ((key, List(elem))) + else + m(index) = (key, elem :: m(index)._2) + } + + m.toList + } + + class FormalArgsRegistry(val minArgc: Int, needsRestParam: Boolean) { + private val fixedParamNames: scala.collection.immutable.IndexedSeq[jsNames.LocalName] = + (0 until minArgc).toIndexedSeq.map(_ => freshLocalIdent("arg")(NoPosition).name) + + private val restParamName: jsNames.LocalName = + if (needsRestParam) freshLocalIdent("rest")(NoPosition).name + else null + + def genFormalArgs()(implicit pos: Position): (List[js.ParamDef], Option[js.ParamDef]) = { + val fixedParamDefs = fixedParamNames.toList.map { paramName => + js.ParamDef(js.LocalIdent(paramName), NoOriginalName, jstpe.AnyType, mutable = false) + } + + val restParam = { + if (needsRestParam) + Some(js.ParamDef(js.LocalIdent(restParamName), NoOriginalName, jstpe.AnyType, mutable = false)) + else + None + } + + (fixedParamDefs, restParam) + } + + def genArgRef(index: Int)(implicit pos: Position): js.Tree = { + if (index < minArgc) + js.VarRef(js.LocalIdent(fixedParamNames(index)))(jstpe.AnyType) + else + js.JSSelect(genRestArgRef(), js.IntLiteral(index - minArgc)) + } + + def genVarargRef(fixedParamCount: Int)(implicit pos: Position): js.Tree = { + assert(fixedParamCount >= minArgc, s"genVarargRef($fixedParamCount) with minArgc = $minArgc at $pos") + val restParam = genRestArgRef() + if (fixedParamCount == minArgc) + restParam + else + js.JSMethodApply(restParam, js.StringLiteral("slice"), List(js.IntLiteral(fixedParamCount - minArgc))) + } + + def genRestArgRef()(implicit pos: Position): js.Tree = { + assert(needsRestParam, s"trying to generate a reference to non-existent 
rest param at $pos") + js.VarRef(js.LocalIdent(restParamName))(jstpe.AnyType) + } + + def genAllArgsRefsForForwarder()(implicit pos: Position): List[js.TreeOrJSSpread] = { + val fixedArgRefs = fixedParamNames.toList.map { paramName => + js.VarRef(js.LocalIdent(paramName))(jstpe.AnyType) + } + + if (needsRestParam) { + val restArgRef = js.VarRef(js.LocalIdent(restParamName))(jstpe.AnyType) + fixedArgRefs :+ js.JSSpread(restArgRef) + } else { + fixedArgRefs + } + } + } +} diff --git a/tests/pos-with-compiler-cc/backend/sjs/JSPositions.scala b/tests/pos-with-compiler-cc/backend/sjs/JSPositions.scala new file mode 100644 index 000000000000..2fd007165952 --- /dev/null +++ b/tests/pos-with-compiler-cc/backend/sjs/JSPositions.scala @@ -0,0 +1,102 @@ +package dotty.tools.backend.sjs + +import scala.language.unsafeNulls + +import java.net.{URI, URISyntaxException} + +import dotty.tools.dotc.core._ +import Contexts._ +import Decorators.em + +import dotty.tools.dotc.report + +import dotty.tools.dotc.util.{SourceFile, SourcePosition} +import dotty.tools.dotc.util.Spans.Span + +import org.scalajs.ir + +/** Conversion utilities from dotty Positions to IR Positions. 
*/ +class JSPositions()(using Context) { + import JSPositions._ + + private val sourceURIMaps: List[URIMap] = { + ctx.settings.scalajsMapSourceURI.value.flatMap { option => + val uris = option.split("->") + if (uris.length != 1 && uris.length != 2) { + report.error("-scalajs-mapSourceURI needs one or two URIs as argument (separated by '->').") + Nil + } else { + try { + val from = new URI(uris.head) + val to = uris.lift(1).map(str => new URI(str)) + URIMap(from, to) :: Nil + } catch { + case e: URISyntaxException => + report.error(em"${e.getInput} is not a valid URI") + Nil + } + } + } + } + + private def sourceAndSpan2irPos(source: SourceFile, span: Span): ir.Position = { + if (!span.exists) ir.Position.NoPosition + else { + // dotty positions and IR positions are both 0-based + val irSource = span2irPosCache.toIRSource(source) + val point = span.point + val line = source.offsetToLine(point) + val column = source.column(point) + ir.Position(irSource, line, column) + } + } + + /** Implicit conversion from dotty Span to ir.Position. */ + implicit def span2irPos(span: Span): ir.Position = + sourceAndSpan2irPos(ctx.compilationUnit.source, span) + + /** Implicitly materializes an ir.Position from an implicit dotty Span. */ + implicit def implicitSpan2irPos(implicit span: Span): ir.Position = + span2irPos(span) + + /** Implicitly materializes an ir.Position from an implicit dotty SourcePosition. 
*/ + implicit def implicitSourcePos2irPos(implicit sourcePos: SourcePosition): ir.Position = + sourceAndSpan2irPos(sourcePos.source, sourcePos.span) + + private object span2irPosCache { + import dotty.tools.dotc.util._ + + private var lastDotcSource: SourceFile = null + private var lastIRSource: ir.Position.SourceFile = null + + def toIRSource(dotcSource: SourceFile): ir.Position.SourceFile = { + if (dotcSource != lastDotcSource) { + lastIRSource = convert(dotcSource) + lastDotcSource = dotcSource + } + lastIRSource + } + + private def convert(dotcSource: SourceFile): ir.Position.SourceFile = { + dotcSource.file.file match { + case null => + new java.net.URI( + "virtualfile", // Pseudo-Scheme + dotcSource.file.path, // Scheme specific part + null // Fragment + ) + case file => + val srcURI = file.toURI + sourceURIMaps.collectFirst { + case URIMap(from, to) if from.relativize(srcURI) != srcURI => + val relURI = from.relativize(srcURI) + to.fold(relURI)(_.resolve(relURI)) + }.getOrElse(srcURI) + } + } + } +} + +object JSPositions { + final case class URIMap(from: URI, to: Option[URI]) +} diff --git a/tests/pos-with-compiler-cc/backend/sjs/JSPrimitives.scala b/tests/pos-with-compiler-cc/backend/sjs/JSPrimitives.scala new file mode 100644 index 000000000000..ce83f5e9e83b --- /dev/null +++ b/tests/pos-with-compiler-cc/backend/sjs/JSPrimitives.scala @@ -0,0 +1,150 @@ +package dotty.tools.backend.sjs + +import dotty.tools.dotc.core._ +import Names.TermName +import Types._ +import Contexts._ +import Symbols._ +import Decorators.em + +import dotty.tools.dotc.ast.tpd._ +import dotty.tools.backend.jvm.DottyPrimitives +import dotty.tools.dotc.report +import dotty.tools.dotc.util.ReadOnlyMap + +object JSPrimitives { + + inline val FirstJSPrimitiveCode = 300 + + inline val DYNNEW = FirstJSPrimitiveCode + 1 // Instantiate a new JavaScript object + + inline val ARR_CREATE = DYNNEW + 1 // js.Array.apply (array literal syntax) + + inline val TYPEOF = ARR_CREATE + 1 // typeof x + 
inline val JS_NATIVE = TYPEOF + 1 // js.native. Marker method. Fails if tried to be emitted. + + inline val UNITVAL = JS_NATIVE + 1 // () value, which is undefined + + inline val JS_NEW_TARGET = UNITVAL + 1 // js.new.target + + inline val JS_IMPORT = JS_NEW_TARGET + 1 // js.import.apply(specifier) + inline val JS_IMPORT_META = JS_IMPORT + 1 // js.import.meta + + inline val CONSTRUCTOROF = JS_IMPORT_META + 1 // runtime.constructorOf(clazz) + inline val CREATE_INNER_JS_CLASS = CONSTRUCTOROF + 1 // runtime.createInnerJSClass + inline val CREATE_LOCAL_JS_CLASS = CREATE_INNER_JS_CLASS + 1 // runtime.createLocalJSClass + inline val WITH_CONTEXTUAL_JS_CLASS_VALUE = CREATE_LOCAL_JS_CLASS + 1 // runtime.withContextualJSClassValue + inline val LINKING_INFO = WITH_CONTEXTUAL_JS_CLASS_VALUE + 1 // runtime.linkingInfo + inline val DYNAMIC_IMPORT = LINKING_INFO + 1 // runtime.dynamicImport + + inline val STRICT_EQ = DYNAMIC_IMPORT + 1 // js.special.strictEquals + inline val IN = STRICT_EQ + 1 // js.special.in + inline val INSTANCEOF = IN + 1 // js.special.instanceof + inline val DELETE = INSTANCEOF + 1 // js.special.delete + inline val FORIN = DELETE + 1 // js.special.forin + inline val JS_THROW = FORIN + 1 // js.special.throw + inline val JS_TRY_CATCH = JS_THROW + 1 // js.special.tryCatch + inline val WRAP_AS_THROWABLE = JS_TRY_CATCH + 1 // js.special.wrapAsThrowable + inline val UNWRAP_FROM_THROWABLE = WRAP_AS_THROWABLE + 1 // js.special.unwrapFromThrowable + inline val DEBUGGER = UNWRAP_FROM_THROWABLE + 1 // js.special.debugger + + inline val THROW = DEBUGGER + 1 + + inline val UNION_FROM = THROW + 1 // js.|.from + inline val UNION_FROM_TYPE_CONSTRUCTOR = UNION_FROM + 1 // js.|.fromTypeConstructor + + inline val REFLECT_SELECTABLE_SELECTDYN = UNION_FROM_TYPE_CONSTRUCTOR + 1 // scala.reflect.Selectable.selectDynamic + inline val REFLECT_SELECTABLE_APPLYDYN = REFLECT_SELECTABLE_SELECTDYN + 1 // scala.reflect.Selectable.applyDynamic + + inline val LastJSPrimitiveCode = 
REFLECT_SELECTABLE_APPLYDYN + + def isJSPrimitive(code: Int): Boolean = + code >= FirstJSPrimitiveCode && code <= LastJSPrimitiveCode + +} + +class JSPrimitives(ictx: DetachedContext) extends DottyPrimitives(ictx) { + import JSPrimitives._ + + private lazy val jsPrimitives: ReadOnlyMap[Symbol, Int] = initJSPrimitives(using ictx) + + override def getPrimitive(sym: Symbol): Int = + jsPrimitives.getOrElse(sym, super.getPrimitive(sym)) + + override def getPrimitive(app: Apply, tpe: Type)(using Context): Int = + jsPrimitives.getOrElse(app.fun.symbol, super.getPrimitive(app, tpe)) + + override def isPrimitive(sym: Symbol): Boolean = + jsPrimitives.contains(sym) || super.isPrimitive(sym) + + override def isPrimitive(fun: Tree): Boolean = + jsPrimitives.contains(fun.symbol(using ictx)) || super.isPrimitive(fun) + + /** Initialize the primitive map */ + private def initJSPrimitives(using Context): ReadOnlyMap[Symbol, Int] = { + + val primitives = MutableSymbolMap[Int]() + + // !!! Code duplicate with DottyPrimitives + /** Add a primitive operation to the map */ + def addPrimitive(s: Symbol, code: Int): Unit = { + assert(!(primitives contains s), "Duplicate primitive " + s) + primitives(s) = code + } + + def addPrimitives(cls: Symbol, method: TermName, code: Int)(using Context): Unit = { + val alts = cls.info.member(method).alternatives.map(_.symbol) + if (alts.isEmpty) { + report.error(em"Unknown primitive method $cls.$method") + } else { + for (s <- alts) + addPrimitive(s, code) + } + } + + val jsdefn = JSDefinitions.jsdefn + + addPrimitive(jsdefn.JSDynamic_newInstance, DYNNEW) + + addPrimitive(jsdefn.JSArray_apply, ARR_CREATE) + + addPrimitive(jsdefn.JSPackage_typeOf, TYPEOF) + addPrimitive(jsdefn.JSPackage_native, JS_NATIVE) + + addPrimitive(defn.BoxedUnit_UNIT, UNITVAL) + + addPrimitive(jsdefn.JSNew_target, JS_NEW_TARGET) + + addPrimitive(jsdefn.JSImport_apply, JS_IMPORT) + addPrimitive(jsdefn.JSImport_meta, JS_IMPORT_META) + + addPrimitive(jsdefn.Runtime_constructorOf, 
CONSTRUCTOROF) + addPrimitive(jsdefn.Runtime_createInnerJSClass, CREATE_INNER_JS_CLASS) + addPrimitive(jsdefn.Runtime_createLocalJSClass, CREATE_LOCAL_JS_CLASS) + addPrimitive(jsdefn.Runtime_withContextualJSClassValue, WITH_CONTEXTUAL_JS_CLASS_VALUE) + addPrimitive(jsdefn.Runtime_linkingInfo, LINKING_INFO) + addPrimitive(jsdefn.Runtime_dynamicImport, DYNAMIC_IMPORT) + + addPrimitive(jsdefn.Special_strictEquals, STRICT_EQ) + addPrimitive(jsdefn.Special_in, IN) + addPrimitive(jsdefn.Special_instanceof, INSTANCEOF) + addPrimitive(jsdefn.Special_delete, DELETE) + addPrimitive(jsdefn.Special_forin, FORIN) + addPrimitive(jsdefn.Special_throw, JS_THROW) + addPrimitive(jsdefn.Special_tryCatch, JS_TRY_CATCH) + addPrimitive(jsdefn.Special_wrapAsThrowable, WRAP_AS_THROWABLE) + addPrimitive(jsdefn.Special_unwrapFromThrowable, UNWRAP_FROM_THROWABLE) + addPrimitive(jsdefn.Special_debugger, DEBUGGER) + + addPrimitive(defn.throwMethod, THROW) + + addPrimitive(jsdefn.PseudoUnion_from, UNION_FROM) + addPrimitive(jsdefn.PseudoUnion_fromTypeConstructor, UNION_FROM_TYPE_CONSTRUCTOR) + + addPrimitive(jsdefn.ReflectSelectable_selectDynamic, REFLECT_SELECTABLE_SELECTDYN) + addPrimitive(jsdefn.ReflectSelectable_applyDynamic, REFLECT_SELECTABLE_APPLYDYN) + + primitives + } + +} diff --git a/tests/pos-with-compiler-cc/backend/sjs/ScopedVar.scala b/tests/pos-with-compiler-cc/backend/sjs/ScopedVar.scala new file mode 100644 index 000000000000..21462929833c --- /dev/null +++ b/tests/pos-with-compiler-cc/backend/sjs/ScopedVar.scala @@ -0,0 +1,38 @@ +package dotty.tools.backend.sjs + +class ScopedVar[A](init: A) extends caps.Pure { + import ScopedVar.Assignment + + private[ScopedVar] var value = init + + def this()(implicit ev: Null <:< A) = this(ev(null)) + + def get: A = value + def :=(newValue: A): Assignment[A] = new Assignment(this, newValue) +} + +object ScopedVar { + class Assignment[T](scVar: ScopedVar[T], value: T) { + private[ScopedVar] def push(): AssignmentStackElement[T] = { + val 
stack = new AssignmentStackElement(scVar, scVar.value) + scVar.value = value + stack + } + } + + private class AssignmentStackElement[T](scVar: ScopedVar[T], oldValue: T) { + private[ScopedVar] def pop(): Unit = { + scVar.value = oldValue + } + } + + implicit def toValue[T](scVar: ScopedVar[T]): T = scVar.get + + def withScopedVars[T](ass: Assignment[_]*)(body: => T): T = { + val stack = ass.map(_.push()) + try body + finally stack.reverse.foreach(_.pop()) + } + + final class VarBox[A](var value: A) +} diff --git a/tests/pos-with-compiler-cc/dotc/Bench.scala b/tests/pos-with-compiler-cc/dotc/Bench.scala new file mode 100644 index 000000000000..c9c032b0ae7d --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/Bench.scala @@ -0,0 +1,64 @@ +package dotty.tools +package dotc + +import core.Contexts._ +import reporting.Reporter +import io.AbstractFile + +import scala.annotation.internal.sharable + +/** A main class for running compiler benchmarks. Can instantiate a given + * number of compilers and run each (sequentially) a given number of times + * on the same sources. 
+ */ +object Bench extends Driver: + + @sharable private var numRuns = 1 + + private def ntimes(n: Int)(op: => Reporter): Reporter = + (0 until n).foldLeft(emptyReporter)((_, _) => op) + + @sharable private var times: Array[Int] = _ + + override def doCompile(compiler: Compiler, files: List[AbstractFile])(using Context): Reporter = + times = new Array[Int](numRuns) + var reporter: Reporter = emptyReporter + for i <- 0 until numRuns do + val start = System.nanoTime() + reporter = super.doCompile(compiler, files) + times(i) = ((System.nanoTime - start) / 1000000).toInt + println(s"time elapsed: ${times(i)}ms") + if ctx.settings.Xprompt.value then + print("hit to continue >") + System.in.nn.read() + println() + reporter + + def extractNumArg(args: Array[String], name: String, default: Int = 1): (Int, Array[String]) = { + val pos = args indexOf name + if (pos < 0) (default, args) + else (args(pos + 1).toInt, (args take pos) ++ (args drop (pos + 2))) + } + + def reportTimes() = + val best = times.sorted + val measured = numRuns / 3 + val avgBest = best.take(measured).sum / measured + val avgLast = times.reverse.take(measured).sum / measured + println(s"best out of $numRuns runs: ${best(0)}") + println(s"average out of best $measured: $avgBest") + println(s"average out of last $measured: $avgLast") + + override def process(args: Array[String], rootCtx: Context): Reporter = + val (numCompilers, args1) = extractNumArg(args, "#compilers") + val (numRuns, args2) = extractNumArg(args1, "#runs") + this.numRuns = numRuns + var reporter: Reporter = emptyReporter + for i <- 0 until numCompilers do + reporter = super.process(args2, rootCtx) + reportTimes() + reporter + +end Bench + + diff --git a/tests/pos-with-compiler-cc/dotc/CompilationUnit.scala b/tests/pos-with-compiler-cc/dotc/CompilationUnit.scala new file mode 100644 index 000000000000..f70bda947129 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/CompilationUnit.scala @@ -0,0 +1,167 @@ +package dotty.tools +package 
dotc + +import core._ +import Contexts._ +import SymDenotations.ClassDenotation +import Symbols._ +import util.{FreshNameCreator, SourceFile, NoSource} +import util.Spans.Span +import ast.{tpd, untpd} +import tpd.{Tree, TreeTraverser} +import ast.Trees.{Import, Ident} +import typer.Nullables +import transform.SymUtils._ +import core.Decorators._ +import config.{SourceVersion, Feature} +import StdNames.nme +import scala.annotation.internal.sharable +import language.experimental.pureFunctions + +class CompilationUnit protected (val source: SourceFile) { + + override def toString: String = source.toString + + var untpdTree: untpd.Tree = untpd.EmptyTree + + var tpdTree: tpd.Tree = tpd.EmptyTree + + /** Is this the compilation unit of a Java file */ + def isJava: Boolean = source.file.name.endsWith(".java") + + /** The source version for this unit, as determined by a language import */ + var sourceVersion: Option[SourceVersion] = None + + /** Pickled TASTY binaries, indexed by class. */ + var pickled: Map[ClassSymbol, () -> Array[Byte]] = Map() + + /** The fresh name creator for the current unit. + * FIXME(#7661): This is not fine-grained enough to enable reproducible builds, + * see https://github.com/scala/scala/commit/f50ec3c866263448d803139e119b33afb04ec2bc + */ + val freshNames: FreshNameCreator = new FreshNameCreator.Default + + /** Will be set to `true` if there are inline call that must be inlined after typer. + * The information is used in phase `Inlining` in order to avoid traversing trees that need no transformations. + */ + var needsInlining: Boolean = false + + /** Set to `true` if inliner added anonymous mirrors that need to be completed */ + var needsMirrorSupport: Boolean = false + + /** Will be set to `true` if contains `Quote`. + * The information is used in phase `Staging`/`Splicing`/`PickleQuotes` in order to avoid traversing trees that need no transformations. 
+ */ + var needsStaging: Boolean = false + + /** Will be set to true if the unit contains a captureChecking language import */ + var needsCaptureChecking: Boolean = false + + /** Will be set to true if the unit contains a pureFunctions language import */ + var knowsPureFuns: Boolean = false + + var suspended: Boolean = false + var suspendedAtInliningPhase: Boolean = false + + /** Can this compilation unit be suspended */ + def isSuspendable: Boolean = true + + /** Suspends the compilation unit by thowing a SuspendException + * and recording the suspended compilation unit + */ + def suspend()(using Context): Nothing = + assert(isSuspendable) + if !suspended then + if (ctx.settings.XprintSuspension.value) + report.echo(i"suspended: $this") + suspended = true + ctx.run.nn.suspendedUnits += this + if ctx.phase == Phases.inliningPhase then + suspendedAtInliningPhase = true + throw CompilationUnit.SuspendException() + + private var myAssignmentSpans: Map[Int, List[Span]] | Null = null + + /** A map from (name-) offsets of all local variables in this compilation unit + * that can be tracked for being not null to the list of spans of assignments + * to these variables. 
+ */ + def assignmentSpans(using Context): Map[Int, List[Span]] = + if myAssignmentSpans == null then myAssignmentSpans = Nullables.assignmentSpans + myAssignmentSpans.nn +} + +@sharable object NoCompilationUnit extends CompilationUnit(NoSource) { + + override def isJava: Boolean = false + + override def suspend()(using Context): Nothing = + throw CompilationUnit.SuspendException() + + override def assignmentSpans(using Context): Map[Int, List[Span]] = Map.empty +} + +object CompilationUnit { + + class SuspendException extends Exception + + /** Make a compilation unit for top class `clsd` with the contents of the `unpickled` tree */ + def apply(clsd: ClassDenotation, unpickled: Tree, forceTrees: Boolean)(using Context): CompilationUnit = + val file = clsd.symbol.associatedFile.nn + apply(SourceFile(file, Array.empty[Char]), unpickled, forceTrees) + + /** Make a compilation unit, given picked bytes and unpickled tree */ + def apply(source: SourceFile, unpickled: Tree, forceTrees: Boolean)(using Context): CompilationUnit = { + assert(!unpickled.isEmpty, unpickled) + val unit1 = new CompilationUnit(source) + unit1.tpdTree = unpickled + if (forceTrees) { + val force = new Force + force.traverse(unit1.tpdTree) + unit1.needsStaging = force.containsQuote + unit1.needsInlining = force.containsInline + } + unit1 + } + + /** Create a compilation unit corresponding to `source`. + * If `mustExist` is true, this will fail if `source` does not exist. 
+ */ + def apply(source: SourceFile, mustExist: Boolean = true)(using Context): CompilationUnit = { + val src = + if (!mustExist) + source + else if (source.file.isDirectory) { + report.error(em"expected file, received directory '${source.file.path}'") + NoSource + } + else if (!source.file.exists) { + report.error(em"source file not found: ${source.file.path}") + NoSource + } + else source + new CompilationUnit(src) + } + + /** Force the tree to be loaded */ + private class Force extends TreeTraverser { + var containsQuote = false + var containsInline = false + var containsCaptureChecking = false + def traverse(tree: Tree)(using Context): Unit = { + if (tree.symbol.isQuote) + containsQuote = true + if tree.symbol.is(Flags.Inline) then + containsInline = true + tree match + case Import(qual, selectors) => + tpd.languageImport(qual) match + case Some(prefix) => + for case untpd.ImportSelector(untpd.Ident(imported), untpd.EmptyTree, _) <- selectors do + Feature.handleGlobalLanguageImport(prefix, imported) + case _ => + case _ => + traverseChildren(tree) + } + } +} diff --git a/tests/pos-with-compiler-cc/dotc/Compiler.scala b/tests/pos-with-compiler-cc/dotc/Compiler.scala new file mode 100644 index 000000000000..b121a47781e1 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/Compiler.scala @@ -0,0 +1,171 @@ +package dotty.tools +package dotc + +import core._ +import Contexts._ +import typer.{TyperPhase, RefChecks} +import cc.CheckCaptures +import parsing.Parser +import Phases.Phase +import transform._ +import dotty.tools.backend +import backend.jvm.{CollectSuperCalls, GenBCode} +import localopt.StringInterpolatorOpt + +/** The central class of the dotc compiler. The job of a compiler is to create + * runs, which process given `phases` in a given `rootContext`. + */ +class Compiler { + + /** Meta-ordering constraint: + * + * DenotTransformers that change the signature of their denotation's info must go + * after erasure. 
The reason is that denotations are permanently referred to by + * TermRefs which contain a signature. If the signature of a symbol would change, + * all refs to it would become outdated - they could not be dereferenced in the + * new phase. + * + * After erasure, signature changing denot-transformers are OK because signatures + * are never recomputed later than erasure. + */ + def phases: List[List[Phase]] = + frontendPhases ::: picklerPhases ::: transformPhases ::: backendPhases + + /** Phases dealing with the frontend up to trees ready for TASTY pickling */ + protected def frontendPhases: List[List[Phase]] = + List(new Parser) :: // Compiler frontend: scanner, parser + List(new TyperPhase) :: // Compiler frontend: namer, typer + List(new YCheckPositions) :: // YCheck positions + List(new sbt.ExtractDependencies) :: // Sends information on classes' dependencies to sbt via callbacks + List(new semanticdb.ExtractSemanticDB) :: // Extract info into .semanticdb files + List(new PostTyper) :: // Additional checks and cleanups after type checking + List(new sjs.PrepJSInterop) :: // Additional checks and transformations for Scala.js (Scala.js only) + List(new sbt.ExtractAPI) :: // Sends a representation of the API of classes to sbt via callbacks + List(new SetRootTree) :: // Set the `rootTreeOrProvider` on class symbols + Nil + + /** Phases dealing with TASTY tree pickling and unpickling */ + protected def picklerPhases: List[List[Phase]] = + List(new Pickler) :: // Generate TASTY info + List(new Inlining) :: // Inline and execute macros + List(new PostInlining) :: // Add mirror support for inlined code + List(new Staging) :: // Check staging levels and heal staged types + List(new Splicing) :: // Replace level 1 splices with holes + List(new PickleQuotes) :: // Turn quoted trees into explicit run-time data structures + Nil + + /** Phases dealing with the transformation from pickled trees to backend trees */ + protected def transformPhases: List[List[Phase]] = + List(new 
InstrumentCoverage) :: // Perform instrumentation for code coverage (if -coverage-out is set) + List(new FirstTransform, // Some transformations to put trees into a canonical form + new CheckReentrant, // Internal use only: Check that compiled program has no data races involving global vars + new ElimPackagePrefixes, // Eliminate references to package prefixes in Select nodes + new CookComments, // Cook the comments: expand variables, doc, etc. + new CheckStatic, // Check restrictions that apply to @static members + new CheckLoopingImplicits, // Check that implicit defs do not call themselves in an infinite loop + new BetaReduce, // Reduce closure applications + new InlineVals, // Check right hand-sides of an `inline val`s + new ExpandSAMs, // Expand single abstract method closures to anonymous classes + new ElimRepeated, // Rewrite vararg parameters and arguments + new RefChecks) :: // Various checks mostly related to abstract members and overriding + List(new init.Checker) :: // Check initialization of objects + List(new CrossVersionChecks, // Check issues related to deprecated and experimental + new ProtectedAccessors, // Add accessors for protected members + new ExtensionMethods, // Expand methods of value classes with extension methods + new UncacheGivenAliases, // Avoid caching RHS of simple parameterless given aliases + new ElimByName, // Map by-name parameters to functions + new HoistSuperArgs, // Hoist complex arguments of supercalls to enclosing scope + new ForwardDepChecks, // Check that there are no forward references to local vals + new SpecializeApplyMethods, // Adds specialized methods to FunctionN + new TryCatchPatterns, // Compile cases in try/catch + new PatternMatcher) :: // Compile pattern matches + List(new TestRecheck.Pre) :: // Test only: run rechecker, enabled under -Yrecheck-test + List(new TestRecheck) :: // Test only: run rechecker, enabled under -Yrecheck-test + List(new CheckCaptures.Pre) :: // Preparations for check captures phase, 
enabled under captureChecking + List(new CheckCaptures) :: // Check captures, enabled under captureChecking + List(new ElimOpaque, // Turn opaque into normal aliases + new sjs.ExplicitJSClasses, // Make all JS classes explicit (Scala.js only) + new ExplicitOuter, // Add accessors to outer classes from nested ones. + new ExplicitSelf, // Make references to non-trivial self types explicit as casts + new StringInterpolatorOpt) :: // Optimizes raw and s and f string interpolators by rewriting them to string concatenations or formats + List(new PruneErasedDefs, // Drop erased definitions from scopes and simplify erased expressions + new UninitializedDefs, // Replaces `compiletime.uninitialized` by `_` + new InlinePatterns, // Remove placeholders of inlined patterns + new VCInlineMethods, // Inlines calls to value class methods + new SeqLiterals, // Express vararg arguments as arrays + new InterceptedMethods, // Special handling of `==`, `|=`, `getClass` methods + new Getters, // Replace non-private vals and vars with getter defs (fields are added later) + new SpecializeFunctions, // Specialized Function{0,1,2} by replacing super with specialized super + new SpecializeTuples, // Specializes Tuples by replacing tuple construction and selection trees + new LiftTry, // Put try expressions that might execute on non-empty stacks into their own methods + new CollectNullableFields, // Collect fields that can be nulled out after use in lazy initialization + new ElimOuterSelect, // Expand outer selections + new ResolveSuper, // Implement super accessors + new FunctionXXLForwarders, // Add forwarders for FunctionXXL apply method + new ParamForwarding, // Add forwarders for aliases of superclass parameters + new TupleOptimizations, // Optimize generic operations on tuples + new LetOverApply, // Lift blocks from receivers of applications + new ArrayConstructors) :: // Intercept creation of (non-generic) arrays and intrinsify. 
+ List(new Erasure) :: // Rewrite types to JVM model, erasing all type parameters, abstract types and refinements. + List(new ElimErasedValueType, // Expand erased value types to their underlying implmementation types + new PureStats, // Remove pure stats from blocks + new VCElideAllocations, // Peep-hole optimization to eliminate unnecessary value class allocations + new EtaReduce, // Reduce eta expansions of pure paths to the underlying function reference + new ArrayApply, // Optimize `scala.Array.apply([....])` and `scala.Array.apply(..., [....])` into `[...]` + new sjs.AddLocalJSFakeNews, // Adds fake new invocations to local JS classes in calls to `createLocalJSClass` + new ElimPolyFunction, // Rewrite PolyFunction subclasses to FunctionN subclasses + new TailRec, // Rewrite tail recursion to loops + new CompleteJavaEnums, // Fill in constructors for Java enums + new Mixin, // Expand trait fields and trait initializers + new LazyVals, // Expand lazy vals + new Memoize, // Add private fields to getters and setters + new NonLocalReturns, // Expand non-local returns + new CapturedVars) :: // Represent vars captured by closures as heap objects + List(new Constructors, // Collect initialization code in primary constructors + // Note: constructors changes decls in transformTemplate, no InfoTransformers should be added after it + new Instrumentation) :: // Count calls and allocations under -Yinstrument + List(new LambdaLift, // Lifts out nested functions to class scope, storing free variables in environments + // Note: in this mini-phase block scopes are incorrect. 
No phases that rely on scopes should be here + new ElimStaticThis, // Replace `this` references to static objects by global identifiers + new CountOuterAccesses) :: // Identify outer accessors that can be dropped + List(new DropOuterAccessors, // Drop unused outer accessors + new CheckNoSuperThis, // Check that supercalls don't contain references to `this` + new Flatten, // Lift all inner classes to package scope + new TransformWildcards, // Replace wildcards with default values + new MoveStatics, // Move static methods from companion to the class itself + new ExpandPrivate, // Widen private definitions accessed from nested classes + new RestoreScopes, // Repair scopes rendered invalid by moving definitions in prior phases of the group + new SelectStatic, // get rid of selects that would be compiled into GetStatic + new sjs.JUnitBootstrappers, // Generate JUnit-specific bootstrapper classes for Scala.js (not enabled by default) + new CollectEntryPoints, // Collect all entry points and save them in the context + new CollectSuperCalls, // Find classes that are called with super + new RepeatableAnnotations) :: // Aggregate repeatable annotations + Nil + + /** Generate the output of the compilation */ + protected def backendPhases: List[List[Phase]] = + List(new backend.sjs.GenSJSIR) :: // Generate .sjsir files for Scala.js (not enabled by default) + List(new GenBCode) :: // Generate JVM bytecode + Nil + + var runId: Int = 1 + def nextRunId: Int = { + runId += 1; runId + } + + def reset()(using Context): Unit = { + ctx.base.reset() + val run = ctx.run + if (run != null) run.reset() + } + + def newRun(using Context): Run = { + reset() + val rctx = + if ctx.settings.Xsemanticdb.value then + ctx.addMode(Mode.ReadPositions) + else + ctx + new Run(this, rctx) + } +} diff --git a/tests/pos-with-compiler-cc/dotc/Driver.scala b/tests/pos-with-compiler-cc/dotc/Driver.scala new file mode 100644 index 000000000000..b85f1365243b --- /dev/null +++ 
b/tests/pos-with-compiler-cc/dotc/Driver.scala @@ -0,0 +1,207 @@ +package dotty.tools.dotc + +import dotty.tools.FatalError +import config.CompilerCommand +import core.Comments.{ContextDoc, ContextDocstrings} +import core.Contexts._ +import core.{MacroClassLoader, TypeError} +import dotty.tools.dotc.ast.Positioned +import dotty.tools.io.AbstractFile +import reporting._ +import core.Decorators._ +import config.Feature + +import scala.util.control.NonFatal +import fromtasty.{TASTYCompiler, TastyFileUtil} + +/** Run the Dotty compiler. + * + * Extending this class lets you customize many aspect of the compilation + * process, but in most cases you only need to call [[process]] on the + * existing object [[Main]]. + */ +class Driver { + + protected def newCompiler(using Context): Compiler = + if (ctx.settings.fromTasty.value) new TASTYCompiler + else new Compiler + + protected def emptyReporter: Reporter = new StoreReporter(null) + + protected def doCompile(compiler: Compiler, files: List[AbstractFile])(using Context): Reporter = + if files.nonEmpty then + try + val run = compiler.newRun + run.compile(files) + finish(compiler, run) + catch + case ex: FatalError => + report.error(ex.getMessage.nn) // signals that we should fail compilation. 
+ case ex: TypeError => + println(s"${ex.toMessage} while compiling ${files.map(_.path).mkString(", ")}") + throw ex + case ex: Throwable => + println(s"$ex while compiling ${files.map(_.path).mkString(", ")}") + throw ex + ctx.reporter + + protected def finish(compiler: Compiler, run: Run)(using Context): Unit = + run.printSummary() + if !ctx.reporter.errorsReported && run.suspendedUnits.nonEmpty then + val suspendedUnits = run.suspendedUnits.toList + if (ctx.settings.XprintSuspension.value) + report.echo(i"compiling suspended $suspendedUnits%, %") + val run1 = compiler.newRun + for unit <- suspendedUnits do unit.suspended = false + run1.compileUnits(suspendedUnits) + finish(compiler, run1)(using MacroClassLoader.init(ctx.fresh)) + + protected def initCtx: Context = (new ContextBase).initialCtx + + protected def sourcesRequired: Boolean = true + + protected def command: CompilerCommand = ScalacCommand + + /** Setup context with initialized settings from CLI arguments, then check if there are any settings that + * would change the default behaviour of the compiler. + * + * @return If there is no setting like `-help` preventing us from continuing compilation, + * this method returns a list of files to compile and an updated Context. + * If compilation should be interrupted, this method returns None. 
+ */ + def setup(args: Array[String], rootCtx: Context): Option[(List[AbstractFile], DetachedContext)] = { + val ictx = rootCtx.fresh + val summary = command.distill(args, ictx.settings)(ictx.settingsState)(using ictx) + ictx.setSettings(summary.sstate) + Feature.checkExperimentalSettings(using ictx) + MacroClassLoader.init(ictx) + Positioned.init(using ictx) + + inContext(ictx) { + if !ctx.settings.YdropComments.value || ctx.settings.YreadComments.value then + ictx.setProperty(ContextDoc, new ContextDocstrings) + val fileNamesOrNone = command.checkUsage(summary, sourcesRequired)(using ctx.settings)(using ctx.settingsState) + fileNamesOrNone.map { fileNames => + val files = fileNames.map(ctx.getFile) + (files, fromTastySetup(files).detach) + } + } + } + + /** Setup extra classpath of tasty and jar files */ + protected def fromTastySetup(files: List[AbstractFile])(using Context): Context = + if ctx.settings.fromTasty.value then + val newEntries: List[String] = files + .flatMap { file => + if !file.exists then + report.error(em"File does not exist: ${file.path}") + None + else file.extension match + case "jar" => Some(file.path) + case "tasty" => + TastyFileUtil.getClassPath(file) match + case Some(classpath) => Some(classpath) + case _ => + report.error(em"Could not load classname from: ${file.path}") + None + case _ => + report.error(em"File extension is not `tasty` or `jar`: ${file.path}") + None + } + .distinct + val ctx1 = ctx.fresh + val fullClassPath = + (newEntries :+ ctx.settings.classpath.value).mkString(java.io.File.pathSeparator.nn) + ctx1.setSetting(ctx1.settings.classpath, fullClassPath) + else ctx + + /** Entry point to the compiler that can be conveniently used with Java reflection. + * + * This entry point can easily be used without depending on the `dotty` package, + * you only need to depend on `dotty-interfaces` and call this method using + * reflection. 
This allows you to write code that will work against multiple + * versions of dotty without recompilation. + * + * The trade-off is that you can only pass a SimpleReporter to this method + * and not a normal Reporter which is more powerful. + * + * Usage example: [[https://github.com/lampepfl/dotty/tree/master/compiler/test/dotty/tools/dotc/InterfaceEntryPointTest.scala]] + * + * @param args Arguments to pass to the compiler. + * @param simple Used to log errors, warnings, and info messages. + * The default reporter is used if this is `null`. + * @param callback Used to execute custom code during the compilation + * process. No callbacks will be executed if this is `null`. + * @return + */ + final def process(args: Array[String], simple: interfaces.SimpleReporter | Null, + callback: interfaces.CompilerCallback | Null): interfaces.ReporterResult = { + val reporter = if (simple == null) null else Reporter.fromSimpleReporter(simple) + process(args, reporter, callback) + } + + /** Principal entry point to the compiler. + * + * Usage example: [[https://github.com/lampepfl/dotty/tree/master/compiler/test/dotty/tools/dotc/EntryPointsTest.scala.disabled]] + * in method `runCompiler` + * + * @param args Arguments to pass to the compiler. + * @param reporter Used to log errors, warnings, and info messages. + * The default reporter is used if this is `null`. + * @param callback Used to execute custom code during the compilation + * process. No callbacks will be executed if this is `null`. + * @return The `Reporter` used. Use `Reporter#hasErrors` to check + * if compilation succeeded. 
+ */ + final def process(args: Array[String], reporter: Reporter | Null = null, + callback: interfaces.CompilerCallback | Null = null): Reporter = { + val compileCtx = initCtx.fresh + if (reporter != null) + compileCtx.setReporter(reporter) + if (callback != null) + compileCtx.setCompilerCallback(callback) + process(args, compileCtx) + } + + /** Entry point to the compiler with no optional arguments. + * + * This overload is provided for compatibility reasons: the + * `RawCompiler` of sbt expects this method to exist and calls + * it using reflection. Keeping it means that we can change + * the other overloads without worrying about breaking compatibility + * with sbt. + */ + final def process(args: Array[String]): Reporter = + process(args, null: Reporter | Null, null: interfaces.CompilerCallback | Null) + + /** Entry point to the compiler using a custom `Context`. + * + * In most cases, you do not need a custom `Context` and should + * instead use one of the other overloads of `process`. However, + * the other overloads cannot be overridden, instead you + * should override this one which they call internally. + * + * Usage example: [[https://github.com/lampepfl/dotty/tree/master/compiler/test/dotty/tools/dotc/EntryPointsTest.scala.disabled]] + * in method `runCompilerWithContext` + * + * @param args Arguments to pass to the compiler. + * @param rootCtx The root Context to use. + * @return The `Reporter` used. Use `Reporter#hasErrors` to check + * if compilation succeeded. + */ + def process(args: Array[String], rootCtx: Context): Reporter = { + setup(args, rootCtx) match + case Some((files, compileCtx)) => + doCompile(newCompiler(using compileCtx), files)(using compileCtx) + case None => + rootCtx.reporter + } + + def main(args: Array[String]): Unit = { + // Preload scala.util.control.NonFatal. Otherwise, when trying to catch a StackOverflowError, + // we may try to load it but fail with another StackOverflowError and lose the original exception, + // see . 
+ val _ = NonFatal + sys.exit(if (process(args).hasErrors) 1 else 0) + } +} diff --git a/tests/pos-with-compiler-cc/dotc/Main.scala b/tests/pos-with-compiler-cc/dotc/Main.scala new file mode 100644 index 000000000000..3288fded52a2 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/Main.scala @@ -0,0 +1,5 @@ +package dotty.tools +package dotc + +/** Main class of the `dotc` batch compiler. */ +object Main extends Driver diff --git a/tests/pos-with-compiler-cc/dotc/MissingCoreLibraryException.scala b/tests/pos-with-compiler-cc/dotc/MissingCoreLibraryException.scala new file mode 100644 index 000000000000..ae20d81226c9 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/MissingCoreLibraryException.scala @@ -0,0 +1,9 @@ +package dotty.tools.dotc + +import dotty.tools.FatalError + +class MissingCoreLibraryException(rootPackage: String) extends FatalError( + s"""Could not find package $rootPackage from compiler core libraries. + |Make sure the compiler core libraries are on the classpath. + """.stripMargin +) diff --git a/tests/pos-with-compiler-cc/dotc/Resident.scala b/tests/pos-with-compiler-cc/dotc/Resident.scala new file mode 100644 index 000000000000..9ebeaaaeb1c2 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/Resident.scala @@ -0,0 +1,61 @@ +package dotty.tools +package dotc + +import core.Contexts._ +import reporting.Reporter +import java.io.EOFException +import scala.annotation.tailrec + +/** A compiler which stays resident between runs. This is more of a PoC than + * something that's expected to be used often + * + * Usage: + * + * > scala dotty.tools.dotc.Resident + * + * dotc> "more options and files to compile" + * + * ... + * + * dotc> :reset // reset all options to the ones passed on the command line + * + * ... 
+ * + * dotc> :q // quit + */ +class Resident extends Driver { + + object residentCompiler extends Compiler + + override def sourcesRequired: Boolean = false + + private val quit = ":q" + private val reset = ":reset" + private val prompt = "dotc> " + + private def getLine() = { + Console.print(prompt) + try scala.io.StdIn.readLine() catch { case _: EOFException => quit } + } + + final override def process(args: Array[String], rootCtx: Context): Reporter = { + @tailrec def loop(args: Array[String], prevCtx: Context): Reporter = { + setup(args, prevCtx) match + case Some((files, ctx)) => + inContext(ctx) { + doCompile(residentCompiler, files) + } + var nextCtx: DetachedContext = ctx + var line = getLine() + while (line == reset) { + nextCtx = rootCtx.detach + line = getLine() + } + if line.startsWith(quit) then ctx.reporter + else loop((line split "\\s+").asInstanceOf[Array[String]], nextCtx) + case None => + prevCtx.reporter + } + loop(args, rootCtx) + } +} diff --git a/tests/pos-with-compiler-cc/dotc/Run.scala b/tests/pos-with-compiler-cc/dotc/Run.scala new file mode 100644 index 000000000000..96f8c6a7b06f --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/Run.scala @@ -0,0 +1,404 @@ +package dotty.tools +package dotc + +import core._ +import Contexts._ +import Periods._ +import Symbols._ +import Scopes._ +import Names.Name +import Denotations.Denotation +import typer.Typer +import typer.ImportInfo.withRootImports +import Decorators._ +import io.AbstractFile +import Phases.unfusedPhases + +import util._ +import reporting.{Suppression, Action, Profile, ActiveProfile, NoProfile} +import reporting.Diagnostic +import reporting.Diagnostic.Warning +import rewrites.Rewrites +import profile.Profiler +import printing.XprintMode +import typer.ImplicitRunInfo +import config.Feature +import StdNames.nme + +import java.io.{BufferedWriter, OutputStreamWriter} +import java.nio.charset.StandardCharsets + +import scala.collection.mutable +import scala.util.control.NonFatal +import 
scala.io.Codec +import annotation.constructorOnly +import annotation.unchecked.uncheckedCaptures + +/** A compiler run. Exports various methods to compile source files */ +class Run(comp: Compiler, @constructorOnly ictx0: Context) extends ImplicitRunInfo with ConstraintRunInfo { + + val ictx = ictx0.detach + + /** Default timeout to stop looking for further implicit suggestions, in ms. + * This is usually for the first import suggestion; subsequent suggestions + * may get smaller timeouts. @see ImportSuggestions.reduceTimeBudget + */ + private var myImportSuggestionBudget: Int = + Int.MinValue // sentinel value; means whatever is set in command line option + + def importSuggestionBudget = + if myImportSuggestionBudget == Int.MinValue then ictx.settings.XimportSuggestionTimeout.value + else myImportSuggestionBudget + + def importSuggestionBudget_=(x: Int) = + myImportSuggestionBudget = x + + /** If this variable is set to `true`, some core typer operations will + * return immediately. Currently these early abort operations are + * `Typer.typed` and `Implicits.typedImplicit`. 
+ */ + @volatile var isCancelled = false + + private var compiling = false + + private var myUnits: List[CompilationUnit] = Nil + private var myUnitsCached: List[CompilationUnit] = Nil + private var myFiles: Set[AbstractFile] = _ + + // `@nowarn` annotations by source file, populated during typer + private val mySuppressions: mutable.LinkedHashMap[SourceFile, mutable.ListBuffer[Suppression]] = mutable.LinkedHashMap.empty + // source files whose `@nowarn` annotations are processed + private val mySuppressionsComplete: mutable.Set[SourceFile] = mutable.Set.empty + // warnings issued before a source file's `@nowarn` annotations are processed, suspended so that `@nowarn` can filter them + private val mySuspendedMessages: mutable.LinkedHashMap[SourceFile, mutable.LinkedHashSet[Warning]] = mutable.LinkedHashMap.empty + + object suppressions: + // When the REPL creates a new run (ReplDriver.compile), parsing is already done in the old context, with the + // previous Run. Parser warnings were suspended in the old run and need to be copied over so they are not lost. + // Same as scala/scala/commit/79ca1408c7. 
+ def initSuspendedMessages(oldRun: Run | Null) = if oldRun != null then + mySuspendedMessages.clear() + mySuspendedMessages ++= oldRun.mySuspendedMessages + + def suppressionsComplete(source: SourceFile) = source == NoSource || mySuppressionsComplete(source) + + def addSuspendedMessage(warning: Warning) = + mySuspendedMessages.getOrElseUpdate(warning.pos.source, mutable.LinkedHashSet.empty) += warning + + def nowarnAction(dia: Diagnostic): Action.Warning.type | Action.Verbose.type | Action.Silent.type = + mySuppressions.getOrElse(dia.pos.source, Nil).find(_.matches(dia)) match { + case Some(s) => + s.markUsed() + if (s.verbose) Action.Verbose + else Action.Silent + case _ => + Action.Warning + } + + def addSuppression(sup: Suppression): Unit = + val source = sup.annotPos.source + mySuppressions.getOrElseUpdate(source, mutable.ListBuffer.empty) += sup + + def reportSuspendedMessages(source: SourceFile)(using Context): Unit = { + // sort suppressions. they are not added in any particular order because of lazy type completion + for (sups <- mySuppressions.get(source)) + mySuppressions(source) = sups.sortBy(sup => 0 - sup.start) + mySuppressionsComplete += source + mySuspendedMessages.remove(source).foreach(_.foreach(ctx.reporter.issueIfNotSuppressed)) + } + + def runFinished(hasErrors: Boolean): Unit = + // report suspended messages (in case the run finished before typer) + mySuspendedMessages.keysIterator.toList.foreach(reportSuspendedMessages) + // report unused nowarns only if all all phases are done + if !hasErrors && ctx.settings.WunusedHas.nowarn then + for { + source <- mySuppressions.keysIterator.toList + sups <- mySuppressions.remove(source) + sup <- sups.reverse + } if (!sup.used) + report.warning("@nowarn annotation does not suppress any warnings", sup.annotPos) + + /** The compilation units currently being compiled, this may return different + * results over time. 
+ */ + def units: List[CompilationUnit] = myUnits + + private def units_=(us: List[CompilationUnit]): Unit = + myUnits = us + + var suspendedUnits: mutable.ListBuffer[CompilationUnit] = mutable.ListBuffer() + + def checkSuspendedUnits(newUnits: List[CompilationUnit])(using Context): Unit = + if newUnits.isEmpty && suspendedUnits.nonEmpty && !ctx.reporter.errorsReported then + val where = + if suspendedUnits.size == 1 then i"in ${suspendedUnits.head}." + else i"""among + | + | ${suspendedUnits.toList}%, % + |""" + val enableXprintSuspensionHint = + if ctx.settings.XprintSuspension.value then "" + else "\n\nCompiling with -Xprint-suspension gives more information." + report.error(em"""Cyclic macro dependencies $where + |Compilation stopped since no further progress can be made. + | + |To fix this, place macros in one set of files and their callers in another.$enableXprintSuspensionHint""") + + /** The files currently being compiled (active or suspended). + * This may return different results over time. + * These files do not have to be source files since it's possible to compile + * from TASTY. + */ + def files: Set[AbstractFile] = { + if (myUnits ne myUnitsCached) { + myUnitsCached = myUnits + myFiles = (myUnits ++ suspendedUnits).map(_.source.file).toSet + } + myFiles + } + + /** The source files of all late entered symbols, as a set */ + private var lateFiles = mutable.Set[AbstractFile]() + + /** A cache for static references to packages and classes */ + val staticRefs = util.EqHashMap[Name, Denotation](initialCapacity = 1024) + + /** Actions that need to be performed at the end of the current compilation run */ + @uncheckedCaptures + private var finalizeActions = mutable.ListBuffer[() => Unit]() + + /** Will be set to true if any of the compiled compilation units contains + * a pureFunctions language import. 
+ */ + var pureFunsImportEncountered = false + + /** Will be set to true if any of the compiled compilation units contains + * a captureChecking language import. + */ + var ccImportEncountered = false + + def compile(files: List[AbstractFile]): Unit = + try + val codec = Codec(runContext.settings.encoding.value) + val sources = files.map(runContext.getSource(_, codec)) + compileSources(sources) + catch + case NonFatal(ex) => + if units.nonEmpty then report.echo(i"exception occurred while compiling $units%, %") + else report.echo(s"exception occurred while compiling ${files.map(_.name).mkString(", ")}") + throw ex + + /** TODO: There's a fundamental design problem here: We assemble phases using `fusePhases` + * when we first build the compiler. But we modify them with -Yskip, -Ystop + * on each run. That modification needs to either transform the tree structure, + * or we need to assemble phases on each run, and take -Yskip, -Ystop into + * account. I think the latter would be preferable. 
+ */ + def compileSources(sources: List[SourceFile]): Unit = + if (sources forall (_.exists)) { + units = sources.map(CompilationUnit(_)) + compileUnits() + } + + + def compileUnits(us: List[CompilationUnit]): Unit = { + units = us + compileUnits() + } + + def compileUnits(us: List[CompilationUnit], ctx: Context): Unit = { + units = us + compileUnits()(using ctx) + } + + var profile: Profile = NoProfile + + private def compileUnits()(using Context) = Stats.maybeMonitored { + if (!ctx.mode.is(Mode.Interactive)) // IDEs might have multi-threaded access, accesses are synchronized + ctx.base.checkSingleThreaded() + + compiling = true + + profile = + if ctx.settings.Vprofile.value + || !ctx.settings.VprofileSortedBy.value.isEmpty + || ctx.settings.VprofileDetails.value != 0 + then ActiveProfile(ctx.settings.VprofileDetails.value.max(0).min(1000)) + else NoProfile + + // If testing pickler, make sure to stop after pickling phase: + val stopAfter = + if (ctx.settings.YtestPickler.value) List("pickler") + else ctx.settings.YstopAfter.value + + val pluginPlan = ctx.base.addPluginPhases(ctx.base.phasePlan) + val phases = ctx.base.fusePhases(pluginPlan, + ctx.settings.Yskip.value, ctx.settings.YstopBefore.value, stopAfter, ctx.settings.Ycheck.value) + ctx.base.usePhases(phases) + + def runPhases(using Context) = { + var lastPrintedTree: PrintedTree = NoPrintedTree + val profiler = ctx.profiler + var phasesWereAdjusted = false + + for (phase <- ctx.base.allPhases) + if (phase.isRunnable) + Stats.trackTime(s"$phase ms ") { + val start = System.currentTimeMillis + val profileBefore = profiler.beforePhase(phase) + units = phase.runOn(units) + profiler.afterPhase(phase, profileBefore) + if (ctx.settings.Xprint.value.containsPhase(phase)) + for (unit <- units) + lastPrintedTree = + printTree(lastPrintedTree)(using ctx.fresh.setPhase(phase.next).setCompilationUnit(unit)) + report.informTime(s"$phase ", start) + Stats.record(s"total trees at end of $phase", ast.Trees.ntrees) + for 
(unit <- units) + Stats.record(s"retained typed trees at end of $phase", unit.tpdTree.treeSize) + ctx.typerState.gc() + } + if !phasesWereAdjusted then + phasesWereAdjusted = true + if !Feature.ccEnabledSomewhere then + ctx.base.unlinkPhaseAsDenotTransformer(Phases.checkCapturesPhase.prev) + ctx.base.unlinkPhaseAsDenotTransformer(Phases.checkCapturesPhase) + + profiler.finished() + } + + val runCtx = ctx.fresh + runCtx.setProfiler(Profiler()) + unfusedPhases.foreach(_.initContext(runCtx)) + runPhases(using runCtx) + if (!ctx.reporter.hasErrors) + Rewrites.writeBack() + suppressions.runFinished(hasErrors = ctx.reporter.hasErrors) + while (finalizeActions.nonEmpty) { + val action = finalizeActions.remove(0) + action() + } + compiling = false + } + + /** Enter top-level definitions of classes and objects contained in source file `file`. + * The newly added symbols replace any previously entered symbols. + * If `typeCheck = true`, also run typer on the compilation unit, and set + * `rootTreeOrProvider`. 
+ */ + def lateCompile(file: AbstractFile, typeCheck: Boolean)(using Context): Unit = + if (!files.contains(file) && !lateFiles.contains(file)) { + lateFiles += file + + val codec = Codec(ctx.settings.encoding.value) + val unit = CompilationUnit(ctx.getSource(file, codec)) + val unitCtx = runContext.fresh + .setCompilationUnit(unit) + .withRootImports + + def process()(using Context) = + ctx.typer.lateEnterUnit(doTypeCheck => + if typeCheck then + if compiling then finalizeActions += doTypeCheck + else doTypeCheck() + ) + + process()(using unitCtx) + } + + private sealed trait PrintedTree + private /*final*/ case class SomePrintedTree(phase: String, tree: String) extends PrintedTree + private object NoPrintedTree extends PrintedTree + + private def printTree(last: PrintedTree)(using Context): PrintedTree = { + val unit = ctx.compilationUnit + val fusedPhase = ctx.phase.prevMega + val echoHeader = f"[[syntax trees at end of $fusedPhase%25s]] // ${unit.source}" + val tree = if ctx.isAfterTyper then unit.tpdTree else unit.untpdTree + val treeString = fusedPhase.show(tree) + + last match { + case SomePrintedTree(phase, lastTreeString) if lastTreeString == treeString => + report.echo(s"$echoHeader: unchanged since $phase") + last + + case SomePrintedTree(phase, lastTreeString) if ctx.settings.XprintDiff.value || ctx.settings.XprintDiffDel.value => + val diff = DiffUtil.mkColoredCodeDiff(treeString, lastTreeString, ctx.settings.XprintDiffDel.value) + report.echo(s"$echoHeader\n$diff\n") + SomePrintedTree(fusedPhase.phaseName, treeString) + + case _ => + report.echo(s"$echoHeader\n$treeString\n") + SomePrintedTree(fusedPhase.phaseName, treeString) + } + } + + def compileFromStrings(scalaSources: List[String], javaSources: List[String] = Nil): Unit = { + def sourceFile(source: String, isJava: Boolean): SourceFile = { + val uuid = java.util.UUID.randomUUID().toString + val ext = if (isJava) "java" else "scala" + val name = s"compileFromString-$uuid.$ext" + 
SourceFile.virtual(name, source) + } + val sources = + scalaSources.map(sourceFile(_, isJava = false)) ++ + javaSources.map(sourceFile(_, isJava = true)) + + compileSources(sources) + } + + /** Print summary of warnings and errors encountered */ + def printSummary(): Unit = { + printMaxConstraint() + val r = runContext.reporter + if !r.errorsReported then + profile.printSummary() + r.summarizeUnreportedWarnings() + r.printSummary() + } + + override def reset(): Unit = { + super[ImplicitRunInfo].reset() + super[ConstraintRunInfo].reset() + myCtx = null + myUnits = Nil + myUnitsCached = Nil + } + + /** Produces the following contexts, from outermost to innermost + * + * bootStrap: A context with next available runId and a scope consisting of + * the RootPackage _root_ + * start A context with RootClass as owner and the necessary initializations + * for type checking. + * imports For each element of RootImports, an import context + */ + protected def rootContext(using Context): DetachedContext = { + ctx.initialize() + ctx.base.setPhasePlan(comp.phases) + val rootScope = new MutableScope(0) + val bootstrap = ctx.fresh + .setPeriod(Period(comp.nextRunId, FirstPhaseId)) + .setScope(rootScope) + rootScope.enter(ctx.definitions.RootPackage)(using bootstrap) + var start = bootstrap.fresh + .setOwner(defn.RootClass) + .setTyper(new Typer) + .addMode(Mode.ImplicitsEnabled) + .setTyperState(ctx.typerState.fresh(ctx.reporter)) + if ctx.settings.YexplicitNulls.value && !Feature.enabledBySetting(nme.unsafeNulls) then + start = start.addMode(Mode.SafeNulls) + ctx.initialize()(using start) // re-initialize the base context with start + + // `this` must be unchecked for safe initialization because by being passed to setRun during + // initialization, it is not yet considered fully initialized by the initialization checker + start.setRun(this: @unchecked).detach + } + + private var myCtx: DetachedContext | Null = rootContext(using ictx) + + /** The context created for this run */ + 
given runContext[Dummy_so_its_a_def]: DetachedContext = myCtx.nn + assert(runContext.runId <= Periods.MaxPossibleRunId) +} diff --git a/tests/pos-with-compiler-cc/dotc/ScalacCommand.scala b/tests/pos-with-compiler-cc/dotc/ScalacCommand.scala new file mode 100644 index 000000000000..2e0d9a08f25d --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/ScalacCommand.scala @@ -0,0 +1,9 @@ +package dotty.tools.dotc + +import config.Properties._ +import config.CompilerCommand + +object ScalacCommand extends CompilerCommand: + override def cmdName: String = "scalac" + override def versionMsg: String = s"Scala compiler $versionString -- $copyrightString" + override def ifErrorsMsg: String = " scalac -help gives more information" diff --git a/tests/pos-with-compiler-cc/dotc/ast/CheckTrees.scala.disabled b/tests/pos-with-compiler-cc/dotc/ast/CheckTrees.scala.disabled new file mode 100644 index 000000000000..6bf7530faf24 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/ast/CheckTrees.scala.disabled @@ -0,0 +1,258 @@ +package dotty.tools +package dotc +package ast + +import core._ +import util.Spans._, Types._, Contexts._, Constants._, Names._, Flags._ +import SymDenotations._, Symbols._, StdNames._, Annotations._, Trees._ + +// TODO: revise, integrate in a checking phase. 
+object CheckTrees { + + import tpd._ + + def check(p: Boolean, msg: => String = "")(using Context): Unit = assert(p, msg) + + def checkTypeArg(arg: Tree, bounds: TypeBounds)(using Context): Unit = { + check(arg.isValueType) + check(bounds contains arg.tpe) + } + + def escapingRefs(block: Block)(using Context): collection.Set[NamedType] = { + var hoisted: Set[Symbol] = Set() + lazy val locals = ctx.typeAssigner.localSyms(block.stats).toSet + def isLocal(sym: Symbol): Boolean = + (locals contains sym) && !isHoistableClass(sym) + def isHoistableClass(sym: Symbol) = + sym.isClass && { + (hoisted contains sym) || { + hoisted += sym + !classLeaks(sym.asClass) + } + } + def leakingTypes(tp: Type): collection.Set[NamedType] = + tp namedPartsWith (tp => isLocal(tp.symbol)) + def typeLeaks(tp: Type): Boolean = leakingTypes(tp).nonEmpty + def classLeaks(sym: ClassSymbol): Boolean = + (ctx.owner is Method) || // can't hoist classes out of method bodies + (sym.info.parents exists typeLeaks) || + (sym.decls.toList exists (t => typeLeaks(t.info))) + leakingTypes(block.tpe) + } + + def checkType(tree: Tree)(using Context): Unit = tree match { + case Ident(name) => + case Select(qualifier, name) => + check(qualifier.isValue) + check(qualifier.tpe =:= tree.tpe.normalizedPrefix) + val denot = qualifier.tpe.member(name) + check(denot.exists) + check(denot.hasAltWith(_.symbol == tree.symbol)) + case This(cls) => + case Super(qual, mixin) => + check(qual.isValue) + val cls = qual.tpe.typeSymbol + check(cls.isClass) + case Apply(fn, args) => + def checkArg(arg: Tree, name: Name, formal: Type): Unit = { + arg match { + case NamedArg(argName, _) => + check(argName == name) + case _ => + check(arg.isValue) + } + check(arg.tpe <:< formal) + } + val MethodType(paramNames, paramTypes) = fn.tpe.widen // checked already at construction + args.lazyZip(paramNames).lazyZip(paramTypes) foreach checkArg + case TypeApply(fn, args) => + val pt @ PolyType(_) = fn.tpe.widen // checked already at 
construction + args.lazyZip(pt.instantiateBounds(args map (_.tpe))) foreach checkTypeArg + case Literal(const: Constant) => + case New(tpt) => + check(tpt.isValueType) + val cls = tpt.tpe.typeSymbol + check(cls.isClass) + check(!(cls is AbstractOrTrait)) + case Pair(left, right) => + check(left.isValue) + check(right.isValue) + case Typed(expr, tpt) => + check(tpt.isValueType) + expr.tpe.widen match { + case tp: MethodType => + val cls = tpt.tpe.typeSymbol + check(cls.isClass) + check((cls is Trait) || + cls.primaryConstructor.info.paramTypess.flatten.isEmpty) + val absMembers = tpt.tpe.abstractTermMembers + check(absMembers.size == 1) + check(tp <:< absMembers.head.info) + case _ => + check(expr.isValueOrPattern) + check(expr.tpe <:< tpt.tpe.translateParameterized(defn.RepeatedParamClass, defn.SeqClass)) + } + case NamedArg(name, arg) => + case Assign(lhs, rhs) => + check(lhs.isValue); check(rhs.isValue) + lhs.tpe match { + case ltpe: TermRef => + check(ltpe.symbol is Mutable) + case _ => + check(false) + } + check(rhs.tpe <:< lhs.tpe.widen) + case tree @ Block(stats, expr) => + check(expr.isValue) + check(escapingRefs(tree).isEmpty) + case If(cond, thenp, elsep) => + check(cond.isValue); check(thenp.isValue); check(elsep.isValue) + check(cond.tpe isRef defn.BooleanClass) + case Closure(env, meth, target) => + meth.tpe.widen match { + case mt @ MethodType(_, paramTypes) => + if (target.isEmpty) { + check(env.length < paramTypes.length) + for ((arg, formal) <- env zip paramTypes) + check(arg.tpe <:< formal) + } + else + // env is stored in class, not method + target.tpe match { + case SAMType(targetMeth) => + check(mt <:< targetMeth.info) + } + } + case Match(selector, cases) => + check(selector.isValue) + // are any checks that relate selector and patterns desirable? 
+ case CaseDef(pat, guard, body) => + check(pat.isValueOrPattern); check(guard.isValue); check(body.isValue) + check(guard.tpe.derivesFrom(defn.BooleanClass)) + case Return(expr, from) => + check(expr.isValue); check(from.isTerm) + check(from.tpe.termSymbol.isRealMethod) + case Try(block, handler, finalizer) => + check(block.isTerm) + check(finalizer.isTerm) + check(handler.isTerm) + check(handler.tpe derivesFrom defn.FunctionClass(1)) + check(handler.tpe.baseArgInfos(defn.FunctionClass(1)).head <:< defn.ThrowableType) + case Throw(expr) => + check(expr.isValue) + check(expr.tpe.derivesFrom(defn.ThrowableClass)) + case SeqLiteral(elems) => + val elemtp = tree.tpe.elemType + for (elem <- elems) { + check(elem.isValue) + check(elem.tpe <:< elemtp) + } + case TypeTree(original) => + if (!original.isEmpty) { + check(original.isValueType) + check(original.tpe == tree.tpe) + } + case SingletonTypeTree(ref) => + check(ref.isValue) + check(ref.symbol.isStable) + case SelectFromTypeTree(qualifier, name) => + check(qualifier.isValueType) + check(qualifier.tpe =:= tree.tpe.normalizedPrefix) + val denot = qualifier.tpe.member(name) + check(denot.exists) + check(denot.symbol == tree.symbol) + case AndTypeTree(left, right) => + check(left.isValueType); check(right.isValueType) + case OrTypeTree(left, right) => + check(left.isValueType); check(right.isValueType) + case RefinedTypeTree(tpt, refinements) => + check(tpt.isValueType) + def checkRefinements(forbidden: Set[Symbol], rs: List[Tree]): Unit = rs match { + case r :: rs1 => + val rsym = r.symbol + check(rsym.isTerm || rsym.isAbstractOrAliasType) + if (rsym.isAbstractType) check(tpt.tpe.member(rsym.name).exists) + check(rsym.info forallParts { + case nt: NamedType => !(forbidden contains nt.symbol) + case _ => true + }) + checkRefinements(forbidden - rsym, rs1) + case nil => + } + checkRefinements(ctx.typeAssigner.localSyms(refinements).toSet, refinements) + case AppliedTypeTree(tpt, args) => + check(tpt.isValueType) + val 
tparams = tpt.tpe.typeParams + check(sameLength(tparams, args)) + args.lazyZip(tparams map (_.info.bounds)) foreach checkTypeArg + case TypeBoundsTree(lo, hi) => + check(lo.isValueType); check(hi.isValueType) + check(lo.tpe <:< hi.tpe) + case Bind(sym, body) => + check(body.isValueOrPattern) + check(!(tree.symbol is Method)) + body match { + case Ident(nme.WILDCARD) => + case _ => check(body.tpe.widen =:= tree.symbol.info) + } + case Alternative(alts) => + for (alt <- alts) check(alt.isValueOrPattern) + case UnApply(fun, implicits, args) => // todo: review + check(fun.isTerm) + for (arg <- args) check(arg.isValueOrPattern) + val funtpe @ MethodType(_, _) = fun.tpe.widen + fun.symbol.name match { // check arg arity + case nme.unapplySeq => + // args need to be wrapped in (...: _*) + check(args.length == 1) + check(args.head.isInstanceOf[SeqLiteral]) + case nme.unapply => + val rtp = funtpe.resultType + if (rtp isRef defn.BooleanClass) + check(args.isEmpty) + else { + check(rtp isRef defn.OptionClass) + val normArgs = rtp.argTypesHi match { + case optionArg :: Nil => + optionArg.argTypesHi match { + case Nil => + optionArg :: Nil + case tupleArgs if defn.isTupleNType(optionArg) => + tupleArgs + } + case _ => + check(false) + Nil + } + check(sameLength(normArgs, args)) + } + } + case ValDef(mods, name, tpt, rhs) => + check(!(tree.symbol is Method)) + if (!rhs.isEmpty) { + check(rhs.isValue) + check(rhs.tpe <:< tpt.tpe) + } + case DefDef(mods, name, tparams, vparamss, tpt, rhs) => + check(tree.symbol is Method) + if (!rhs.isEmpty) { + check(rhs.isValue) + check(rhs.tpe <:< tpt.tpe) + } + case TypeDef(mods, name, tpt) => + check(tpt.isInstanceOf[Template] || tpt.tpe.isInstanceOf[TypeBounds]) + case Template(constr, parents, selfType, body) => + case Import(expr, selectors) => + check(expr.isValue) + check(expr.tpe.termSymbol.isStable) + case PackageDef(pid, stats) => + check(pid.isTerm) + check(pid.symbol is Package) + case Annotated(annot, arg) => + 
check(annot.isInstantiation) + check(annot.symbol.owner.isSubClass(defn.AnnotationClass)) + check(arg.isValueType || arg.isValue) + case EmptyTree => + } +} + diff --git a/tests/pos-with-compiler-cc/dotc/ast/Desugar.scala b/tests/pos-with-compiler-cc/dotc/ast/Desugar.scala new file mode 100644 index 000000000000..390e58d89245 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/ast/Desugar.scala @@ -0,0 +1,1979 @@ +package dotty.tools +package dotc +package ast + +import core._ +import util.Spans._, Types._, Contexts._, Constants._, Names._, NameOps._, Flags._ +import Symbols._, StdNames._, Trees._, ContextOps._ +import Decorators._, transform.SymUtils._ +import Annotations.Annotation +import NameKinds.{UniqueName, EvidenceParamName, DefaultGetterName, WildcardParamName} +import typer.{Namer, Checking} +import util.{Property, SourceFile, SourcePosition, Chars} +import config.Feature.{sourceVersion, migrateTo3, enabled} +import config.SourceVersion._ +import collection.mutable.ListBuffer +import reporting._ +import annotation.constructorOnly +import printing.Formatting.hl +import config.Printers + +import scala.annotation.internal.sharable + +object desugar { + import untpd._ + import DesugarEnums._ + + /** An attachment for companion modules of classes that have a `derives` clause. + * The position value indicates the start position of the template of the + * deriving class. + */ + val DerivingCompanion: Property.Key[SourcePosition] = Property.Key() + + /** An attachment for match expressions generated from a PatDef or GenFrom. + * Value of key == one of IrrefutablePatDef, IrrefutableGenFrom + */ + val CheckIrrefutable: Property.Key[MatchCheck] = Property.StickyKey() + + /** A multi-line infix operation with the infix operator starting a new line. + * Used for explaining potential errors. + */ + val MultiLineInfix: Property.Key[Unit] = Property.StickyKey() + + /** An attachment key to indicate that a ValDef originated from parameter untupling. 
+ */ + val UntupledParam: Property.Key[Unit] = Property.StickyKey() + + /** What static check should be applied to a Match? */ + enum MatchCheck { + case None, Exhaustive, IrrefutablePatDef, IrrefutableGenFrom + } + + /** Is `name` the name of a method that can be invalidated as a compiler-generated + * case class method if it clashes with a user-defined method? + */ + def isRetractableCaseClassMethodName(name: Name)(using Context): Boolean = name match { + case nme.apply | nme.unapply | nme.unapplySeq | nme.copy => true + case DefaultGetterName(nme.copy, _) => true + case _ => false + } + + /** Is `name` the name of a method that is added unconditionally to case classes? */ + def isDesugaredCaseClassMethodName(name: Name)(using Context): Boolean = + isRetractableCaseClassMethodName(name) || name.isSelectorName + +// ----- DerivedTypeTrees ----------------------------------- + + class SetterParamTree(implicit @constructorOnly src: SourceFile) extends DerivedTypeTree { + def derivedTree(sym: Symbol)(using Context): tpd.TypeTree = tpd.TypeTree(sym.info.resultType) + } + + class TypeRefTree(implicit @constructorOnly src: SourceFile) extends DerivedTypeTree { + def derivedTree(sym: Symbol)(using Context): tpd.TypeTree = tpd.TypeTree(sym.typeRef) + } + + class TermRefTree(implicit @constructorOnly src: SourceFile) extends DerivedTypeTree { + def derivedTree(sym: Symbol)(using Context): tpd.Tree = tpd.ref(sym) + } + + /** A type tree that computes its type from an existing parameter. */ + class DerivedFromParamTree()(implicit @constructorOnly src: SourceFile) extends DerivedTypeTree { + + /** Complete the appropriate constructors so that OriginalSymbol attachments are + * pushed to DerivedTypeTrees. 
+ */ + override def ensureCompletions(using Context): Unit = { + def completeConstructor(sym: Symbol) = + sym.infoOrCompleter match { + case completer: Namer#ClassCompleter => + completer.completeConstructor(sym) + case _ => + } + + if (!ctx.owner.is(Package)) + if (ctx.owner.isClass) { + completeConstructor(ctx.owner) + if (ctx.owner.is(ModuleClass)) + completeConstructor(ctx.owner.linkedClass) + } + else ensureCompletions(using ctx.outer) + } + + /** Return info of original symbol, where all references to siblings of the + * original symbol (i.e. sibling and original symbol have the same owner) + * are rewired to same-named parameters or accessors in the scope enclosing + * the current scope. The current scope is the scope owned by the defined symbol + * itself, that's why we have to look one scope further out. If the resulting + * type is an alias type, dealias it. This is necessary because the + * accessor of a type parameter is a private type alias that cannot be accessed + * from subclasses. 
+ */ + def derivedTree(sym: Symbol)(using Context): tpd.TypeTree = { + val dctx = ctx.detach + val relocate = new TypeMap(using dctx) { + val originalOwner = sym.owner + def apply(tp: Type) = tp match { + case tp: NamedType if tp.symbol.exists && (tp.symbol.owner eq originalOwner) => + val defctx = mapCtx.detach.outersIterator.dropWhile(_.scope eq mapCtx.scope).next() + var local = defctx.denotNamed(tp.name).suchThat(_.isParamOrAccessor).symbol + if (local.exists) (defctx.owner.thisType select local).dealiasKeepAnnots + else { + def msg = + em"no matching symbol for ${tp.symbol.showLocated} in ${defctx.owner} / ${defctx.effectiveScope.toList}" + ErrorType(msg).assertingErrorsReported(msg) + } + case _ => + mapOver(tp) + } + } + tpd.TypeTree(relocate(sym.info)) + } + } + + /** A type definition copied from `tdef` with a rhs typetree derived from it */ + def derivedTypeParam(tdef: TypeDef)(using Context): TypeDef = + cpy.TypeDef(tdef)( + rhs = DerivedFromParamTree().withSpan(tdef.rhs.span).watching(tdef) + ) + + /** A derived type definition watching `sym` */ + def derivedTypeParamWithVariance(sym: TypeSymbol)(using Context): TypeDef = + val variance = VarianceFlags & sym.flags + TypeDef(sym.name, DerivedFromParamTree().watching(sym)).withFlags(TypeParam | Synthetic | variance) + + /** A value definition copied from `vdef` with a tpt typetree derived from it */ + def derivedTermParam(vdef: ValDef)(using Context): ValDef = + cpy.ValDef(vdef)( + tpt = DerivedFromParamTree().withSpan(vdef.tpt.span).watching(vdef)) + +// ----- Desugar methods ------------------------------------------------- + + /** Setter generation is needed for: + * - non-private class members + * - all trait members + * - all package object members + */ + def isSetterNeeded(valDef: ValDef)(using Context): Boolean = { + val mods = valDef.mods + mods.is(Mutable) + && ctx.owner.isClass + && (!mods.is(Private) || ctx.owner.is(Trait) || ctx.owner.isPackageObject) + } + + /** var x: Int = expr + * ==> + * 
def x: Int = expr + * def x_=($1: ): Unit = () + * + * Generate setter where needed + */ + def valDef(vdef0: ValDef)(using Context): Tree = + val vdef @ ValDef(_, tpt, rhs) = vdef0 + val valName = normalizeName(vdef, tpt).asTermName + var mods1 = vdef.mods + + def dropInto(tpt: Tree): Tree = tpt match + case Into(tpt1) => + mods1 = vdef.mods.withAddedAnnotation( + TypedSplice( + Annotation(defn.AllowConversionsAnnot).tree.withSpan(tpt.span.startPos))) + tpt1 + case ByNameTypeTree(tpt1) => + cpy.ByNameTypeTree(tpt)(dropInto(tpt1)) + case PostfixOp(tpt1, op) if op.name == tpnme.raw.STAR => + cpy.PostfixOp(tpt)(dropInto(tpt1), op) + case _ => + tpt + + val vdef1 = cpy.ValDef(vdef)(name = valName, tpt = dropInto(tpt)) + .withMods(mods1) + + if isSetterNeeded(vdef) then + val setterParam = makeSyntheticParameter(tpt = SetterParamTree().watching(vdef)) + // The rhs gets filled in later, when field is generated and getter has parameters (see Memoize miniphase) + val setterRhs = if (vdef.rhs.isEmpty) EmptyTree else unitLiteral + val setter = cpy.DefDef(vdef)( + name = valName.setterName, + paramss = (setterParam :: Nil) :: Nil, + tpt = TypeTree(defn.UnitType), + rhs = setterRhs + ).withMods((vdef.mods | Accessor) &~ (CaseAccessor | GivenOrImplicit | Lazy)) + .dropEndMarker() // the end marker should only appear on the getter definition + Thicket(vdef1, setter) + else vdef1 + end valDef + + def makeImplicitParameters(tpts: List[Tree], implicitFlag: FlagSet, forPrimaryConstructor: Boolean = false)(using Context): List[ValDef] = + for (tpt <- tpts) yield { + val paramFlags: FlagSet = if (forPrimaryConstructor) LocalParamAccessor else Param + val epname = EvidenceParamName.fresh() + ValDef(epname, tpt, EmptyTree).withFlags(paramFlags | implicitFlag) + } + + def mapParamss(paramss: List[ParamClause]) + (mapTypeParam: TypeDef => TypeDef) + (mapTermParam: ValDef => ValDef)(using Context): List[ParamClause] = + paramss.mapConserve { + case TypeDefs(tparams) => 
tparams.mapConserve(mapTypeParam) + case ValDefs(vparams) => vparams.mapConserve(mapTermParam) + case _ => unreachable() + } + + /** 1. Expand context bounds to evidence params. E.g., + * + * def f[T >: L <: H : B](params) + * ==> + * def f[T >: L <: H](params)(implicit evidence$0: B[T]) + * + * 2. Expand default arguments to default getters. E.g, + * + * def f[T: B](x: Int = 1)(y: String = x + "m") = ... + * ==> + * def f[T](x: Int)(y: String)(implicit evidence$0: B[T]) = ... + * def f$default$1[T] = 1 + * def f$default$2[T](x: Int) = x + "m" + */ + private def defDef(meth: DefDef, isPrimaryConstructor: Boolean = false)(using Context): Tree = + addDefaultGetters(elimContextBounds(meth, isPrimaryConstructor)) + + private def elimContextBounds(meth: DefDef, isPrimaryConstructor: Boolean)(using Context): DefDef = + val DefDef(_, paramss, tpt, rhs) = meth + val evidenceParamBuf = ListBuffer[ValDef]() + + def desugarContextBounds(rhs: Tree): Tree = rhs match + case ContextBounds(tbounds, cxbounds) => + val iflag = if sourceVersion.isAtLeast(`future`) then Given else Implicit + evidenceParamBuf ++= makeImplicitParameters( + cxbounds, iflag, forPrimaryConstructor = isPrimaryConstructor) + tbounds + case LambdaTypeTree(tparams, body) => + cpy.LambdaTypeTree(rhs)(tparams, desugarContextBounds(body)) + case _ => + rhs + + val paramssNoContextBounds = + mapParamss(paramss) { + tparam => cpy.TypeDef(tparam)(rhs = desugarContextBounds(tparam.rhs)) + }(identity) + + rhs match + case MacroTree(call) => + cpy.DefDef(meth)(rhs = call).withMods(meth.mods | Macro | Erased) + case _ => + addEvidenceParams( + cpy.DefDef(meth)( + name = normalizeName(meth, tpt).asTermName, + paramss = paramssNoContextBounds), + evidenceParamBuf.toList) + end elimContextBounds + + def addDefaultGetters(meth: DefDef)(using Context): Tree = + + /** The longest prefix of parameter lists in paramss whose total number of + * ValDefs does not exceed `n` + */ + def takeUpTo(paramss: List[ParamClause], n: Int): 
List[ParamClause] = paramss match + case ValDefs(vparams) :: paramss1 => + val len = vparams.length + if len <= n then vparams :: takeUpTo(paramss1, n - len) else Nil + case TypeDefs(tparams) :: paramss1 => + tparams :: takeUpTo(paramss1, n) + case _ => + Nil + + def dropContextBounds(tparam: TypeDef): TypeDef = + def dropInRhs(rhs: Tree): Tree = rhs match + case ContextBounds(tbounds, _) => + tbounds + case rhs @ LambdaTypeTree(tparams, body) => + cpy.LambdaTypeTree(rhs)(tparams, dropInRhs(body)) + case _ => + rhs + cpy.TypeDef(tparam)(rhs = dropInRhs(tparam.rhs)) + + def paramssNoRHS = mapParamss(meth.paramss)(identity) { + vparam => + if vparam.rhs.isEmpty then vparam + else cpy.ValDef(vparam)(rhs = EmptyTree).withMods(vparam.mods | HasDefault) + } + + def getterParamss(n: Int): List[ParamClause] = + mapParamss(takeUpTo(paramssNoRHS, n)) { + tparam => dropContextBounds(toDefParam(tparam, keepAnnotations = true)) + } { + vparam => toDefParam(vparam, keepAnnotations = true, keepDefault = false) + } + + def defaultGetters(paramss: List[ParamClause], n: Int): List[DefDef] = paramss match + case ValDefs(vparam :: vparams) :: paramss1 => + def defaultGetter: DefDef = + DefDef( + name = DefaultGetterName(meth.name, n), + paramss = getterParamss(n), + tpt = TypeTree(), + rhs = vparam.rhs + ) + .withMods(Modifiers( + meth.mods.flags & (AccessFlags | Synthetic) | (vparam.mods.flags & Inline), + meth.mods.privateWithin)) + val rest = defaultGetters(vparams :: paramss1, n + 1) + if vparam.rhs.isEmpty then rest else defaultGetter :: rest + case _ :: paramss1 => // skip empty parameter lists and type parameters + defaultGetters(paramss1, n) + case Nil => + Nil + + val defGetters = defaultGetters(meth.paramss, 0) + if defGetters.isEmpty then meth + else Thicket(cpy.DefDef(meth)(paramss = paramssNoRHS) :: defGetters) + end addDefaultGetters + + /** Add an explicit ascription to the `expectedTpt` to every tail splice. 
+ * + * - `'{ x }` -> `'{ x }` + * - `'{ $x }` -> `'{ $x: T }` + * - `'{ if (...) $x else $y }` -> `'{ if (...) ($x: T) else ($y: T) }` + * + * Note that the splice `$t: T` will be typed as `${t: Expr[T]}` + */ + def quotedPattern(tree: untpd.Tree, expectedTpt: untpd.Tree)(using Context): untpd.Tree = { + def adaptToExpectedTpt(tree: untpd.Tree): untpd.Tree = tree match { + // Add the expected type as an ascription + case _: untpd.Splice => + untpd.Typed(tree, expectedTpt).withSpan(tree.span) + case Typed(expr: untpd.Splice, tpt) => + cpy.Typed(tree)(expr, untpd.makeAndType(tpt, expectedTpt).withSpan(tpt.span)) + + // Propagate down the expected type to the leafs of the expression + case Block(stats, expr) => + cpy.Block(tree)(stats, adaptToExpectedTpt(expr)) + case If(cond, thenp, elsep) => + cpy.If(tree)(cond, adaptToExpectedTpt(thenp), adaptToExpectedTpt(elsep)) + case untpd.Parens(expr) => + cpy.Parens(tree)(adaptToExpectedTpt(expr)) + case Match(selector, cases) => + val newCases = cases.map(cdef => cpy.CaseDef(cdef)(body = adaptToExpectedTpt(cdef.body))) + cpy.Match(tree)(selector, newCases) + case untpd.ParsedTry(expr, handler, finalizer) => + cpy.ParsedTry(tree)(adaptToExpectedTpt(expr), adaptToExpectedTpt(handler), finalizer) + + // Tree does not need to be ascribed + case _ => + tree + } + adaptToExpectedTpt(tree) + } + + /** Add all evidence parameters in `params` as implicit parameters to `meth`. + * If the parameters of `meth` end in an implicit parameter list or using clause, + * evidence parameters are added in front of that list. Otherwise they are added + * as a separate parameter clause. 
+ */ + private def addEvidenceParams(meth: DefDef, params: List[ValDef])(using Context): DefDef = + params match + case Nil => + meth + case evidenceParams => + val paramss1 = meth.paramss.reverse match + case ValDefs(vparams @ (vparam :: _)) :: rparamss if vparam.mods.isOneOf(GivenOrImplicit) => + ((evidenceParams ++ vparams) :: rparamss).reverse + case _ => + meth.paramss :+ evidenceParams + cpy.DefDef(meth)(paramss = paramss1) + + /** The implicit evidence parameters of `meth`, as generated by `desugar.defDef` */ + private def evidenceParams(meth: DefDef)(using Context): List[ValDef] = + meth.paramss.reverse match { + case ValDefs(vparams @ (vparam :: _)) :: _ if vparam.mods.isOneOf(GivenOrImplicit) => + vparams.takeWhile(_.name.is(EvidenceParamName)) + case _ => + Nil + } + + @sharable private val synthetic = Modifiers(Synthetic) + + private def toDefParam(tparam: TypeDef, keepAnnotations: Boolean): TypeDef = { + var mods = tparam.rawMods + if (!keepAnnotations) mods = mods.withAnnotations(Nil) + tparam.withMods(mods & EmptyFlags | Param) + } + private def toDefParam(vparam: ValDef, keepAnnotations: Boolean, keepDefault: Boolean): ValDef = { + var mods = vparam.rawMods + if (!keepAnnotations) mods = mods.withAnnotations(Nil) + val hasDefault = if keepDefault then HasDefault else EmptyFlags + vparam.withMods(mods & (GivenOrImplicit | Erased | hasDefault) | Param) + } + + def mkApply(fn: Tree, paramss: List[ParamClause])(using Context): Tree = + paramss.foldLeft(fn) { (fn, params) => params match + case TypeDefs(params) => + TypeApply(fn, params.map(refOfDef)) + case (vparam: ValDef) :: _ if vparam.mods.is(Given) => + Apply(fn, params.map(refOfDef)).setApplyKind(ApplyKind.Using) + case _ => + Apply(fn, params.map(refOfDef)) + } + + /** The expansion of a class definition. 
See inline comments for what is involved */ + def classDef(cdef: TypeDef)(using Context): Tree = { + val impl @ Template(constr0, _, self, _) = cdef.rhs: @unchecked + val className = normalizeName(cdef, impl).asTypeName + val parents = impl.parents + val mods = cdef.mods + val companionMods = mods + .withFlags((mods.flags & (AccessFlags | Final)).toCommonFlags) + .withMods(Nil) + .withAnnotations(Nil) + + var defaultGetters: List[Tree] = Nil + + def decompose(ddef: Tree): DefDef = ddef match { + case meth: DefDef => meth + case Thicket((meth: DefDef) :: defaults) => + defaultGetters = defaults + meth + } + + val constr1 = decompose(defDef(impl.constr, isPrimaryConstructor = true)) + + // The original type and value parameters in the constructor already have the flags + // needed to be type members (i.e. param, and possibly also private and local unless + // prefixed by type or val). `tparams` and `vparamss` are the type parameters that + // go in `constr`, the constructor after desugaring. + + /** Does `tree' look like a reference to AnyVal? Temporary test before we have inline classes */ + def isAnyVal(tree: Tree): Boolean = tree match { + case Ident(tpnme.AnyVal) => true + case Select(qual, tpnme.AnyVal) => isScala(qual) + case _ => false + } + def isScala(tree: Tree): Boolean = tree match { + case Ident(nme.scala) => true + case Select(Ident(nme.ROOTPKG), nme.scala) => true + case _ => false + } + + def namePos = cdef.sourcePos.withSpan(cdef.nameSpan) + + val isObject = mods.is(Module) + val isCaseClass = mods.is(Case) && !isObject + val isCaseObject = mods.is(Case) && isObject + val isEnum = mods.isEnumClass && !mods.is(Module) + def isEnumCase = mods.isEnumCase + def isNonEnumCase = !isEnumCase && (isCaseClass || isCaseObject) + val isValueClass = parents.nonEmpty && isAnyVal(parents.head) + // This is not watertight, but `extends AnyVal` will be replaced by `inline` later. 
+ + val originalTparams = constr1.leadingTypeParams + val originalVparamss = asTermOnly(constr1.trailingParamss) + lazy val derivedEnumParams = enumClass.typeParams.map(derivedTypeParamWithVariance) + val impliedTparams = + if (isEnumCase) { + val tparamReferenced = typeParamIsReferenced( + enumClass.typeParams, originalTparams, originalVparamss, parents) + if (originalTparams.isEmpty && (parents.isEmpty || tparamReferenced)) + derivedEnumParams.map(tdef => tdef.withFlags(tdef.mods.flags | PrivateLocal)) + else originalTparams + } + else originalTparams + + if mods.is(Trait) then + for vparams <- originalVparamss; vparam <- vparams do + if isByNameType(vparam.tpt) then + report.error(em"implementation restriction: traits cannot have by name parameters", vparam.srcPos) + + // Annotations on class _type_ parameters are set on the derived parameters + // but not on the constructor parameters. The reverse is true for + // annotations on class _value_ parameters. + val constrTparams = impliedTparams.map(toDefParam(_, keepAnnotations = false)) + val constrVparamss = + if (originalVparamss.isEmpty) { // ensure parameter list is non-empty + if (isCaseClass) + report.error(CaseClassMissingParamList(cdef), namePos) + ListOfNil + } + else if (isCaseClass && originalVparamss.head.exists(_.mods.isOneOf(GivenOrImplicit))) { + report.error(CaseClassMissingNonImplicitParamList(cdef), namePos) + ListOfNil + } + else originalVparamss.nestedMap(toDefParam(_, keepAnnotations = true, keepDefault = true)) + val derivedTparams = + constrTparams.zipWithConserve(impliedTparams)((tparam, impliedParam) => + derivedTypeParam(tparam).withAnnotations(impliedParam.mods.annotations)) + val derivedVparamss = + constrVparamss.nestedMap(vparam => + derivedTermParam(vparam).withAnnotations(Nil)) + + val constr = cpy.DefDef(constr1)(paramss = joinParams(constrTparams, constrVparamss)) + + val (normalizedBody, enumCases, enumCompanionRef) = { + // Add constructor type parameters and evidence implicit 
parameters + // to auxiliary constructors; set defaultGetters as a side effect. + def expandConstructor(tree: Tree) = tree match { + case ddef: DefDef if ddef.name.isConstructorName => + decompose( + defDef( + addEvidenceParams( + cpy.DefDef(ddef)(paramss = joinParams(constrTparams, ddef.paramss)), + evidenceParams(constr1).map(toDefParam(_, keepAnnotations = false, keepDefault = false))))) + case stat => + stat + } + // The Identifiers defined by a case + def caseIds(tree: Tree): List[Ident] = tree match { + case tree: MemberDef => Ident(tree.name.toTermName) :: Nil + case PatDef(_, ids: List[Ident] @ unchecked, _, _) => ids + } + + val stats0 = impl.body.map(expandConstructor) + val stats = + if (ctx.owner eq defn.ScalaPackageClass) && defn.hasProblematicGetClass(className) then + stats0.filterConserve { + case ddef: DefDef => + ddef.name ne nme.getClass_ + case _ => + true + } + else + stats0 + + if (isEnum) { + val (enumCases, enumStats) = stats.partition(DesugarEnums.isEnumCase) + if (enumCases.isEmpty) + report.error(EnumerationsShouldNotBeEmpty(cdef), namePos) + else + enumCases.last.pushAttachment(DesugarEnums.DefinesEnumLookupMethods, ()) + val enumCompanionRef = TermRefTree() + val enumImport = + Import(enumCompanionRef, enumCases.flatMap(caseIds).map( + enumCase => + ImportSelector(enumCase.withSpan(enumCase.span.startPos)) + ) + ) + (enumImport :: enumStats, enumCases, enumCompanionRef) + } + else (stats, Nil, EmptyTree) + } + + def anyRef = ref(defn.AnyRefAlias.typeRef) + + val arity = constrVparamss.head.length + + val classTycon: Tree = TypeRefTree() // watching is set at end of method + + def appliedTypeTree(tycon: Tree, args: List[Tree]) = + (if (args.isEmpty) tycon else AppliedTypeTree(tycon, args)) + .withSpan(cdef.span.startPos) + + def isHK(tparam: Tree): Boolean = tparam match { + case TypeDef(_, LambdaTypeTree(tparams, body)) => true + case TypeDef(_, rhs: DerivedTypeTree) => isHK(rhs.watched) + case _ => false + } + + def appliedRef(tycon: 
Tree, tparams: List[TypeDef] = constrTparams, widenHK: Boolean = false) = { + val targs = for (tparam <- tparams) yield { + val targ = refOfDef(tparam) + def fullyApplied(tparam: Tree): Tree = tparam match { + case TypeDef(_, LambdaTypeTree(tparams, body)) => + AppliedTypeTree(targ, tparams.map(_ => WildcardTypeBoundsTree())) + case TypeDef(_, rhs: DerivedTypeTree) => + fullyApplied(rhs.watched) + case _ => + targ + } + if (widenHK) fullyApplied(tparam) else targ + } + appliedTypeTree(tycon, targs) + } + + def isRepeated(tree: Tree): Boolean = stripByNameType(tree) match { + case PostfixOp(_, Ident(tpnme.raw.STAR)) => true + case _ => false + } + + // a reference to the class type bound by `cdef`, with type parameters coming from the constructor + val classTypeRef = appliedRef(classTycon) + + // a reference to `enumClass`, with type parameters coming from the case constructor + lazy val enumClassTypeRef = + if (enumClass.typeParams.isEmpty) + enumClassRef + else if (originalTparams.isEmpty) + appliedRef(enumClassRef) + else { + report.error(TypedCaseDoesNotExplicitlyExtendTypedEnum(enumClass, cdef) + , cdef.srcPos.startPos) + appliedTypeTree(enumClassRef, constrTparams map (_ => anyRef)) + } + + // new C[Ts](paramss) + lazy val creatorExpr = + val vparamss = constrVparamss match + case (vparam :: _) :: _ if vparam.mods.is(Implicit) => // add a leading () to match class parameters + Nil :: constrVparamss + case _ => + if constrVparamss.nonEmpty && constrVparamss.forall { + case vparam :: _ => vparam.mods.is(Given) + case _ => false + } + then constrVparamss :+ Nil // add a trailing () to match class parameters + else constrVparamss + val nu = vparamss.foldLeft(makeNew(classTypeRef)) { (nu, vparams) => + val app = Apply(nu, vparams.map(refOfDef)) + vparams match { + case vparam :: _ if vparam.mods.is(Given) => app.setApplyKind(ApplyKind.Using) + case _ => app + } + } + ensureApplied(nu) + + val copiedAccessFlags = if migrateTo3 then EmptyFlags else AccessFlags + + // 
Methods to add to a case class C[..](p1: T1, ..., pN: Tn)(moreParams) + // def _1: T1 = this.p1 + // ... + // def _N: TN = this.pN (unless already given as valdef or parameterless defdef) + // def copy(p1: T1 = p1..., pN: TN = pN)(moreParams) = + // new C[...](p1, ..., pN)(moreParams) + val (caseClassMeths, enumScaffolding) = { + def syntheticProperty(name: TermName, tpt: Tree, rhs: Tree) = + DefDef(name, Nil, tpt, rhs).withMods(synthetic) + + def productElemMeths = + val caseParams = derivedVparamss.head.toArray + val selectorNamesInBody = normalizedBody.collect { + case vdef: ValDef if vdef.name.isSelectorName => + vdef.name + case ddef: DefDef if ddef.name.isSelectorName && ddef.paramss.isEmpty => + ddef.name + } + for i <- List.range(0, arity) + selName = nme.selectorName(i) + if (selName ne caseParams(i).name) && !selectorNamesInBody.contains(selName) + yield syntheticProperty(selName, caseParams(i).tpt, + Select(This(EmptyTypeIdent), caseParams(i).name)) + + def enumCaseMeths = + if isEnumCase then + val (ordinal, scaffolding) = nextOrdinal(className, CaseKind.Class, definesEnumLookupMethods(cdef)) + (ordinalMethLit(ordinal) :: Nil, scaffolding) + else (Nil, Nil) + def copyMeths = { + val hasRepeatedParam = constrVparamss.nestedExists { + case ValDef(_, tpt, _) => isRepeated(tpt) + } + if (mods.is(Abstract) || hasRepeatedParam) Nil // cannot have default arguments for repeated parameters, hence copy method is not issued + else { + val copyFirstParams = derivedVparamss.head.map(vparam => + cpy.ValDef(vparam)(rhs = refOfDef(vparam))) + val copyRestParamss = derivedVparamss.tail.nestedMap(vparam => + cpy.ValDef(vparam)(rhs = EmptyTree)) + DefDef( + nme.copy, + joinParams(derivedTparams, copyFirstParams :: copyRestParamss), + TypeTree(), + creatorExpr + ).withMods(Modifiers(Synthetic | constr1.mods.flags & copiedAccessFlags, constr1.mods.privateWithin)) :: Nil + } + } + + if isCaseClass then + val (enumMeths, enumScaffolding) = enumCaseMeths + (copyMeths ::: 
enumMeths ::: productElemMeths, enumScaffolding) + else (Nil, Nil) + } + + var parents1: List[untpd.Tree] = parents // !cc! need explicit type to make capture checking pass + if (isEnumCase && parents.isEmpty) + parents1 = enumClassTypeRef :: Nil + if (isNonEnumCase) + parents1 = parents1 :+ scalaDot(str.Product.toTypeName) :+ scalaDot(nme.Serializable.toTypeName) + if (isEnum) + parents1 = parents1 :+ ref(defn.EnumClass) + + // derived type classes of non-module classes go to their companions + val (clsDerived, companionDerived) = + if (mods.is(Module)) (impl.derived, Nil) else (Nil, impl.derived) + + // The thicket which is the desugared version of the companion object + // synthetic object C extends parentTpt derives class-derived { defs } + def companionDefs(parentTpt: Tree, defs: List[Tree]) = { + val mdefs = moduleDef( + ModuleDef( + className.toTermName, Template(emptyConstructor, parentTpt :: Nil, companionDerived, EmptyValDef, defs)) + .withMods(companionMods | Synthetic)) + .withSpan(cdef.span).toList + if (companionDerived.nonEmpty) + for (case modClsDef @ TypeDef(_, _) <- mdefs) + modClsDef.putAttachment(DerivingCompanion, impl.srcPos.startPos) + mdefs + } + + val companionMembers = defaultGetters ::: enumCases + + // The companion object definitions, if a companion is needed, Nil otherwise. + // companion definitions include: + // 1. If class is a case class case class C[Ts](p1: T1, ..., pN: TN)(moreParams): + // def apply[Ts](p1: T1, ..., pN: TN)(moreParams) = new C[Ts](p1, ..., pN)(moreParams) (unless C is abstract) + // def unapply[Ts]($1: C[Ts]) = $1 // if not repeated + // def unapplySeq[Ts]($1: C[Ts]) = $1 // if repeated + // 2. The default getters of the constructor + // The parent of the companion object of a non-parameterized case class + // (T11, ..., T1N) => ... => (TM1, ..., TMN) => C + // For all other classes, the parent is AnyRef. 
+ val companions = + if (isCaseClass) { + val applyMeths = + if (mods.is(Abstract)) Nil + else { + val appMods = + Modifiers(Synthetic | constr1.mods.flags & copiedAccessFlags).withPrivateWithin(constr1.mods.privateWithin) + val appParamss = + derivedVparamss.nestedZipWithConserve(constrVparamss)((ap, cp) => + ap.withMods(ap.mods | (cp.mods.flags & HasDefault))) + DefDef(nme.apply, joinParams(derivedTparams, appParamss), TypeTree(), creatorExpr) + .withMods(appMods) :: Nil + } + val unapplyMeth = { + val hasRepeatedParam = constrVparamss.head.exists { + case ValDef(_, tpt, _) => isRepeated(tpt) + } + val methName = if (hasRepeatedParam) nme.unapplySeq else nme.unapply + val unapplyParam = makeSyntheticParameter(tpt = classTypeRef) + val unapplyRHS = if (arity == 0) Literal(Constant(true)) else Ident(unapplyParam.name) + val unapplyResTp = if (arity == 0) Literal(Constant(true)) else TypeTree() + DefDef( + methName, + joinParams(derivedTparams, (unapplyParam :: Nil) :: Nil), + unapplyResTp, + unapplyRHS + ).withMods(synthetic) + } + val toStringMeth = + DefDef(nme.toString_, Nil, TypeTree(), Literal(Constant(className.toString))).withMods(Modifiers(Override | Synthetic)) + + companionDefs(anyRef, applyMeths ::: unapplyMeth :: toStringMeth :: companionMembers) + } + else if (companionMembers.nonEmpty || companionDerived.nonEmpty || isEnum) + companionDefs(anyRef, companionMembers) + else if (isValueClass) + companionDefs(anyRef, Nil) + else Nil + + enumCompanionRef match { + case ref: TermRefTree => // have the enum import watch the companion object + val (modVal: ValDef) :: _ = companions: @unchecked + ref.watching(modVal) + case _ => + } + + // For an implicit class C[Ts](p11: T11, ..., p1N: T1N) ... (pM1: TM1, .., pMN: TMN), the method + // synthetic implicit C[Ts](p11: T11, ..., p1N: T1N) ... (pM1: TM1, ..., pMN: TMN): C[Ts] = + // new C[Ts](p11, ..., p1N) ... 
(pM1, ..., pMN) = + val implicitWrappers = + if (!mods.isOneOf(GivenOrImplicit)) + Nil + else if (ctx.owner.is(Package)) { + report.error(TopLevelImplicitClass(cdef), cdef.srcPos) + Nil + } + else if (mods.is(Trait)) { + report.error(TypesAndTraitsCantBeImplicit(), cdef.srcPos) + Nil + } + else if (isCaseClass) { + report.error(ImplicitCaseClass(cdef), cdef.srcPos) + Nil + } + else if (arity != 1 && !mods.is(Given)) { + report.error(ImplicitClassPrimaryConstructorArity(), cdef.srcPos) + Nil + } + else { + val defParamss = constrVparamss match { + case Nil :: paramss => + paramss // drop leading () that got inserted by class + // TODO: drop this once we do not silently insert empty class parameters anymore + case paramss => paramss + } + // implicit wrapper is typechecked in same scope as constructor, so + // we can reuse the constructor parameters; no derived params are needed. + DefDef( + className.toTermName, joinParams(constrTparams, defParamss), + classTypeRef, creatorExpr) + .withMods(companionMods | mods.flags.toTermFlags & (GivenOrImplicit | Inline) | Final) + .withSpan(cdef.span) :: Nil + } + + val self1 = { + val selfType = if (self.tpt.isEmpty) classTypeRef else self.tpt + if (self.isEmpty) self + else cpy.ValDef(self)(tpt = selfType).withMods(self.mods | SelfName) + } + + val cdef1 = addEnumFlags { + val tparamAccessors = { + val impliedTparamsIt = impliedTparams.iterator + derivedTparams.map(_.withMods(impliedTparamsIt.next().mods)) + } + val caseAccessor = if (isCaseClass) CaseAccessor else EmptyFlags + val vparamAccessors = { + val originalVparamsIt = originalVparamss.iterator.flatten + derivedVparamss match { + case first :: rest => + first.map(_.withMods(originalVparamsIt.next().mods | caseAccessor)) ++ + rest.flatten.map(_.withMods(originalVparamsIt.next().mods)) + case _ => + Nil + } + } + if mods.isAllOf(Given | Inline | Transparent) then + report.error("inline given instances cannot be trasparent", cdef) + val classMods = if mods.is(Given) then 
mods &~ (Inline | Transparent) | Synthetic else mods + cpy.TypeDef(cdef: TypeDef)( + name = className, + rhs = cpy.Template(impl)(constr, parents1, clsDerived, self1, + tparamAccessors ::: vparamAccessors ::: normalizedBody ::: caseClassMeths) + ).withMods(classMods) + } + + // install the watch on classTycon + classTycon match { + case tycon: DerivedTypeTree => tycon.watching(cdef1) + case _ => + } + + flatTree(cdef1 :: companions ::: implicitWrappers ::: enumScaffolding) + }.showing(i"desugared: $cdef --> $result", Printers.desugar) + + /** Expand + * + * package object name { body } + * + * to: + * + * package name { + * object `package` { body } + * } + */ + def packageModuleDef(mdef: ModuleDef)(using Context): Tree = + val impl = mdef.impl + val mods = mdef.mods + val moduleName = normalizeName(mdef, impl).asTermName + if mods.is(Package) then + checkPackageName(mdef) + PackageDef(Ident(moduleName), + cpy.ModuleDef(mdef)(nme.PACKAGE, impl).withMods(mods &~ Package) :: Nil) + else + mdef + + /** Expand + * + * object name extends parents { self => body } + * + * to: + * + * val name: name$ = New(name$) + * final class name$ extends parents { self: name.type => body } + */ + def moduleDef(mdef: ModuleDef)(using Context): Tree = { + val impl = mdef.impl + val mods = mdef.mods + val moduleName = normalizeName(mdef, impl).asTermName + def isEnumCase = mods.isEnumCase + Checking.checkWellFormedModule(mdef) + + if (mods.is(Package)) + packageModuleDef(mdef) + else if (isEnumCase) { + typeParamIsReferenced(enumClass.typeParams, Nil, Nil, impl.parents) + // used to check there are no illegal references to enum's type parameters in parents + expandEnumModule(moduleName, impl, mods, definesEnumLookupMethods(mdef), mdef.span) + } + else { + val clsName = moduleName.moduleClassName + val clsRef = Ident(clsName) + val modul = ValDef(moduleName, clsRef, New(clsRef, Nil)) + .withMods(mods.toTermFlags & RetainedModuleValFlags | ModuleValCreationFlags) + 
.withSpan(mdef.span.startPos) + val ValDef(selfName, selfTpt, _) = impl.self + val selfMods = impl.self.mods + if (!selfTpt.isEmpty) report.error(ObjectMayNotHaveSelfType(mdef), impl.self.srcPos) + val clsSelf = ValDef(selfName, SingletonTypeTree(Ident(moduleName)), impl.self.rhs) + .withMods(selfMods) + .withSpan(impl.self.span.orElse(impl.span.startPos)) + val clsTmpl = cpy.Template(impl)(self = clsSelf, body = impl.body) + val cls = TypeDef(clsName, clsTmpl) + .withMods(mods.toTypeFlags & RetainedModuleClassFlags | ModuleClassCreationFlags) + .withEndMarker(copyFrom = mdef) // copy over the end marker position to the module class def + Thicket(modul, classDef(cls).withSpan(mdef.span)) + } + } + + def extMethod(mdef: DefDef, extParamss: List[ParamClause])(using Context): DefDef = + cpy.DefDef(mdef)( + name = normalizeName(mdef, mdef.tpt).asTermName, + paramss = + if mdef.name.isRightAssocOperatorName then + val (typaramss, paramss) = mdef.paramss.span(isTypeParamClause) // first extract type parameters + + paramss match + case params :: paramss1 => // `params` must have a single parameter and without `given` flag + + def badRightAssoc(problem: String) = + report.error(em"right-associative extension method $problem", mdef.srcPos) + extParamss ++ mdef.paramss + + params match + case ValDefs(vparam :: Nil) => + if !vparam.mods.is(Given) then + // we merge the extension parameters with the method parameters, + // swapping the operator arguments: + // e.g. + // extension [A](using B)(c: C)(using D) + // def %:[E](f: F)(g: G)(using H): Res = ??? + // will be encoded as + // def %:[A](using B)[E](f: F)(c: C)(using D)(g: G)(using H): Res = ??? 
+ val (leadingUsing, otherExtParamss) = extParamss.span(isUsingOrTypeParamClause) + leadingUsing ::: typaramss ::: params :: otherExtParamss ::: paramss1 + else + badRightAssoc("cannot start with using clause") + case _ => + badRightAssoc("must start with a single parameter") + case _ => + // no value parameters, so not an infix operator. + extParamss ++ mdef.paramss + else + extParamss ++ mdef.paramss + ).withMods(mdef.mods | ExtensionMethod) + + /** Transform extension construct to list of extension methods */ + def extMethods(ext: ExtMethods)(using Context): Tree = flatTree { + ext.methods map { + case exp: Export => exp + case mdef: DefDef => defDef(extMethod(mdef, ext.paramss)) + } + } + /** Transforms + * + * type t >: Low <: Hi + * to + * + * @patternType type $T >: Low <: Hi + * + * if the type has a pattern variable name + */ + def quotedPatternTypeDef(tree: TypeDef)(using Context): TypeDef = { + assert(ctx.mode.is(Mode.QuotedPattern)) + if tree.name.isVarPattern && !tree.isBackquoted then + val patternTypeAnnot = New(ref(defn.QuotedRuntimePatterns_patternTypeAnnot.typeRef)).withSpan(tree.span) + val mods = tree.mods.withAddedAnnotation(patternTypeAnnot) + tree.withMods(mods) + else if tree.name.startsWith("$") && !tree.isBackquoted then + report.error( + """Quoted pattern variable names starting with $ are not supported anymore. + |Use lower cases type pattern name instead. 
+ |""".stripMargin, + tree.srcPos) + tree + else tree + } + + def checkPackageName(mdef: ModuleDef | PackageDef)(using Context): Unit = + + def check(name: Name, errSpan: Span): Unit = name match + case name: SimpleName if !errSpan.isSynthetic && name.exists(Chars.willBeEncoded) => + report.warning(em"The package name `$name` will be encoded on the classpath, and can lead to undefined behaviour.", mdef.source.atSpan(errSpan)) + case _ => + + def loop(part: RefTree): Unit = part match + case part @ Ident(name) => check(name, part.span) + case part @ Select(qual: RefTree, name) => + check(name, part.nameSpan) + loop(qual) + case _ => + + mdef match + case pdef: PackageDef => loop(pdef.pid) + case mdef: ModuleDef if mdef.mods.is(Package) => check(mdef.name, mdef.nameSpan) + case _ => + end checkPackageName + + /** The normalized name of `mdef`. This means + * 1. Check that the name does not redefine a Scala core class. + * If it does redefine, issue an error and return a mangled name instead + * of the original one. + * 2. If the name is missing (this can be the case for instance definitions), + * invent one instead. + */ + def normalizeName(mdef: MemberDef, impl: Tree)(using Context): Name = { + var name = mdef.name + if (name.isEmpty) name = name.likeSpaced(inventGivenOrExtensionName(impl)) + def errPos = mdef.source.atSpan(mdef.nameSpan) + if (ctx.owner == defn.ScalaPackageClass && defn.reservedScalaClassNames.contains(name.toTypeName)) { + val kind = if (name.isTypeName) "class" else "object" + report.error(IllegalRedefinitionOfStandardKind(kind, name), errPos) + name = name.errorName + } + name + } + + /** Invent a name for an anonympus given of type or template `impl`. 
*/ + def inventGivenOrExtensionName(impl: Tree)(using Context): SimpleName = + val str = impl match + case impl: Template => + if impl.parents.isEmpty then + report.error(AnonymousInstanceCannotBeEmpty(impl), impl.srcPos) + nme.ERROR.toString + else + impl.parents.map(inventTypeName(_)).mkString("given_", "_", "") + case impl: Tree => + "given_" ++ inventTypeName(impl) + str.toTermName.asSimpleName + + private class NameExtractor(followArgs: Boolean) extends UntypedTreeAccumulator[String] { + private def extractArgs(args: List[Tree])(using Context): String = + args.map(argNameExtractor.apply("", _)).mkString("_") + override def apply(x: String, tree: Tree)(using Context): String = + if (x.isEmpty) + tree match { + case Select(pre, nme.CONSTRUCTOR) => foldOver(x, pre) + case tree: RefTree => + if tree.name.isTypeName then tree.name.toString + else s"${tree.name}_type" + case tree: TypeDef => tree.name.toString + case tree: AppliedTypeTree if followArgs && tree.args.nonEmpty => + s"${apply(x, tree.tpt)}_${extractArgs(tree.args)}" + case InfixOp(left, op, right) => + if followArgs then s"${op.name}_${extractArgs(List(left, right))}" + else op.name.toString + case tree: LambdaTypeTree => + apply(x, tree.body) + case tree: Tuple => + extractArgs(tree.trees) + case tree: Function if tree.args.nonEmpty => + if followArgs then s"${extractArgs(tree.args)}_to_${apply("", tree.body)}" + else "Function" + case _ => foldOver(x, tree) + } + else x + } + private val typeNameExtractor = NameExtractor(followArgs = true) + private val argNameExtractor = NameExtractor(followArgs = false) + + private def inventTypeName(tree: Tree)(using Context): String = typeNameExtractor("", tree) + + /**This will check if this def tree is marked to define enum lookup methods, + * this is not recommended to call more than once per tree + */ + private def definesEnumLookupMethods(ddef: DefTree): Boolean = + ddef.removeAttachment(DefinesEnumLookupMethods).isDefined + + /** val p1, ..., pN: T = E + * 
==> + * makePatDef[[val p1: T1 = E]]; ...; makePatDef[[val pN: TN = E]] + * + * case e1, ..., eN + * ==> + * expandSimpleEnumCase([case e1]); ...; expandSimpleEnumCase([case eN]) + */ + def patDef(pdef: PatDef)(using Context): Tree = flatTree { + val PatDef(mods, pats, tpt, rhs) = pdef + if mods.isEnumCase then + def expand(id: Ident, definesLookups: Boolean) = + expandSimpleEnumCase(id.name.asTermName, mods, definesLookups, + Span(id.span.start, id.span.end, id.span.start)) + + val ids = pats.asInstanceOf[List[Ident]] + if definesEnumLookupMethods(pdef) then + ids.init.map(expand(_, false)) ::: expand(ids.last, true) :: Nil + else + ids.map(expand(_, false)) + else { + val pats1 = if (tpt.isEmpty) pats else pats map (Typed(_, tpt)) + pats1 map (makePatDef(pdef, mods, _, rhs)) + } + } + + /** The selector of a match, which depends of the given `checkMode`. + * @param sel the original selector + * @return if `checkMode` is + * - None : sel @unchecked + * - Exhaustive : sel + * - IrrefutablePatDef, + * IrrefutableGenFrom: sel with attachment `CheckIrrefutable -> checkMode` + */ + def makeSelector(sel: Tree, checkMode: MatchCheck)(using Context): Tree = + checkMode match + case MatchCheck.None => + Annotated(sel, New(ref(defn.UncheckedAnnot.typeRef))) + + case MatchCheck.Exhaustive => + sel + + case MatchCheck.IrrefutablePatDef | MatchCheck.IrrefutableGenFrom => + // TODO: use `pushAttachment` and investigate duplicate attachment + sel.withAttachment(CheckIrrefutable, checkMode) + sel + end match + + /** If `pat` is a variable pattern, + * + * val/var/lazy val p = e + * + * Otherwise, in case there is exactly one variable x_1 in pattern + * val/var/lazy val p = e ==> val/var/lazy val x_1 = (e: @unchecked) match (case p => (x_1)) + * + * in case there are zero or more than one variables in pattern + * val/var/lazy p = e ==> private[this] synthetic [lazy] val t$ = (e: @unchecked) match (case p => (x_1, ..., x_N)) + * val/var/def x_1 = t$._1 + * ... 
+ * val/var/def x_N = t$._N + * If the original pattern variable carries a type annotation, so does the corresponding + * ValDef or DefDef. + */ + def makePatDef(original: Tree, mods: Modifiers, pat: Tree, rhs: Tree)(using Context): Tree = pat match { + case IdPattern(id, tpt) => + val id1 = + if id.name == nme.WILDCARD + then cpy.Ident(id)(WildcardParamName.fresh()) + else id + derivedValDef(original, id1, tpt, rhs, mods) + case _ => + + def filterWildcardGivenBinding(givenPat: Bind): Boolean = + givenPat.name != nme.WILDCARD + + def errorOnGivenBinding(bind: Bind)(using Context): Boolean = + report.error( + em"""${hl("given")} patterns are not allowed in a ${hl("val")} definition, + |please bind to an identifier and use an alias given.""", bind) + false + + def isTuplePattern(arity: Int): Boolean = pat match { + case Tuple(pats) if pats.size == arity => + pats.forall(isVarPattern) + case _ => false + } + val isMatchingTuple: Tree => Boolean = { + case Tuple(es) => isTuplePattern(es.length) + case _ => false + } + + // We can only optimize `val pat = if (...) 
e1 else e2` if: + // - `e1` and `e2` are both tuples of arity N + // - `pat` is a tuple of N variables or wildcard patterns like `(x1, x2, ..., xN)` + val tupleOptimizable = forallResults(rhs, isMatchingTuple) + + val inAliasGenerator = original match + case _: GenAlias => true + case _ => false + + val vars = + if (tupleOptimizable) // include `_` + pat match + case Tuple(pats) => pats.map { case id: Ident => id -> TypeTree() } + else + getVariables( + tree = pat, + shouldAddGiven = + if inAliasGenerator then + filterWildcardGivenBinding + else + errorOnGivenBinding + ) // no `_` + + val ids = for ((named, _) <- vars) yield Ident(named.name) + val matchExpr = + if (tupleOptimizable) rhs + else + val caseDef = CaseDef(pat, EmptyTree, makeTuple(ids)) + Match(makeSelector(rhs, MatchCheck.IrrefutablePatDef), caseDef :: Nil) + vars match { + case Nil if !mods.is(Lazy) => + matchExpr + case (named, tpt) :: Nil => + derivedValDef(original, named, tpt, matchExpr, mods) + case _ => + val tmpName = UniqueName.fresh() + val patMods = + mods & Lazy | Synthetic | (if (ctx.owner.isClass) PrivateLocal else EmptyFlags) + val firstDef = + ValDef(tmpName, TypeTree(), matchExpr) + .withSpan(pat.span.union(rhs.span)).withMods(patMods) + val useSelectors = vars.length <= 22 + def selector(n: Int) = + if useSelectors then Select(Ident(tmpName), nme.selectorName(n)) + else Apply(Select(Ident(tmpName), nme.apply), Literal(Constant(n)) :: Nil) + val restDefs = + for (((named, tpt), n) <- vars.zipWithIndex if named.name != nme.WILDCARD) + yield + if mods.is(Lazy) then + DefDef(named.name.asTermName, Nil, tpt, selector(n)) + .withMods(mods &~ Lazy) + .withSpan(named.span) + else + valDef( + ValDef(named.name.asTermName, tpt, selector(n)) + .withMods(mods) + .withSpan(named.span) + ) + flatTree(firstDef :: restDefs) + } + } + + /** Expand variable identifier x to x @ _ */ + def patternVar(tree: Tree)(using Context): Bind = { + val Ident(name) = unsplice(tree): @unchecked + Bind(name, 
Ident(nme.WILDCARD)).withSpan(tree.span) + } + + /** The type of tests that check whether a MemberDef is OK for some flag. + * The test succeeds if the partial function is defined and returns true. + */ + type MemberDefTest = PartialFunction[MemberDef, Boolean] + + val legalOpaque: MemberDefTest = { + case TypeDef(_, rhs) => + def rhsOK(tree: Tree): Boolean = tree match { + case bounds: TypeBoundsTree => !bounds.alias.isEmpty + case _: Template | _: MatchTypeTree => false + case LambdaTypeTree(_, body) => rhsOK(body) + case _ => true + } + rhsOK(rhs) + } + + def checkOpaqueAlias(tree: MemberDef)(using Context): MemberDef = + def check(rhs: Tree): MemberDef = rhs match + case bounds: TypeBoundsTree if bounds.alias.isEmpty => + report.error(em"opaque type must have a right-hand side", tree.srcPos) + tree.withMods(tree.mods.withoutFlags(Opaque)) + case LambdaTypeTree(_, body) => check(body) + case _ => tree + if !tree.mods.is(Opaque) then tree + else tree match + case TypeDef(_, rhs) => check(rhs) + case _ => tree + + /** Check that modifiers are legal for the definition `tree`. + * Right now, we only check for `opaque`. TODO: Move other modifier checks here. 
+ */ + def checkModifiers(tree: Tree)(using Context): Tree = tree match { + case tree: MemberDef => + var tested: MemberDef = tree + def checkApplicable(flag: Flag, test: MemberDefTest): MemberDef = + if (tested.mods.is(flag) && !test.applyOrElse(tree, (md: MemberDef) => false)) { + report.error(ModifierNotAllowedForDefinition(flag), tree.srcPos) + tested.withMods(tested.mods.withoutFlags(flag)) + } else tested + tested = checkOpaqueAlias(tested) + tested = checkApplicable(Opaque, legalOpaque) + tested + case _ => + tree + } + + def defTree(tree: Tree)(using Context): Tree = + checkModifiers(tree) match { + case tree: ValDef => valDef(tree) + case tree: TypeDef => + if (tree.isClassDef) classDef(tree) + else if (ctx.mode.is(Mode.QuotedPattern)) quotedPatternTypeDef(tree) + else tree + case tree: DefDef => + if (tree.name.isConstructorName) tree // was already handled by enclosing classDef + else defDef(tree) + case tree: ModuleDef => moduleDef(tree) + case tree: PatDef => patDef(tree) + } + + /** { stats; } + * ==> + * { stats; () } + */ + def block(tree: Block)(using Context): Block = tree.expr match { + case EmptyTree => + cpy.Block(tree)(tree.stats, + unitLiteral.withSpan(if (tree.stats.isEmpty) tree.span else tree.span.endPos)) + case _ => + tree + } + + /** Translate infix operation expression + * + * l op r ==> l.op(r) if op is left-associative + * ==> r.op(l) if op is right-associative + */ + def binop(left: Tree, op: Ident, right: Tree)(using Context): Apply = { + def assignToNamedArg(arg: Tree) = arg match { + case Assign(Ident(name), rhs) => cpy.NamedArg(arg)(name, rhs) + case _ => arg + } + def makeOp(fn: Tree, arg: Tree, selectPos: Span) = + val sel = Select(fn, op.name).withSpan(selectPos) + if (left.sourcePos.endLine < op.sourcePos.startLine) + sel.pushAttachment(MultiLineInfix, ()) + arg match + case Parens(arg) => + Apply(sel, assignToNamedArg(arg) :: Nil) + case Tuple(args) if args.exists(_.isInstanceOf[Assign]) => + Apply(sel, 
args.mapConserve(assignToNamedArg)) + case Tuple(args) => + Apply(sel, arg :: Nil).setApplyKind(ApplyKind.InfixTuple) + case _ => + Apply(sel, arg :: Nil) + + if op.name.isRightAssocOperatorName then + makeOp(right, left, Span(op.span.start, right.span.end)) + else + makeOp(left, right, Span(left.span.start, op.span.end, op.span.start)) + } + + /** Translate throws type `A throws E1 | ... | En` to + * $throws[... $throws[A, E1] ... , En]. + */ + def throws(tpt: Tree, op: Ident, excepts: Tree)(using Context): AppliedTypeTree = excepts match + case Parens(excepts1) => + throws(tpt, op, excepts1) + case InfixOp(l, bar @ Ident(tpnme.raw.BAR), r) => + throws(throws(tpt, op, l), bar, r) + case e => + AppliedTypeTree( + TypeTree(defn.throwsAlias.typeRef).withSpan(op.span), tpt :: excepts :: Nil) + + /** Translate tuple expressions of arity <= 22 + * + * () ==> () + * (t) ==> t + * (t1, ..., tN) ==> TupleN(t1, ..., tN) + */ + def smallTuple(tree: Tuple)(using Context): Tree = { + val ts = tree.trees + val arity = ts.length + assert(arity <= Definitions.MaxTupleArity) + def tupleTypeRef = defn.TupleType(arity).nn + if (arity == 0) + if (ctx.mode is Mode.Type) TypeTree(defn.UnitType) else unitLiteral + else if (ctx.mode is Mode.Type) AppliedTypeTree(ref(tupleTypeRef), ts) + else Apply(ref(tupleTypeRef.classSymbol.companionModule.termRef), ts) + } + + private def isTopLevelDef(stat: Tree)(using Context): Boolean = stat match + case _: ValDef | _: PatDef | _: DefDef | _: Export | _: ExtMethods => true + case stat: ModuleDef => + stat.mods.isOneOf(GivenOrImplicit) + case stat: TypeDef => + !stat.isClassDef || stat.mods.isOneOf(GivenOrImplicit) + case _ => + false + + /** Assuming `src` contains top-level definition, returns the name that should + * be using for the package object that will wrap them. 
+ */ + def packageObjectName(src: SourceFile): TermName = + val fileName = src.file.name + val sourceName = fileName.take(fileName.lastIndexOf('.')) + (sourceName ++ str.TOPLEVEL_SUFFIX).toTermName + + /** Group all definitions that can't be at the toplevel in + * an object named `$package` where `` is the name of the source file. + * Definitions that can't be at the toplevel are: + * + * - all pattern, value and method definitions + * - non-class type definitions + * - implicit classes and objects + * - "companion objects" of wrapped type definitions + * (i.e. objects having the same name as a wrapped type) + */ + def packageDef(pdef: PackageDef)(using Context): PackageDef = { + checkPackageName(pdef) + val wrappedTypeNames = pdef.stats.collectCC { + case stat: TypeDef if isTopLevelDef(stat) => stat.name + } + def inPackageObject(stat: Tree) = + isTopLevelDef(stat) || { + stat match + case stat: ModuleDef => + wrappedTypeNames.contains(stat.name.stripModuleClassSuffix.toTypeName) + case _ => + false + } + val (nestedStats, topStats) = pdef.stats.partition(inPackageObject) + if (nestedStats.isEmpty) pdef + else { + val name = packageObjectName(ctx.source) + val grouped = + ModuleDef(name, Template(emptyConstructor, Nil, Nil, EmptyValDef, nestedStats)) + .withMods(Modifiers(Synthetic)) + cpy.PackageDef(pdef)(pdef.pid, topStats :+ grouped) + } + } + + /** Make closure corresponding to function. + * params => body + * ==> + * def $anonfun(params) = body + * Closure($anonfun) + */ + def makeClosure(params: List[ValDef], body: Tree, tpt: Tree | Null = null, isContextual: Boolean, span: Span)(using Context): Block = + Block( + DefDef(nme.ANON_FUN, params :: Nil, if (tpt == null) TypeTree() else tpt, body) + .withSpan(span) + .withMods(synthetic | Artifact), + Closure(Nil, Ident(nme.ANON_FUN), if (isContextual) ContextualEmptyTree else EmptyTree)) + + /** If `nparams` == 1, expand partial function + * + * { cases } + * ==> + * x$1 => (x$1 @unchecked?) 
match { cases } + * + * If `nparams` != 1, expand instead to + * + * (x$1, ..., x$n) => (x$0, ..., x${n-1} @unchecked?) match { cases } + */ + def makeCaseLambda(cases: List[CaseDef], checkMode: MatchCheck, nparams: Int = 1)(using Context): Function = { + val params = (1 to nparams).toList.map(makeSyntheticParameter(_)) + val selector = makeTuple(params.map(p => Ident(p.name))) + Function(params, Match(makeSelector(selector, checkMode), cases)) + } + + /** Map n-ary function `(x1: T1, ..., xn: Tn) => body` where n != 1 to unary function as follows: + * + * (x$1: (T1, ..., Tn)) => { + * def x1: T1 = x$1._1 + * ... + * def xn: Tn = x$1._n + * body + * } + * + * or if `isGenericTuple` + * + * (x$1: (T1, ... Tn) => { + * def x1: T1 = x$1.apply(0) + * ... + * def xn: Tn = x$1.apply(n-1) + * body + * } + * + * If some of the Ti's are absent, omit the : (T1, ..., Tn) type ascription + * in the selector. + */ + def makeTupledFunction(params: List[ValDef], body: Tree, isGenericTuple: Boolean)(using Context): Tree = { + val param = makeSyntheticParameter( + tpt = + if params.exists(_.tpt.isEmpty) then TypeTree() + else Tuple(params.map(_.tpt))) + def selector(n: Int) = + if (isGenericTuple) Apply(Select(refOfDef(param), nme.apply), Literal(Constant(n))) + else Select(refOfDef(param), nme.selectorName(n)) + val vdefs = + params.zipWithIndex.map { + case (param, idx) => + ValDef(param.name, param.tpt, selector(idx)) + .withSpan(param.span) + .withAttachment(UntupledParam, ()) + .withFlags(Synthetic) + } + Function(param :: Nil, Block(vdefs, body)) + } + + /** Convert a tuple pattern with given `elems` to a sequence of `ValDefs`, + * skipping elements that are not convertible. 
+ */ + def patternsToParams(elems: List[Tree])(using Context): List[ValDef] = + def toParam(elem: Tree, tpt: Tree): Tree = + elem match + case Annotated(elem1, _) => toParam(elem1, tpt) + case Typed(elem1, tpt1) => toParam(elem1, tpt1) + case Ident(id: TermName) => ValDef(id, tpt, EmptyTree).withFlags(Param) + case _ => EmptyTree + elems.map(param => toParam(param, TypeTree()).withSpan(param.span)).collect { + case vd: ValDef => vd + } + + def makeContextualFunction(formals: List[Tree], body: Tree, isErased: Boolean)(using Context): Function = { + val mods = if (isErased) Given | Erased else Given + val params = makeImplicitParameters(formals, mods) + FunctionWithMods(params, body, Modifiers(mods)) + } + + private def derivedValDef(original: Tree, named: NameTree, tpt: Tree, rhs: Tree, mods: Modifiers)(using Context) = { + val vdef = ValDef(named.name.asTermName, tpt, rhs) + .withMods(mods) + .withSpan(original.span.withPoint(named.span.start)) + val mayNeedSetter = valDef(vdef) + mayNeedSetter + } + + private def derivedDefDef(original: Tree, named: NameTree, tpt: Tree, rhs: Tree, mods: Modifiers)(implicit src: SourceFile) = + DefDef(named.name.asTermName, Nil, tpt, rhs) + .withMods(mods) + .withSpan(original.span.withPoint(named.span.start)) + + /** Main desugaring method */ + def apply(tree: Tree, pt: Type = NoType)(using Context): Tree = { + + /** Create tree for for-comprehension `` or + * `` where mapName and flatMapName are chosen + * corresponding to whether this is a for-do or a for-yield. + * The creation performs the following rewrite rules: + * + * 1. + * + * for (P <- G) E ==> G.foreach (P => E) + * + * Here and in the following (P => E) is interpreted as the function (P => E) + * if P is a variable pattern and as the partial function { case P => E } otherwise. + * + * 2. + * + * for (P <- G) yield E ==> G.map (P => E) + * + * 3. + * + * for (P_1 <- G_1; P_2 <- G_2; ...) ... + * ==> + * G_1.flatMap (P_1 => for (P_2 <- G_2; ...) ...) + * + * 4. 
+ * + * for (P <- G; E; ...) ... + * => + * for (P <- G.filter (P => E); ...) ... + * + * 5. For any N: + * + * for (P_1 <- G; P_2 = E_2; val P_N = E_N; ...) + * ==> + * for (TupleN(P_1, P_2, ... P_N) <- + * for (x_1 @ P_1 <- G) yield { + * val x_2 @ P_2 = E_2 + * ... + * val x_N & P_N = E_N + * TupleN(x_1, ..., x_N) + * } ...) + * + * If any of the P_i are variable patterns, the corresponding `x_i @ P_i` is not generated + * and the variable constituting P_i is used instead of x_i + * + * @param mapName The name to be used for maps (either map or foreach) + * @param flatMapName The name to be used for flatMaps (either flatMap or foreach) + * @param enums The enumerators in the for expression + * @param body The body of the for expression + */ + def makeFor(mapName: TermName, flatMapName: TermName, enums: List[Tree], body: Tree): Tree = trace(i"make for ${ForYield(enums, body)}", show = true) { + + /** Let `pat` be `gen`'s pattern. Make a function value `pat => body`. + * If `pat` is a var pattern `id: T` then this gives `(id: T) => body`. + * Otherwise this gives `{ case pat => body }`, where `pat` is checked to be + * irrefutable if `gen`'s checkMode is GenCheckMode.Check. + */ + def makeLambda(gen: GenFrom, body: Tree): Tree = gen.pat match { + case IdPattern(named, tpt) if gen.checkMode != GenCheckMode.FilterAlways => + Function(derivedValDef(gen.pat, named, tpt, EmptyTree, Modifiers(Param)) :: Nil, body) + case _ => + val matchCheckMode = + if (gen.checkMode == GenCheckMode.Check || gen.checkMode == GenCheckMode.CheckAndFilter) MatchCheck.IrrefutableGenFrom + else MatchCheck.None + makeCaseLambda(CaseDef(gen.pat, EmptyTree, body) :: Nil, matchCheckMode) + } + + /** If `pat` is not an Identifier, a Typed(Ident, _), or a Bind, wrap + * it in a Bind with a fresh name. Return the transformed pattern, and the identifier + * that refers to the bound variable for the pattern. Wildcard Binds are + * also replaced by Binds with fresh names. 
+ */ + def makeIdPat(pat: Tree): (Tree, Ident) = pat match { + case bind @ Bind(name, pat1) => + if name == nme.WILDCARD then + val name = UniqueName.fresh() + (cpy.Bind(pat)(name, pat1).withMods(bind.mods), Ident(name)) + else (pat, Ident(name)) + case id: Ident if isVarPattern(id) && id.name != nme.WILDCARD => (id, id) + case Typed(id: Ident, _) if isVarPattern(id) && id.name != nme.WILDCARD => (pat, id) + case _ => + val name = UniqueName.fresh() + (Bind(name, pat), Ident(name)) + } + + /** Make a pattern filter: + * rhs.withFilter { case pat => true case _ => false } + * + * On handling irrefutable patterns: + * The idea is to wait until the pattern matcher sees a call + * + * xs withFilter { cases } + * + * where cases can be proven to be refutable i.e. cases would be + * equivalent to { case _ => true } + * + * In that case, compile to + * + * xs withFilter alwaysTrue + * + * where `alwaysTrue` is a predefined function value: + * + * val alwaysTrue: Any => Boolean = true + * + * In the libraries operations can take advantage of alwaysTrue to shortcircuit the + * withFilter call. + * + * def withFilter(f: Elem => Boolean) = + * if (f eq alwaysTrue) this // or rather identity filter monadic applied to this + * else real withFilter + */ + def makePatFilter(rhs: Tree, pat: Tree): Tree = { + val cases = List( + CaseDef(pat, EmptyTree, Literal(Constant(true))), + CaseDef(Ident(nme.WILDCARD), EmptyTree, Literal(Constant(false)))) + Apply(Select(rhs, nme.withFilter), makeCaseLambda(cases, MatchCheck.None)) + } + + /** Is pattern `pat` irrefutable when matched against `rhs`? + * We only can do a simple syntactic check here; a more refined check + * is done later in the pattern matcher (see discussion in @makePatFilter). 
+ */ + def isIrrefutable(pat: Tree, rhs: Tree): Boolean = { + def matchesTuple(pats: List[Tree], rhs: Tree): Boolean = rhs match { + case Tuple(trees) => (pats corresponds trees)(isIrrefutable) + case Parens(rhs1) => matchesTuple(pats, rhs1) + case Block(_, rhs1) => matchesTuple(pats, rhs1) + case If(_, thenp, elsep) => matchesTuple(pats, thenp) && matchesTuple(pats, elsep) + case Match(_, cases) => cases forall (matchesTuple(pats, _)) + case CaseDef(_, _, rhs1) => matchesTuple(pats, rhs1) + case Throw(_) => true + case _ => false + } + pat match { + case Bind(_, pat1) => isIrrefutable(pat1, rhs) + case Parens(pat1) => isIrrefutable(pat1, rhs) + case Tuple(pats) => matchesTuple(pats, rhs) + case _ => isVarPattern(pat) + } + } + + /** Is `pat` of the form `x`, `x T`, or `given T`? when used as the lhs of a generator, + * these are all considered irrefutable. + */ + def isVarBinding(pat: Tree): Boolean = pat match + case pat @ Bind(_, pat1) if pat.mods.is(Given) => isVarBinding(pat1) + case IdPattern(_) => true + case _ => false + + def needsNoFilter(gen: GenFrom): Boolean = gen.checkMode match + case GenCheckMode.FilterAlways => false // pattern was prefixed by `case` + case GenCheckMode.FilterNow | GenCheckMode.CheckAndFilter => isVarBinding(gen.pat) || isIrrefutable(gen.pat, gen.expr) + case GenCheckMode.Check => true + case GenCheckMode.Ignore => true + + /** rhs.name with a pattern filter on rhs unless `pat` is irrefutable when + * matched against `rhs`. 
+ */ + def rhsSelect(gen: GenFrom, name: TermName) = { + val rhs = if (needsNoFilter(gen)) gen.expr else makePatFilter(gen.expr, gen.pat) + Select(rhs, name) + } + + enums match { + case (gen: GenFrom) :: Nil => + Apply(rhsSelect(gen, mapName), makeLambda(gen, body)) + case (gen: GenFrom) :: (rest @ (GenFrom(_, _, _) :: _)) => + val cont = makeFor(mapName, flatMapName, rest, body) + Apply(rhsSelect(gen, flatMapName), makeLambda(gen, cont)) + case (gen: GenFrom) :: (rest @ GenAlias(_, _) :: _) => + val (valeqs, rest1) = rest.span(_.isInstanceOf[GenAlias]) + val pats = valeqs map { case GenAlias(pat, _) => pat } + val rhss = valeqs map { case GenAlias(_, rhs) => rhs } + val (defpat0, id0) = makeIdPat(gen.pat) + val (defpats, ids) = (pats map makeIdPat).unzip + val pdefs = valeqs.lazyZip(defpats).lazyZip(rhss).map { (valeq, defpat, rhs) => + val mods = defpat match + case defTree: DefTree => defTree.mods + case _ => Modifiers() + makePatDef(valeq, mods, defpat, rhs) + } + val rhs1 = makeFor(nme.map, nme.flatMap, GenFrom(defpat0, gen.expr, gen.checkMode) :: Nil, Block(pdefs, makeTuple(id0 :: ids))) + val allpats = gen.pat :: pats + val vfrom1 = GenFrom(makeTuple(allpats), rhs1, GenCheckMode.Ignore) + makeFor(mapName, flatMapName, vfrom1 :: rest1, body) + case (gen: GenFrom) :: test :: rest => + val filtered = Apply(rhsSelect(gen, nme.withFilter), makeLambda(gen, test)) + val genFrom = GenFrom(gen.pat, filtered, GenCheckMode.Ignore) + makeFor(mapName, flatMapName, genFrom :: rest, body) + case _ => + EmptyTree //may happen for erroneous input + } + } + + def makePolyFunction(targs: List[Tree], body: Tree, pt: Type): Tree = body match { + case Parens(body1) => + makePolyFunction(targs, body1, pt) + case Block(Nil, body1) => + makePolyFunction(targs, body1, pt) + case Function(vargs, res) => + assert(targs.nonEmpty) + // TODO: Figure out if we need a `PolyFunctionWithMods` instead. 
+ val mods = body match { + case body: FunctionWithMods => body.mods + case _ => untpd.EmptyModifiers + } + val polyFunctionTpt = ref(defn.PolyFunctionType) + val applyTParams = targs.asInstanceOf[List[TypeDef]] + if (ctx.mode.is(Mode.Type)) { + // Desugar [T_1, ..., T_M] -> (P_1, ..., P_N) => R + // Into scala.PolyFunction { def apply[T_1, ..., T_M](x$1: P_1, ..., x$N: P_N): R } + + val applyVParams = vargs.zipWithIndex.map { + case (p: ValDef, _) => p.withAddedFlags(mods.flags) + case (p, n) => makeSyntheticParameter(n + 1, p).withAddedFlags(mods.flags) + } + RefinedTypeTree(polyFunctionTpt, List( + DefDef(nme.apply, applyTParams :: applyVParams :: Nil, res, EmptyTree).withFlags(Synthetic) + )) + } + else { + // Desugar [T_1, ..., T_M] -> (x_1: P_1, ..., x_N: P_N) => body + // with pt [S_1, ..., S_M] -> (O_1, ..., O_N) => R + // Into new scala.PolyFunction { def apply[T_1, ..., T_M](x_1: P_1, ..., x_N: P_N): R2 = body } + // where R2 is R, with all references to S_1..S_M replaced with T1..T_M. + + def typeTree(tp: Type) = tp match + case RefinedType(parent, nme.apply, PolyType(_, mt)) if parent.typeSymbol eq defn.PolyFunctionClass => + var bail = false + def mapper(tp: Type, topLevel: Boolean = false): Tree = tp match + case tp: TypeRef => ref(tp) + case tp: TypeParamRef => Ident(applyTParams(tp.paramNum).name) + case AppliedType(tycon, args) => AppliedTypeTree(mapper(tycon), args.map(mapper(_))) + case _ => if topLevel then TypeTree() else { bail = true; genericEmptyTree } + val mapped = mapper(mt.resultType, topLevel = true) + if bail then TypeTree() else mapped + case _ => TypeTree() + + val applyVParams = vargs.asInstanceOf[List[ValDef]] + .map(varg => varg.withAddedFlags(mods.flags | Param)) + New(Template(emptyConstructor, List(polyFunctionTpt), Nil, EmptyValDef, + List(DefDef(nme.apply, applyTParams :: applyVParams :: Nil, typeTree(pt), res)) + )) + } + case _ => + // may happen for erroneous input. An error will already have been reported. 
+ assert(ctx.reporter.errorsReported) + EmptyTree + } + + // begin desugar + + // Special case for `Parens` desugaring: unlike all the desugarings below, + // its output is not a new tree but an existing one whose position should + // be preserved, so we shouldn't call `withPos` on it. + tree match { + case Parens(t) => + return t + case _ => + } + + val desugared = tree match { + case PolyFunction(targs, body) => + makePolyFunction(targs, body, pt) orElse tree + case SymbolLit(str) => + Apply( + ref(defn.ScalaSymbolClass.companionModule.termRef), + Literal(Constant(str)) :: Nil) + case InterpolatedString(id, segments) => + val strs = segments map { + case ts: Thicket => ts.trees.head + case t => t + } + val elems = segments flatMap { + case ts: Thicket => ts.trees.tail + case t => Nil + } map { (t: Tree) => t match + // !cc! explicitly typed parameter (t: Tree) is needed since otherwise + // we get an error similar to #16268. (The explicit type constrains the type of `segments` + // which is otherwise List[{*} tree]) + case Block(Nil, EmptyTree) => Literal(Constant(())) // for s"... ${} ..." 
+ case Block(Nil, expr) => expr // important for interpolated string as patterns, see i1773.scala + case t => t + } + // This is a deliberate departure from scalac, where StringContext is not rooted (See #4732) + Apply(Select(Apply(scalaDot(nme.StringContext), strs), id).withSpan(tree.span), elems) + case PostfixOp(t, op) => + if (ctx.mode is Mode.Type) && !isBackquoted(op) && op.name == tpnme.raw.STAR then + if ctx.isJava then + AppliedTypeTree(ref(defn.RepeatedParamType), t) + else + Annotated( + AppliedTypeTree(ref(defn.SeqType), t), + New(ref(defn.RepeatedAnnot.typeRef), Nil :: Nil)) + else + assert(ctx.mode.isExpr || ctx.reporter.errorsReported || ctx.mode.is(Mode.Interactive), ctx.mode) + Select(t, op.name) + case PrefixOp(op, t) => + val nspace = if (ctx.mode.is(Mode.Type)) tpnme else nme + Select(t, nspace.UNARY_PREFIX ++ op.name) + case ForDo(enums, body) => + makeFor(nme.foreach, nme.foreach, enums, body) orElse tree + case ForYield(enums, body) => + makeFor(nme.map, nme.flatMap, enums, body) orElse tree + case PatDef(mods, pats, tpt, rhs) => + val pats1 = if (tpt.isEmpty) pats else pats map (Typed(_, tpt)) + flatTree(pats1 map (makePatDef(tree, mods, _, rhs))) + case ext: ExtMethods => + Block(List(ext), Literal(Constant(())).withSpan(ext.span)) + case CapturingTypeTree(refs, parent) => + // convert `{refs} T` to `T @retains refs` + // `{refs}-> T` to `-> (T @retainsByName refs)` + def annotate(annotName: TypeName, tp: Tree) = + Annotated(tp, New(scalaAnnotationDot(annotName), List(refs))) + parent match + case ByNameTypeTree(restpt) => + cpy.ByNameTypeTree(parent)(annotate(tpnme.retainsByName, restpt)) + case _ => + annotate(tpnme.retains, parent) + } + desugared.withSpan(tree.span) + } + + /** Turn a fucntion value `handlerFun` into a catch case for a try. 
+ * If `handlerFun` is a partial function, translate to + * + * case ex => + * val ev$1 = handlerFun + * if ev$1.isDefinedAt(ex) then ev$1.apply(ex) else throw ex + * + * Otherwise translate to + * + * case ex => handlerFun.apply(ex) + */ + def makeTryCase(handlerFun: tpd.Tree)(using Context): CaseDef = + val handler = TypedSplice(handlerFun) + val excId = Ident(nme.DEFAULT_EXCEPTION_NAME) + val rhs = + if handlerFun.tpe.widen.isRef(defn.PartialFunctionClass) then + val tmpName = UniqueName.fresh() + val tmpId = Ident(tmpName) + val init = ValDef(tmpName, TypeTree(), handler) + val test = If( + Apply(Select(tmpId, nme.isDefinedAt), excId), + Apply(Select(tmpId, nme.apply), excId), + Throw(excId)) + Block(init :: Nil, test) + else + Apply(Select(handler, nme.apply), excId) + CaseDef(excId, EmptyTree, rhs) + + /** Create a class definition with the same info as the refined type given by `parent` + * and `refinements`. + * + * parent { refinements } + * ==> + * trait extends core { this: self => refinements } + * + * Here, `core` is the (possibly parameterized) class part of `parent`. + * If `parent` is the same as `core`, self is empty. Otherwise `self` is `parent`. + * + * Example: Given + * + * class C + * type T1 = C { type T <: A } + * + * the refined type + * + * T1 { type T <: B } + * + * is expanded to + * + * trait extends C { this: T1 => type T <: A } + * + * The result of this method is used for validity checking, is thrown away afterwards. 
+ * @param parent The type of `parent` + */ + def refinedTypeToClass(parent: tpd.Tree, refinements: List[Tree])(using Context): TypeDef = { + def stripToCore(tp: Type): List[Type] = tp match { + case tp: AppliedType => tp :: Nil + case tp: TypeRef if tp.symbol.isClass => tp :: Nil // monomorphic class type + case tp: TypeProxy => stripToCore(tp.underlying) + case AndType(tp1, tp2) => stripToCore(tp1) ::: stripToCore(tp2) + case _ => defn.AnyType :: Nil + } + val parentCores = stripToCore(parent.tpe) + val untpdParent = TypedSplice(parent) + val (classParents, self) = + if (parentCores.length == 1 && (parent.tpe eq parentCores.head)) (untpdParent :: Nil, EmptyValDef) + else (parentCores map TypeTree, ValDef(nme.WILDCARD, untpdParent, EmptyTree)) + val impl = Template(emptyConstructor, classParents, Nil, self, refinements) + TypeDef(tpnme.REFINE_CLASS, impl).withFlags(Trait) + } + + /** Returns list of all pattern variables, possibly with their types, + * without duplicates + */ + private def getVariables(tree: Tree, shouldAddGiven: Context ?=> Bind => Boolean)(using Context): List[VarInfo] = { + val buf = ListBuffer[VarInfo]() + def seenName(name: Name) = buf exists (_._1.name == name) + def add(named: NameTree, t: Tree): Unit = + if (!seenName(named.name) && named.name.isTermName) buf += ((named, t)) + def collect(tree: Tree): Unit = tree match { + case tree @ Bind(nme.WILDCARD, tree1) => + if tree.mods.is(Given) then + val Typed(_, tpt) = tree1: @unchecked + if shouldAddGiven(tree) then + add(tree, tpt) + collect(tree1) + case tree @ Bind(_, Typed(tree1, tpt)) => + if !(tree.mods.is(Given) && !shouldAddGiven(tree)) then + add(tree, tpt) + collect(tree1) + case tree @ Bind(_, tree1) => + add(tree, TypeTree()) + collect(tree1) + case Typed(id: Ident, t) if isVarPattern(id) && id.name != nme.WILDCARD && !isWildcardStarArg(tree) => + add(id, t) + case id: Ident if isVarPattern(id) && id.name != nme.WILDCARD => + add(id, TypeTree()) + case Apply(_, args) => + args 
foreach collect + case Typed(expr, _) => + collect(expr) + case NamedArg(_, arg) => + collect(arg) + case SeqLiteral(elems, _) => + elems foreach collect + case Alternative(trees) => + for (tree <- trees; (vble, _) <- getVariables(tree, shouldAddGiven)) + report.error(IllegalVariableInPatternAlternative(vble.symbol.name), vble.srcPos) + case Annotated(arg, _) => + collect(arg) + case InterpolatedString(_, segments) => + segments foreach collect + case InfixOp(left, _, right) => + collect(left) + collect(right) + case PrefixOp(_, od) => + collect(od) + case Parens(tree) => + collect(tree) + case Tuple(trees) => + trees foreach collect + case Thicket(trees) => + trees foreach collect + case Block(Nil, expr) => + collect(expr) + case Quote(expr) => + new UntypedTreeTraverser { + def traverse(tree: untpd.Tree)(using Context): Unit = tree match { + case Splice(expr) => collect(expr) + case _ => traverseChildren(tree) + } + }.traverse(expr) + case CapturingTypeTree(refs, parent) => + collect(parent) + case _ => + } + collect(tree) + buf.toList + } +} diff --git a/tests/pos-with-compiler-cc/dotc/ast/DesugarEnums.scala b/tests/pos-with-compiler-cc/dotc/ast/DesugarEnums.scala new file mode 100644 index 000000000000..a1c3c0ed0775 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/ast/DesugarEnums.scala @@ -0,0 +1,310 @@ +package dotty.tools +package dotc +package ast + +import core._ +import util.Spans._, Types._, Contexts._, Constants._, Names._, Flags._ +import Symbols._, StdNames._, Trees._ +import Decorators._ +import util.{Property, SourceFile} +import typer.ErrorReporting._ +import transform.SyntheticMembers.ExtendsSingletonMirror + +import scala.annotation.internal.sharable + +/** Helper methods to desugar enums */ +object DesugarEnums { + import untpd._ + + enum CaseKind: + case Simple, Object, Class + + final case class EnumConstraints(minKind: CaseKind, maxKind: CaseKind, enumCases: List[(Int, RefTree)]): + require(minKind.ordinal <= maxKind.ordinal && !(cached && 
enumCases.isEmpty)) + def requiresCreator = minKind == CaseKind.Simple + def isEnumeration = maxKind.ordinal < CaseKind.Class.ordinal + def cached = minKind.ordinal < CaseKind.Class.ordinal + end EnumConstraints + + /** Attachment containing the number of enum cases, the smallest kind that was seen so far, + * and a list of all the value cases with their ordinals. + */ + val EnumCaseCount: Property.Key[(Int, CaseKind, CaseKind, List[(Int, TermName)])] = Property.Key() + + /** Attachment signalling that when this definition is desugared, it should add any additional + * lookup methods for enums. + */ + val DefinesEnumLookupMethods: Property.Key[Unit] = Property.Key() + + /** The enumeration class that belongs to an enum case. This works no matter + * whether the case is still in the enum class or it has been transferred to the + * companion object. + */ + def enumClass(using Context): Symbol = { + val cls = ctx.owner + if (cls.is(Module)) cls.linkedClass else cls + } + + def enumCompanion(using Context): Symbol = { + val cls = ctx.owner + if (cls.is(Module)) cls.sourceModule else cls.linkedClass.sourceModule + } + + /** Is `tree` an (untyped) enum case? */ + def isEnumCase(tree: Tree)(using Context): Boolean = tree match { + case tree: MemberDef => tree.mods.isEnumCase + case PatDef(mods, _, _, _) => mods.isEnumCase + case _ => false + } + + /** A reference to the enum class `E`, possibly followed by type arguments. + * Each covariant type parameter is approximated by its lower bound. + * Each contravariant type parameter is approximated by its upper bound. + * It is an error if a type parameter is non-variant, or if its approximation + * refers to pther type parameters. 
+ */ + def interpolatedEnumParent(span: Span)(using Context): Tree = { + val tparams = enumClass.typeParams + def isGround(tp: Type) = tp.subst(tparams, tparams.map(_ => NoType)) eq tp + val targs = tparams map { tparam => + if (tparam.is(Covariant) && isGround(tparam.info.bounds.lo)) + tparam.info.bounds.lo + else if (tparam.is(Contravariant) && isGround(tparam.info.bounds.hi)) + tparam.info.bounds.hi + else { + def problem = + if (!tparam.isOneOf(VarianceFlags)) "is invariant" + else "has bounds that depend on a type parameter in the same parameter list" + errorType(em"""cannot determine type argument for enum parent $enumClass, + |type parameter $tparam $problem""", ctx.source.atSpan(span)) + } + } + TypeTree(enumClass.typeRef.appliedTo(targs)).withSpan(span) + } + + /** A type tree referring to `enumClass` */ + def enumClassRef(using Context): Tree = + if (enumClass.exists) TypeTree(enumClass.typeRef) else TypeTree() + + /** Add implied flags to an enum class or an enum case */ + def addEnumFlags(cdef: TypeDef)(using Context): TypeDef = + if (cdef.mods.isEnumClass) cdef.withMods(cdef.mods.withAddedFlags(Abstract | Sealed, cdef.span)) + else if (isEnumCase(cdef)) cdef.withMods(cdef.mods.withAddedFlags(Final, cdef.span)) + else cdef + + private def valuesDot(name: PreName)(implicit src: SourceFile) = + Select(Ident(nme.DOLLAR_VALUES), name.toTermName) + + private def ArrayLiteral(values: List[Tree], tpt: Tree)(using Context): Tree = + val clazzOf = TypeApply(ref(defn.Predef_classOf.termRef), tpt :: Nil) + val ctag = Apply(TypeApply(ref(defn.ClassTagModule_apply.termRef), tpt :: Nil), clazzOf :: Nil) + val apply = Select(ref(defn.ArrayModule.termRef), nme.apply) + Apply(Apply(TypeApply(apply, tpt :: Nil), values), ctag :: Nil) + + /** The following lists of definitions for an enum type E and known value cases e_0, ..., e_n: + * + * private val $values = Array[E](this.e_0,...,this.e_n)(ClassTag[E](classOf[E])) + * def values = $values.clone + * def valueOf($name: 
String) = $name match { + * case "e_0" => this.e_0 + * ... + * case "e_n" => this.e_n + * case _ => throw new IllegalArgumentException("case not found: " + $name) + * } + */ + private def enumScaffolding(enumValues: List[RefTree])(using Context): List[Tree] = { + val rawEnumClassRef = rawRef(enumClass.typeRef) + extension (tpe: NamedType) def ofRawEnum = AppliedTypeTree(ref(tpe), rawEnumClassRef) + + val privateValuesDef = + ValDef(nme.DOLLAR_VALUES, TypeTree(), ArrayLiteral(enumValues, rawEnumClassRef)) + .withFlags(Private | Synthetic) + + val valuesDef = + DefDef(nme.values, Nil, defn.ArrayType.ofRawEnum, valuesDot(nme.clone_)) + .withFlags(Synthetic) + + val valuesOfBody: Tree = + val defaultCase = + val msg = Apply(Select(Literal(Constant("enum case not found: ")), nme.PLUS), Ident(nme.nameDollar)) + CaseDef(Ident(nme.WILDCARD), EmptyTree, + Throw(New(TypeTree(defn.IllegalArgumentExceptionType), List(msg :: Nil)))) + val stringCases = enumValues.map(enumValue => + CaseDef(Literal(Constant(enumValue.name.toString)), EmptyTree, enumValue) + ) ::: defaultCase :: Nil + Match(Ident(nme.nameDollar), stringCases) + val valueOfDef = DefDef(nme.valueOf, List(param(nme.nameDollar, defn.StringType) :: Nil), + TypeTree(), valuesOfBody) + .withFlags(Synthetic) + + privateValuesDef :: + valuesDef :: + valueOfDef :: Nil + } + + private def enumLookupMethods(constraints: EnumConstraints)(using Context): List[Tree] = + def scaffolding: List[Tree] = + if constraints.isEnumeration then enumScaffolding(constraints.enumCases.map(_._2)) else Nil + def valueCtor: List[Tree] = if constraints.requiresCreator then enumValueCreator :: Nil else Nil + def fromOrdinal: Tree = + def throwArg(ordinal: Tree) = + Throw(New(TypeTree(defn.NoSuchElementExceptionType), List(Select(ordinal, nme.toString_) :: Nil))) + if !constraints.cached then + fromOrdinalMeth(throwArg) + else + def default(ordinal: Tree) = + CaseDef(Ident(nme.WILDCARD), EmptyTree, throwArg(ordinal)) + if 
constraints.isEnumeration then + fromOrdinalMeth(ordinal => + Try(Apply(valuesDot(nme.apply), ordinal), default(ordinal) :: Nil, EmptyTree)) + else + fromOrdinalMeth(ordinal => + Match(ordinal, + constraints.enumCases.map((i, enumValue) => CaseDef(Literal(Constant(i)), EmptyTree, enumValue)) + :+ default(ordinal))) + + if !enumClass.exists then + // in the case of a double definition of an enum that only defines class cases (see tests/neg/i4470c.scala) + // it seems `enumClass` might be `NoSymbol`; in this case we provide no scaffolding. + Nil + else + scaffolding ::: valueCtor ::: fromOrdinal :: Nil + end enumLookupMethods + + /** A creation method for a value of enum type `E`, which is defined as follows: + * + * private def $new(_$ordinal: Int, $name: String) = new E with scala.runtime.EnumValue { + * def ordinal = _$ordinal // if `E` does not derive from `java.lang.Enum` + * } + */ + private def enumValueCreator(using Context) = { + val creator = New(Template( + constr = emptyConstructor, + parents = enumClassRef :: scalaRuntimeDot(tpnme.EnumValue) :: Nil, + derived = Nil, + self = EmptyValDef, + body = Nil + ).withAttachment(ExtendsSingletonMirror, ())) + DefDef(nme.DOLLAR_NEW, + List(List(param(nme.ordinalDollar_, defn.IntType), param(nme.nameDollar, defn.StringType))), + TypeTree(), creator).withFlags(Private | Synthetic) + } + + /** Is a type parameter in `enumTypeParams` referenced from an enum class case that has + * given type parameters `caseTypeParams`, value parameters `vparamss` and parents `parents`? + * Issues an error if that is the case but the reference is illegal. + * The reference could be illegal for two reasons: + * - explicit type parameters are given + * - it's a value case, i.e. 
no value parameters are given + */ + def typeParamIsReferenced( + enumTypeParams: List[TypeSymbol], + caseTypeParams: List[TypeDef], + vparamss: List[List[ValDef]], + parents: List[Tree])(using Context): Boolean = { + + object searchRef extends UntypedTreeAccumulator[Boolean] { + var tparamNames = enumTypeParams.map(_.name).toSet[Name] + def underBinders(binders: List[MemberDef], op: => Boolean): Boolean = { + val saved = tparamNames + tparamNames = tparamNames -- binders.map(_.name) + try op + finally tparamNames = saved + } + def apply(x: Boolean, tree: Tree)(using Context): Boolean = x || { + tree match { + case Ident(name) => + val matches = tparamNames.contains(name) + if (matches && (caseTypeParams.nonEmpty || vparamss.isEmpty)) + report.error(em"illegal reference to type parameter $name from enum case", tree.srcPos) + matches + case LambdaTypeTree(lambdaParams, body) => + underBinders(lambdaParams, foldOver(x, tree)) + case RefinedTypeTree(parent, refinements) => + val refinementDefs = refinements collect { case r: MemberDef => r } + underBinders(refinementDefs, foldOver(x, tree)) + case _ => foldOver(x, tree) + } + } + def apply(tree: Tree)(using Context): Boolean = + underBinders(caseTypeParams, apply(false, tree)) + } + + def typeHasRef(tpt: Tree) = searchRef(tpt) + def valDefHasRef(vd: ValDef) = typeHasRef(vd.tpt) + def parentHasRef(parent: Tree): Boolean = parent match { + case Apply(fn, _) => parentHasRef(fn) + case TypeApply(_, targs) => targs.exists(typeHasRef) + case Select(nu, nme.CONSTRUCTOR) => parentHasRef(nu) + case New(tpt) => typeHasRef(tpt) + case parent => parent.isType && typeHasRef(parent) + } + + vparamss.nestedExists(valDefHasRef) || parents.exists(parentHasRef) + } + + /** A pair consisting of + * - the next enum tag + * - scaffolding containing the necessary definitions for singleton enum cases + * unless that scaffolding was already generated by a previous call to `nextEnumKind`. 
+ */ + def nextOrdinal(name: Name, kind: CaseKind, definesLookups: Boolean)(using Context): (Int, List[Tree]) = { + val (ordinal, seenMinKind, seenMaxKind, seenCases) = + ctx.tree.removeAttachment(EnumCaseCount).getOrElse((0, CaseKind.Class, CaseKind.Simple, Nil)) + val minKind = if kind.ordinal < seenMinKind.ordinal then kind else seenMinKind + val maxKind = if kind.ordinal > seenMaxKind.ordinal then kind else seenMaxKind + val cases = name match + case name: TermName => (ordinal, name) :: seenCases + case _ => seenCases + if definesLookups then + val thisRef = This(EmptyTypeIdent) + val cachedValues = cases.reverse.map((i, name) => (i, Select(thisRef, name))) + (ordinal, enumLookupMethods(EnumConstraints(minKind, maxKind, cachedValues))) + else + ctx.tree.pushAttachment(EnumCaseCount, (ordinal + 1, minKind, maxKind, cases)) + (ordinal, Nil) + } + + def param(name: TermName, typ: Type)(using Context): ValDef = param(name, TypeTree(typ)) + def param(name: TermName, tpt: Tree)(using Context): ValDef = ValDef(name, tpt, EmptyTree).withFlags(Param) + + def ordinalMeth(body: Tree)(using Context): DefDef = + DefDef(nme.ordinal, Nil, TypeTree(defn.IntType), body).withAddedFlags(Synthetic) + + def ordinalMethLit(ord: Int)(using Context): DefDef = + ordinalMeth(Literal(Constant(ord))) + + def fromOrdinalMeth(body: Tree => Tree)(using Context): DefDef = + DefDef(nme.fromOrdinal, (param(nme.ordinal, defn.IntType) :: Nil) :: Nil, + rawRef(enumClass.typeRef), body(Ident(nme.ordinal))).withFlags(Synthetic) + + /** Expand a module definition representing a parameterless enum case */ + def expandEnumModule(name: TermName, impl: Template, mods: Modifiers, definesLookups: Boolean, span: Span)(using Context): Tree = { + assert(impl.body.isEmpty) + if (!enumClass.exists) EmptyTree + else if (impl.parents.isEmpty) + expandSimpleEnumCase(name, mods, definesLookups, span) + else { + val (tag, scaffolding) = nextOrdinal(name, CaseKind.Object, definesLookups) + val impl1 = 
cpy.Template(impl)(parents = impl.parents :+ scalaRuntimeDot(tpnme.EnumValue), body = Nil) + .withAttachment(ExtendsSingletonMirror, ()) + val vdef = ValDef(name, TypeTree(), New(impl1)).withMods(mods.withAddedFlags(EnumValue, span)) + flatTree(vdef :: scaffolding).withSpan(span) + } + } + + /** Expand a simple enum case */ + def expandSimpleEnumCase(name: TermName, mods: Modifiers, definesLookups: Boolean, span: Span)(using Context): Tree = + if (!enumClass.exists) EmptyTree + else if (enumClass.typeParams.nonEmpty) { + val parent = interpolatedEnumParent(span) + val impl = Template(emptyConstructor, parent :: Nil, Nil, EmptyValDef, Nil) + expandEnumModule(name, impl, mods, definesLookups, span) + } + else { + val (tag, scaffolding) = nextOrdinal(name, CaseKind.Simple, definesLookups) + val creator = Apply(Ident(nme.DOLLAR_NEW), List(Literal(Constant(tag)), Literal(Constant(name.toString)))) + val vdef = ValDef(name, enumClassRef, creator).withMods(mods.withAddedFlags(EnumValue, span)) + flatTree(vdef :: scaffolding).withSpan(span) + } +} diff --git a/tests/pos-with-compiler-cc/dotc/ast/MainProxies.scala b/tests/pos-with-compiler-cc/dotc/ast/MainProxies.scala new file mode 100644 index 000000000000..c0cf2c0d1b81 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/ast/MainProxies.scala @@ -0,0 +1,449 @@ +package dotty.tools.dotc +package ast + +import core._ +import Symbols._, Types._, Contexts._, Decorators._, util.Spans._, Flags._, Constants._ +import StdNames.{nme, tpnme} +import ast.Trees._ +import Names.Name +import Comments.Comment +import NameKinds.DefaultGetterName +import Annotations.Annotation + +object MainProxies { + + /** Generate proxy classes for @main functions and @myMain functions where myMain <:< MainAnnotation */ + def proxies(stats: List[tpd.Tree])(using Context): List[untpd.Tree] = { + mainAnnotationProxies(stats) ++ mainProxies(stats) + } + + /** Generate proxy classes for @main functions. 
+ * A function like + * + * @main def f(x: S, ys: T*) = ... + * + * would be translated to something like + * + * import CommandLineParser._ + * class f { + * @static def main(args: Array[String]): Unit = + * try + * f( + * parseArgument[S](args, 0), + * parseRemainingArguments[T](args, 1): _* + * ) + * catch case err: ParseError => showError(err) + * } + */ + private def mainProxies(stats: List[tpd.Tree])(using Context): List[untpd.Tree] = { + import tpd._ + def mainMethods(stats: List[Tree]): List[Symbol] = stats.flatMap { + case stat: DefDef if stat.symbol.hasAnnotation(defn.MainAnnot) => + stat.symbol :: Nil + case stat @ TypeDef(name, impl: Template) if stat.symbol.is(Module) => + mainMethods(impl.body) + case _ => + Nil + } + mainMethods(stats).flatMap(mainProxy) + } + + import untpd._ + private def mainProxy(mainFun: Symbol)(using Context): List[TypeDef] = { + val mainAnnotSpan = mainFun.getAnnotation(defn.MainAnnot).get.tree.span + def pos = mainFun.sourcePos + val argsRef = Ident(nme.args) + + def addArgs(call: untpd.Tree, mt: MethodType, idx: Int): untpd.Tree = + if (mt.isImplicitMethod) { + report.error(em"@main method cannot have implicit parameters", pos) + call + } + else { + val args = mt.paramInfos.zipWithIndex map { + (formal, n) => + val (parserSym, formalElem) = + if (formal.isRepeatedParam) (defn.CLP_parseRemainingArguments, formal.argTypes.head) + else (defn.CLP_parseArgument, formal) + val arg = Apply( + TypeApply(ref(parserSym.termRef), TypeTree(formalElem) :: Nil), + argsRef :: Literal(Constant(idx + n)) :: Nil) + if (formal.isRepeatedParam) repeated(arg) else arg + } + val call1 = Apply(call, args) + mt.resType match { + case restpe: MethodType => + if (mt.paramInfos.lastOption.getOrElse(NoType).isRepeatedParam) + report.error(em"varargs parameter of @main method must come last", pos) + addArgs(call1, restpe, idx + args.length) + case _ => + call1 + } + } + + var result: List[TypeDef] = Nil + if (!mainFun.owner.isStaticOwner) + 
report.error(em"@main method is not statically accessible", pos) + else { + var call = ref(mainFun.termRef) + mainFun.info match { + case _: ExprType => + case mt: MethodType => + call = addArgs(call, mt, 0) + case _: PolyType => + report.error(em"@main method cannot have type parameters", pos) + case _ => + report.error(em"@main can only annotate a method", pos) + } + val errVar = Ident(nme.error) + val handler = CaseDef( + Typed(errVar, TypeTree(defn.CLP_ParseError.typeRef)), + EmptyTree, + Apply(ref(defn.CLP_showError.termRef), errVar :: Nil)) + val body = Try(call, handler :: Nil, EmptyTree) + val mainArg = ValDef(nme.args, TypeTree(defn.ArrayType.appliedTo(defn.StringType)), EmptyTree) + .withFlags(Param) + /** Replace typed `Ident`s that have been typed with a TypeSplice with the reference to the symbol. + * The annotations will be retype-checked in another scope that may not have the same imports. + */ + def insertTypeSplices = new TreeMap { + override def transform(tree: Tree)(using Context): Tree = tree match + case tree: tpd.Ident @unchecked => TypedSplice(tree) + case tree => super.transform(tree) + } + val annots = mainFun.annotations + .filterNot(_.matches(defn.MainAnnot)) + .map(annot => insertTypeSplices.transform(annot.tree)) + val mainMeth = DefDef(nme.main, (mainArg :: Nil) :: Nil, TypeTree(defn.UnitType), body) + .withFlags(JavaStatic | Synthetic) + .withAnnotations(annots) + val mainTempl = Template(emptyConstructor, Nil, Nil, EmptyValDef, mainMeth :: Nil) + val mainCls = TypeDef(mainFun.name.toTypeName, mainTempl) + .withFlags(Final | Invisible) + + if (!ctx.reporter.hasErrors) + result = mainCls.withSpan(mainAnnotSpan.toSynthetic) :: Nil + } + result + } + + private type DefaultValueSymbols = Map[Int, Symbol] + private type ParameterAnnotationss = Seq[Seq[Annotation]] + + /** + * Generate proxy classes for main functions. + * A function like + * + * /** + * * Lorem ipsum dolor sit amet + * * consectetur adipiscing elit. 
+ * * + * * @param x my param x + * * @param ys all my params y + * */ + * @myMain(80) def f( + * @myMain.Alias("myX") x: S, + * y: S, + * ys: T* + * ) = ... + * + * would be translated to something like + * + * final class f { + * static def main(args: Array[String]): Unit = { + * val annotation = new myMain(80) + * val info = new Info( + * name = "f", + * documentation = "Lorem ipsum dolor sit amet consectetur adipiscing elit.", + * parameters = Seq( + * new scala.annotation.MainAnnotation.Parameter("x", "S", false, false, "my param x", Seq(new scala.main.Alias("myX"))), + * new scala.annotation.MainAnnotation.Parameter("y", "S", true, false, "", Seq()), + * new scala.annotation.MainAnnotation.Parameter("ys", "T", false, true, "all my params y", Seq()) + * ) + * ), + * val command = annotation.command(info, args) + * if command.isDefined then + * val cmd = command.get + * val args0: () => S = annotation.argGetter[S](info.parameters(0), cmd(0), None) + * val args1: () => S = annotation.argGetter[S](info.parameters(1), mainArgs(1), Some(() => sum$default$1())) + * val args2: () => Seq[T] = annotation.varargGetter[T](info.parameters(2), cmd.drop(2)) + * annotation.run(() => f(args0(), args1(), args2()*)) + * } + * } + */ + private def mainAnnotationProxies(stats: List[tpd.Tree])(using Context): List[untpd.Tree] = { + import tpd._ + + /** + * Computes the symbols of the default values of the function. Since they cannot be inferred anymore at this + * point of the compilation, they must be explicitly passed by [[mainProxy]]. 
+ */ + def defaultValueSymbols(scope: Tree, funSymbol: Symbol): DefaultValueSymbols = + scope match { + case TypeDef(_, template: Template) => + template.body.flatMap((_: Tree) match { + case dd: DefDef if dd.name.is(DefaultGetterName) && dd.name.firstPart == funSymbol.name => + val DefaultGetterName.NumberedInfo(index) = dd.name.info: @unchecked + List(index -> dd.symbol) + case _ => Nil + }).toMap + case _ => Map.empty + } + + /** Computes the list of main methods present in the code. */ + def mainMethods(scope: Tree, stats: List[Tree]): List[(Symbol, ParameterAnnotationss, DefaultValueSymbols, Option[Comment])] = stats.flatMap { + case stat: DefDef => + val sym = stat.symbol + sym.annotations.filter(_.matches(defn.MainAnnotationClass)) match { + case Nil => + Nil + case _ :: Nil => + val paramAnnotations = stat.paramss.flatMap(_.map( + valdef => valdef.symbol.annotations.filter(_.matches(defn.MainAnnotationParameterAnnotation)) + )) + (sym, paramAnnotations.toVector, defaultValueSymbols(scope, sym), stat.rawComment) :: Nil + case mainAnnot :: others => + report.error(em"method cannot have multiple main annotations", mainAnnot.tree) + Nil + } + case stat @ TypeDef(_, impl: Template) if stat.symbol.is(Module) => + mainMethods(stat, impl.body) + case _ => + Nil + } + + // Assuming that the top-level object was already generated, all main methods will have a scope + mainMethods(EmptyTree, stats).flatMap(mainAnnotationProxy) + } + + private def mainAnnotationProxy(mainFun: Symbol, paramAnnotations: ParameterAnnotationss, defaultValueSymbols: DefaultValueSymbols, docComment: Option[Comment])(using Context): Option[TypeDef] = { + val mainAnnot = mainFun.getAnnotation(defn.MainAnnotationClass).get + def pos = mainFun.sourcePos + + val documentation = new Documentation(docComment) + + /** () => value */ + def unitToValue(value: Tree): Tree = + val defDef = DefDef(nme.ANON_FUN, List(Nil), TypeTree(), value) + Block(defDef, Closure(Nil, Ident(nme.ANON_FUN), EmptyTree)) + + 
/** Generate a list of trees containing the ParamInfo instantiations. + * + * A ParamInfo has the following shape + * ``` + * new scala.annotation.MainAnnotation.Parameter("x", "S", false, false, "my param x", Seq(new scala.main.Alias("myX"))) + * ``` + */ + def parameterInfos(mt: MethodType): List[Tree] = + extension (tree: Tree) def withProperty(sym: Symbol, args: List[Tree]) = + Apply(Select(tree, sym.name), args) + + for ((formal, paramName), idx) <- mt.paramInfos.zip(mt.paramNames).zipWithIndex yield + val param = paramName.toString + val paramType0 = if formal.isRepeatedParam then formal.argTypes.head.dealias else formal.dealias + val paramType = paramType0.dealias + val paramTypeOwner = paramType.typeSymbol.owner + val paramTypeStr = + if paramTypeOwner == defn.EmptyPackageClass then paramType.show + else paramTypeOwner.showFullName + "." + paramType.show + val hasDefault = defaultValueSymbols.contains(idx) + val isRepeated = formal.isRepeatedParam + val paramDoc = documentation.argDocs.getOrElse(param, "") + val paramAnnots = + val annotationTrees = paramAnnotations(idx).map(instantiateAnnotation).toList + Apply(ref(defn.SeqModule.termRef), annotationTrees) + + val constructorArgs = List(param, paramTypeStr, hasDefault, isRepeated, paramDoc) + .map(value => Literal(Constant(value))) + + New(TypeTree(defn.MainAnnotationParameter.typeRef), List(constructorArgs :+ paramAnnots)) + + end parameterInfos + + /** + * Creates a list of references and definitions of arguments. + * The goal is to create the + * `val args0: () => S = annotation.argGetter[S](0, cmd(0), None)` + * part of the code. 
+ */ + def argValDefs(mt: MethodType): List[ValDef] = + for ((formal, paramName), idx) <- mt.paramInfos.zip(mt.paramNames).zipWithIndex yield + val argName = nme.args ++ idx.toString + val isRepeated = formal.isRepeatedParam + val formalType = if isRepeated then formal.argTypes.head else formal + val getterName = if isRepeated then nme.varargGetter else nme.argGetter + val defaultValueGetterOpt = defaultValueSymbols.get(idx) match + case None => ref(defn.NoneModule.termRef) + case Some(dvSym) => + val value = unitToValue(ref(dvSym.termRef)) + Apply(ref(defn.SomeClass.companionModule.termRef), value) + val argGetter0 = TypeApply(Select(Ident(nme.annotation), getterName), TypeTree(formalType) :: Nil) + val index = Literal(Constant(idx)) + val paramInfo = Apply(Select(Ident(nme.info), nme.parameters), index) + val argGetter = + if isRepeated then Apply(argGetter0, List(paramInfo, Apply(Select(Ident(nme.cmd), nme.drop), List(index)))) + else Apply(argGetter0, List(paramInfo, Apply(Ident(nme.cmd), List(index)), defaultValueGetterOpt)) + ValDef(argName, TypeTree(), argGetter) + end argValDefs + + + /** Create a list of argument references that will be passed as argument to the main method. + * `args0`, ...`argn*` + */ + def argRefs(mt: MethodType): List[Tree] = + for ((formal, paramName), idx) <- mt.paramInfos.zip(mt.paramNames).zipWithIndex yield + val argRef = Apply(Ident(nme.args ++ idx.toString), Nil) + if formal.isRepeatedParam then repeated(argRef) else argRef + end argRefs + + + /** Turns an annotation (e.g. `@main(40)`) into an instance of the class (e.g. `new scala.main(40)`). 
*/ + def instantiateAnnotation(annot: Annotation): Tree = + val argss = { + def recurse(t: tpd.Tree, acc: List[List[Tree]]): List[List[Tree]] = t match { + case Apply(t, args: List[tpd.Tree]) => recurse(t, extractArgs(args) :: acc) + case _ => acc + } + + def extractArgs(args: List[tpd.Tree]): List[Tree] = + args.flatMap { + case Typed(SeqLiteral(varargs, _), _) => varargs.map(arg => TypedSplice(arg)) + case arg: Select if arg.name.is(DefaultGetterName) => Nil // Ignore default values, they will be added later by the compiler + case arg => List(TypedSplice(arg)) + } + + recurse(annot.tree, Nil) + } + + New(TypeTree(annot.symbol.typeRef), argss) + end instantiateAnnotation + + def generateMainClass(mainCall: Tree, args: List[Tree], parameterInfos: List[Tree]): TypeDef = + val cmdInfo = + val nameTree = Literal(Constant(mainFun.showName)) + val docTree = Literal(Constant(documentation.mainDoc)) + val paramInfos = Apply(ref(defn.SeqModule.termRef), parameterInfos) + New(TypeTree(defn.MainAnnotationInfo.typeRef), List(List(nameTree, docTree, paramInfos))) + + val annotVal = ValDef( + nme.annotation, + TypeTree(), + instantiateAnnotation(mainAnnot) + ) + val infoVal = ValDef( + nme.info, + TypeTree(), + cmdInfo + ) + val command = ValDef( + nme.command, + TypeTree(), + Apply( + Select(Ident(nme.annotation), nme.command), + List(Ident(nme.info), Ident(nme.args)) + ) + ) + val argsVal = ValDef( + nme.cmd, + TypeTree(), + Select(Ident(nme.command), nme.get) + ) + val run = Apply(Select(Ident(nme.annotation), nme.run), mainCall) + val body0 = If( + Select(Ident(nme.command), nme.isDefined), + Block(argsVal :: args, run), + EmptyTree + ) + val body = Block(List(annotVal, infoVal, command), body0) // TODO add `if (cmd.nonEmpty)` + + val mainArg = ValDef(nme.args, TypeTree(defn.ArrayType.appliedTo(defn.StringType)), EmptyTree) + .withFlags(Param) + /** Replace typed `Ident`s that have been typed with a TypeSplice with the reference to the symbol. 
+ * The annotations will be retype-checked in another scope that may not have the same imports. + */ + def insertTypeSplices = new TreeMap { + override def transform(tree: Tree)(using Context): Tree = tree match + case tree: tpd.Ident @unchecked => TypedSplice(tree) + case tree => super.transform(tree) + } + val annots = mainFun.annotations + .filterNot(_.matches(defn.MainAnnotationClass)) + .map(annot => insertTypeSplices.transform(annot.tree)) + val mainMeth = DefDef(nme.main, (mainArg :: Nil) :: Nil, TypeTree(defn.UnitType), body) + .withFlags(JavaStatic) + .withAnnotations(annots) + val mainTempl = Template(emptyConstructor, Nil, Nil, EmptyValDef, mainMeth :: Nil) + val mainCls = TypeDef(mainFun.name.toTypeName, mainTempl) + .withFlags(Final | Invisible) + mainCls.withSpan(mainAnnot.tree.span.toSynthetic) + end generateMainClass + + if (!mainFun.owner.isStaticOwner) + report.error(em"main method is not statically accessible", pos) + None + else mainFun.info match { + case _: ExprType => + Some(generateMainClass(unitToValue(ref(mainFun.termRef)), Nil, Nil)) + case mt: MethodType => + if (mt.isImplicitMethod) + report.error(em"main method cannot have implicit parameters", pos) + None + else mt.resType match + case restpe: MethodType => + report.error(em"main method cannot be curried", pos) + None + case _ => + Some(generateMainClass(unitToValue(Apply(ref(mainFun.termRef), argRefs(mt))), argValDefs(mt), parameterInfos(mt))) + case _: PolyType => + report.error(em"main method cannot have type parameters", pos) + None + case _ => + report.error(em"main can only annotate a method", pos) + None + } + } + + /** A class responsible for extracting the docstrings of a method. */ + private class Documentation(docComment: Option[Comment]): + import util.CommentParsing._ + + /** The main part of the documentation. */ + lazy val mainDoc: String = _mainDoc + /** The parameters identified by @param. Maps from parameter name to its documentation. 
*/ + lazy val argDocs: Map[String, String] = _argDocs + + private var _mainDoc: String = "" + private var _argDocs: Map[String, String] = Map() + + docComment match { + case Some(comment) => if comment.isDocComment then parseDocComment(comment.raw) else _mainDoc = comment.raw + case None => + } + + private def cleanComment(raw: String): String = + var lines: Seq[String] = raw.trim.nn.split('\n').nn.toSeq + lines = lines.map(l => l.substring(skipLineLead(l, -1), l.length).nn.trim.nn) + var s = lines.foldLeft("") { + case ("", s2) => s2 + case (s1, "") if s1.last == '\n' => s1 // Multiple newlines are kept as single newlines + case (s1, "") => s1 + '\n' + case (s1, s2) if s1.last == '\n' => s1 + s2 + case (s1, s2) => s1 + ' ' + s2 + } + s.replaceAll(raw"\[\[", "").nn.replaceAll(raw"\]\]", "").nn.trim.nn + + private def parseDocComment(raw: String): Unit = + // Positions of the sections (@) in the docstring + val tidx: List[(Int, Int)] = tagIndex(raw) + + // Parse main comment + var mainComment: String = raw.substring(skipLineLead(raw, 0), startTag(raw, tidx)).nn + _mainDoc = cleanComment(mainComment) + + // Parse arguments comments + val argsCommentsSpans: Map[String, (Int, Int)] = paramDocs(raw, "@param", tidx) + val argsCommentsTextSpans = argsCommentsSpans.view.mapValues(extractSectionText(raw, _)) + val argsCommentsTexts = argsCommentsTextSpans.mapValues({ case (beg, end) => raw.substring(beg, end).nn }) + _argDocs = argsCommentsTexts.mapValues(cleanComment(_)).toMap + end Documentation +} diff --git a/tests/pos-with-compiler-cc/dotc/ast/NavigateAST.scala b/tests/pos-with-compiler-cc/dotc/ast/NavigateAST.scala new file mode 100644 index 000000000000..054ffe66f323 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/ast/NavigateAST.scala @@ -0,0 +1,129 @@ +package dotty.tools.dotc +package ast + +import core.Contexts._ +import core.Decorators._ +import util.Spans._ +import Trees.{MemberDef, DefTree, WithLazyField} +import dotty.tools.dotc.core.Types.AnnotatedType 
+import dotty.tools.dotc.core.Types.ImportType +import dotty.tools.dotc.core.Types.Type + +/** Utility functions to go from typed to untyped ASTs */ +// TODO: Handle trees with mixed source files +object NavigateAST { + + /** The untyped tree corresponding to typed tree `tree` in the compilation + * unit specified by `ctx` + */ + def toUntyped(tree: tpd.Tree)(using Context): untpd.Tree = + untypedPath(tree, exactMatch = true) match { + case (utree: untpd.Tree) :: _ => + utree + case _ => + val loosePath = untypedPath(tree, exactMatch = false) + throw new + Error(i"""no untyped tree for $tree, pos = ${tree.sourcePos} + |best matching path =\n$loosePath%\n====\n% + |path positions = ${loosePath.map(_.sourcePos)}""") + } + + /** The reverse path of untyped trees starting with a tree that closest matches + * `tree` and ending in the untyped tree at the root of the compilation unit + * specified by `ctx`. + * @param exactMatch If `true`, the path must start with a node that exactly + * matches `tree`, or `Nil` is returned. + * If `false` the path might start with a node enclosing + * the logical position of `tree`. + * Note: A complication concerns member definitions. ValDefs and DefDefs + * have after desugaring a position that spans just the name of the symbol being + * defined and nothing else. So we look instead for an untyped tree approximating the + * envelope of the definition, and declare success if we find another DefTree. + */ + def untypedPath(tree: tpd.Tree, exactMatch: Boolean = false)(using Context): List[Positioned] = + tree match { + case tree: MemberDef[?] => + untypedPath(tree.span) match { + case path @ (last: DefTree[?]) :: _ => path + case path if !exactMatch => path + case _ => Nil + } + case _ => + untypedPath(tree.span) match { + case (path @ last :: _) if last.span == tree.span || !exactMatch => path + case _ => Nil + } + } + + /** The reverse part of the untyped root of the compilation unit of `ctx` to + * the given `span`. 
+ */ + def untypedPath(span: Span)(using Context): List[Positioned] = + pathTo(span, List(ctx.compilationUnit.untpdTree)) + + + /** The reverse path from any node in `from` to the node that closest encloses `span`, + * or `Nil` if no such path exists. If a non-empty path is returned it starts with + * the node closest enclosing `span` and ends with one of the nodes in `from`. + * + * @param skipZeroExtent If true, skip over zero-extent nodes in the search. These nodes + * do not correspond to code the user wrote since their start and + * end point are the same, so this is useful when trying to reconcile + * nodes with source code. + */ + def pathTo(span: Span, from: List[Positioned], skipZeroExtent: Boolean = false)(using Context): List[Positioned] = { + def childPath(it: Iterator[Any], path: List[Positioned]): List[Positioned] = { + var bestFit: List[Positioned] = path + while (it.hasNext) { + val path1 = it.next() match { + case p: Positioned => singlePath(p, path) + case m: untpd.Modifiers => childPath(m.productIterator, path) + case xs: List[?] 
=> childPath(xs.iterator, path) + case _ => path + } + if ((path1 ne path) && + ((bestFit eq path) || + bestFit.head.span != path1.head.span && + bestFit.head.span.contains(path1.head.span))) + bestFit = path1 + } + bestFit + } + /* + * Annotations trees are located in the Type + */ + def unpackAnnotations(t: Type, path: List[Positioned]): List[Positioned] = + t match { + case ann: AnnotatedType => + unpackAnnotations(ann.parent, childPath(ann.annot.tree.productIterator, path)) + case imp: ImportType => + childPath(imp.expr.productIterator, path) + case other => + path + } + def singlePath(p: Positioned, path: List[Positioned]): List[Positioned] = + if (p.span.exists && !(skipZeroExtent && p.span.isZeroExtent) && p.span.contains(span)) { + // FIXME: We shouldn't be manually forcing trees here, we should replace + // our usage of `productIterator` by something in `Positioned` that takes + // care of low-level details like this for us. + p match { + case p: WithLazyField[?] => + p.forceIfLazy + case _ => + } + val iterator = p match + case defdef: DefTree[?] 
=> + p.productIterator ++ defdef.mods.productIterator + case _ => + p.productIterator + childPath(iterator, p :: path) + } + else { + p match { + case t: untpd.TypeTree => unpackAnnotations(t.typeOpt, path) + case _ => path + } + } + childPath(from.iterator, Nil) + } +} diff --git a/tests/pos-with-compiler-cc/dotc/ast/Positioned.scala b/tests/pos-with-compiler-cc/dotc/ast/Positioned.scala new file mode 100644 index 000000000000..fd30d441a6ee --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/ast/Positioned.scala @@ -0,0 +1,246 @@ +package dotty.tools +package dotc +package ast + +import util.Spans._ +import util.{SourceFile, SourcePosition, SrcPos} +import core.Contexts._ +import core.Decorators._ +import core.NameOps._ +import core.Flags.{JavaDefined, ExtensionMethod} +import core.StdNames.nme +import ast.Trees.mods +import annotation.constructorOnly +import annotation.internal.sharable + +/** A base class for things that have positions (currently: modifiers and trees) + */ +abstract class Positioned(implicit @constructorOnly src: SourceFile) extends SrcPos, Product, Cloneable, caps.Pure { + import Positioned.{ids, nextId, debugId} + + private var mySpan: Span = _ + + private var mySource: SourceFile = src + + /** A unique identifier in case -Yshow-tree-ids, or -Ydebug-tree-with-id + * is set, -1 otherwise. 
+ */ + def uniqueId: Int = + if ids != null && ids.nn.containsKey(this) then ids.nn.get(this).nn else -1 + + private def allocateId() = + if ids != null then + val ownId = nextId + nextId += 1 + ids.nn.put(this: @unchecked, ownId) + if ownId == debugId then + println(s"Debug tree (id=$debugId) creation \n${this: @unchecked}\n") + Thread.dumpStack() + + allocateId() + + /** The span part of the item's position */ + def span: Span = mySpan + + def span_=(span: Span): Unit = + mySpan = span + + span = envelope(src) + + def source: SourceFile = mySource + + def sourcePos(using Context): SourcePosition = source.atSpan(span) + + /** This positioned item, widened to `SrcPos`. Used to make clear we only need the + * position, typically for error reporting. + */ + final def srcPos: SrcPos = this + + /** A positioned item like this one with given `span`. + * If the positioned item is source-derived, a clone is returned. + * If the positioned item is synthetic, the position is updated + * destructively and the item itself is returned. + */ + def withSpan(span: Span): this.type = + if (span == mySpan) this + else { + val newpd: this.type = + if !mySpan.exists then + if span.exists then envelope(source, span.startPos) // fill in children spans + this + else + cloneIn(source) + newpd.span = span + newpd + } + + /** The union of startSpan and the spans of all positioned children that + * have the same source as this node, except that Inlined nodes only + * consider their `call` child. + * + * Side effect: Any descendants without spans have but with the same source as this + * node have their span set to the end position of the envelope of all children to + * the left, or, if that one does not exist, to the start position of the envelope + * of all children to the right. 
+ */ + def envelope(src: SourceFile, startSpan: Span = NoSpan): Span = (this: @unchecked) match { + case Trees.Inlined(call, _, _) => + call.span + case _ => + def include(span: Span, x: Any): Span = x match { + case p: Positioned => + if (p.source != src) span + else if (p.span.exists) span.union(p.span) + else if (span.exists) { + if (span.end != MaxOffset) + p.span = p.envelope(src, span.endPos) + span + } + else // No span available to assign yet, signal this by returning a span with MaxOffset end + Span(MaxOffset, MaxOffset) + case m: untpd.Modifiers => + include(include(span, m.mods), m.annotations) + case y :: ys => + include(include(span, y), ys) + case _ => span + } + val limit = productArity + def includeChildren(span: Span, n: Int): Span = + if (n < limit) includeChildren(include(span, productElement(n): @unchecked), n + 1) + else span + val span1 = includeChildren(startSpan, 0) + val span2 = + if (!span1.exists || span1.end != MaxOffset) + span1 + else if (span1.start == MaxOffset) + // No positioned child was found + NoSpan + else + ///println(s"revisit $uniqueId with $span1") + // We have some children left whose span could not be assigned. + // Go through it again with the known start position. + includeChildren(span1.startPos, 0) + span2.toSynthetic + } + + /** Clone this node but assign it a fresh id which marks it as a node in `file`. */ + def cloneIn(src: SourceFile): this.type = { + val newpd: this.type = clone.asInstanceOf[this.type] + newpd.allocateId() + newpd.mySource = src + newpd + } + + def contains(that: Positioned): Boolean = { + def isParent(x: Any): Boolean = x match { + case x: Positioned => + x.contains(that) + case m: untpd.Modifiers => + m.mods.exists(isParent) || m.annotations.exists(isParent) + case xs: List[?] 
=> + xs.exists(isParent) + case _ => + false + } + (this eq that) || + (this.span contains that.span) && { + var n = productArity + var found = false + while (!found && n > 0) { + n -= 1 + found = isParent(productElement(n)) + } + found + } + } + + /** Check that all positioned items in this tree satisfy the following conditions: + * - Parent spans contain child spans + * - If item is a non-empty tree, it has a position + */ + def checkPos(nonOverlapping: Boolean)(using Context): Unit = try { + import untpd._ + var lastPositioned: Positioned | Null = null + var lastSpan = NoSpan + def check(p: Any): Unit = p match { + case p: Positioned => + assert(span contains p.span, + i"""position error, parent span does not contain child span + |parent = $this # $uniqueId, + |parent span = $span, + |child = $p # ${p.uniqueId}, + |child span = ${p.span}""".stripMargin) + p match { + case tree: Tree if !tree.isEmpty => + assert(tree.span.exists, + s"position error: position not set for $tree # ${tree.uniqueId}") + case _ => + } + if nonOverlapping then + this match { + case _: XMLBlock => + // FIXME: Trees generated by the XML parser do not satisfy `checkPos` + case _: WildcardFunction + if lastPositioned.isInstanceOf[ValDef] && !p.isInstanceOf[ValDef] => + // ignore transition from last wildcard parameter to body + case _ => + assert(!lastSpan.exists || !p.span.exists || lastSpan.end <= p.span.start, + i"""position error, child positions overlap or in wrong order + |parent = $this + |1st child = $lastPositioned + |1st child span = $lastSpan + |2nd child = $p + |2nd child span = ${p.span}""".stripMargin) + } + lastPositioned = p + lastSpan = p.span + p.checkPos(nonOverlapping) + case m: untpd.Modifiers => + m.annotations.foreach(check) + m.mods.foreach(check) + case xs: List[?] 
=> + xs.foreach(check) + case _ => + } + this match { + case tree: DefDef if tree.name == nme.CONSTRUCTOR && tree.mods.is(JavaDefined) => + // Special treatment for constructors coming from Java: + // Leave out leading type params, they are copied with wrong positions from parent class + check(tree.mods) + check(tree.trailingParamss) + case tree: DefDef if tree.mods.is(ExtensionMethod) => + tree.paramss match + case vparams1 :: vparams2 :: rest if tree.name.isRightAssocOperatorName => + // omit check for right-associative extension methods; their parameters were swapped + case _ => + check(tree.paramss) + check(tree.tpt) + check(tree.rhs) + case _ => + val end = productArity + var n = 0 + while (n < end) { + check(productElement(n)) + n += 1 + } + } + } + catch { + case ex: AssertionError => + println(i"error while checking $this") + throw ex + } +} + +object Positioned { + @sharable private var debugId = Int.MinValue + @sharable private var ids: java.util.WeakHashMap[Positioned, Int] | Null = null + @sharable private var nextId: Int = 0 + + def init(using Context): Unit = + debugId = ctx.settings.YdebugTreeWithId.value + if ids == null && ctx.settings.YshowTreeIds.value + || debugId != ctx.settings.YdebugTreeWithId.default + then + ids = java.util.WeakHashMap() +} diff --git a/tests/pos-with-compiler-cc/dotc/ast/TreeInfo.scala b/tests/pos-with-compiler-cc/dotc/ast/TreeInfo.scala new file mode 100644 index 000000000000..b650a0088de4 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/ast/TreeInfo.scala @@ -0,0 +1,1070 @@ +package dotty.tools +package dotc +package ast + +import core._ +import Flags._, Trees._, Types._, Contexts._ +import Names._, StdNames._, NameOps._, Symbols._ +import typer.ConstFold +import reporting.trace +import dotty.tools.dotc.transform.SymUtils._ +import Decorators._ +import Constants.Constant +import scala.collection.mutable + +import scala.annotation.tailrec + +trait TreeInfo[T <: Untyped] { self: Trees.Instance[T] => + + def 
unsplice(tree: Trees.Tree[T]): Trees.Tree[T] = tree + + def isDeclarationOrTypeDef(tree: Tree): Boolean = unsplice(tree) match { + case DefDef(_, _, _, EmptyTree) + | ValDef(_, _, EmptyTree) + | TypeDef(_, _) => true + case _ => false + } + + def isOpAssign(tree: Tree): Boolean = unsplice(tree) match { + case Apply(fn, _ :: _) => + unsplice(fn) match { + case Select(_, name) if name.isOpAssignmentName => true + case _ => false + } + case _ => false + } + + class MatchingArgs(params: List[Symbol], args: List[Tree])(using Context) { + def foreach(f: (Symbol, Tree) => Unit): Boolean = { + def recur(params: List[Symbol], args: List[Tree]): Boolean = params match { + case Nil => args.isEmpty + case param :: params1 => + if (param.info.isRepeatedParam) { + for (arg <- args) f(param, arg) + true + } + else args match { + case Nil => false + case arg :: args1 => + f(param, args.head) + recur(params1, args1) + } + } + recur(params, args) + } + def zipped: List[(Symbol, Tree)] = map((_, _)) + def map[R](f: (Symbol, Tree) => R): List[R] = { + val b = List.newBuilder[R] + foreach(b += f(_, _)) + b.result() + } + } + + /** The method part of an application node, possibly enclosed in a block + * with only valdefs as statements. the reason for also considering blocks + * is that named arguments can transform a call into a block, e.g. + * (b = foo, a = bar) + * is transformed to + * { val x$1 = foo + * val x$2 = bar + * (x$2, x$1) + * } + */ + def methPart(tree: Tree): Tree = stripApply(tree) match { + case TypeApply(fn, _) => methPart(fn) + case AppliedTypeTree(fn, _) => methPart(fn) // !!! should not be needed + case Block(stats, expr) => methPart(expr) + case mp => mp + } + + /** If this is an application, its function part, stripping all + * Apply nodes (but leaving TypeApply nodes in). Otherwise the tree itself. 
+ */ + def stripApply(tree: Tree): Tree = unsplice(tree) match { + case Apply(fn, _) => stripApply(fn) + case _ => tree + } + + /** If this is a block, its expression part */ + def stripBlock(tree: Tree): Tree = unsplice(tree) match { + case Block(_, expr) => stripBlock(expr) + case Inlined(_, _, expr) => stripBlock(expr) + case _ => tree + } + + def stripInlined(tree: Tree): Tree = unsplice(tree) match { + case Inlined(_, _, expr) => stripInlined(expr) + case _ => tree + } + + def stripAnnotated(tree: Tree): Tree = tree match { + case Annotated(arg, _) => arg + case _ => tree + } + + /** The number of arguments in an application */ + def numArgs(tree: Tree): Int = unsplice(tree) match { + case Apply(fn, args) => numArgs(fn) + args.length + case TypeApply(fn, _) => numArgs(fn) + case Block(_, expr) => numArgs(expr) + case _ => 0 + } + + /** All term arguments of an application in a single flattened list */ + def allArguments(tree: Tree): List[Tree] = unsplice(tree) match { + case Apply(fn, args) => allArguments(fn) ::: args + case TypeApply(fn, _) => allArguments(fn) + case Block(_, expr) => allArguments(expr) + case _ => Nil + } + + /** Is tree explicitly parameterized with type arguments? */ + def hasExplicitTypeArgs(tree: Tree): Boolean = tree match + case TypeApply(tycon, args) => + args.exists(arg => !arg.span.isZeroExtent && !tycon.span.contains(arg.span)) + case _ => false + + /** Is tree a path? */ + def isPath(tree: Tree): Boolean = unsplice(tree) match { + case Ident(_) | This(_) | Super(_, _) => true + case Select(qual, _) => isPath(qual) + case _ => false + } + + /** Is tree a self constructor call this(...)? I.e. a call to a constructor of the + * same object? + */ + def isSelfConstrCall(tree: Tree): Boolean = methPart(tree) match { + case Ident(nme.CONSTRUCTOR) | Select(This(_), nme.CONSTRUCTOR) => true + case _ => false + } + + /** Is tree a super constructor call? 
+ */ + def isSuperConstrCall(tree: Tree): Boolean = methPart(tree) match { + case Select(Super(_, _), nme.CONSTRUCTOR) => true + case _ => false + } + + def isSuperSelection(tree: Tree): Boolean = unsplice(tree) match { + case Select(Super(_, _), _) => true + case _ => false + } + + def isSelfOrSuperConstrCall(tree: Tree): Boolean = methPart(tree) match { + case Ident(nme.CONSTRUCTOR) + | Select(This(_), nme.CONSTRUCTOR) + | Select(Super(_, _), nme.CONSTRUCTOR) => true + case _ => false + } + + /** Is tree a backquoted identifier or definition */ + def isBackquoted(tree: Tree): Boolean = tree.hasAttachment(Backquoted) + + /** Is tree a variable pattern? */ + def isVarPattern(pat: Tree): Boolean = unsplice(pat) match { + case x: Ident => x.name.isVarPattern && !isBackquoted(x) + case _ => false + } + + /** The first constructor definition in `stats` */ + def firstConstructor(stats: List[Tree]): Tree = stats match { + case (meth: DefDef) :: _ if meth.name.isConstructorName => meth + case stat :: stats => firstConstructor(stats) + case nil => EmptyTree + } + + /** Is tpt a vararg type of the form T* or => T*? */ + def isRepeatedParamType(tpt: Tree)(using Context): Boolean = stripByNameType(tpt) match { + case tpt: TypeTree => tpt.typeOpt.isRepeatedParam + case AppliedTypeTree(Select(_, tpnme.REPEATED_PARAM_CLASS), _) => true + case _ => false + } + + /** Is this argument node of the form *, or is it a reference to + * such an argument ? The latter case can happen when an argument is lifted. + */ + def isWildcardStarArg(tree: Tree)(using Context): Boolean = unbind(tree) match { + case Typed(Ident(nme.WILDCARD_STAR), _) => true + case Typed(_, Ident(tpnme.WILDCARD_STAR)) => true + case Typed(_, tpt: TypeTree) => tpt.typeOpt.isRepeatedParam + case NamedArg(_, arg) => isWildcardStarArg(arg) + case arg => arg.typeOpt.widen.isRepeatedParam + } + + /** Is tree a type tree of the form `=> T` or (under pureFunctions) `{refs}-> T`? 
*/ + def isByNameType(tree: Tree)(using Context): Boolean = + stripByNameType(tree) ne tree + + /** Strip `=> T` to `T` and (under pureFunctions) `{refs}-> T` to `T` */ + def stripByNameType(tree: Tree)(using Context): Tree = unsplice(tree) match + case ByNameTypeTree(t1) => t1 + case untpd.CapturingTypeTree(_, parent) => + val parent1 = stripByNameType(parent) + if parent1 eq parent then tree else parent1 + case _ => tree + + /** All type and value parameter symbols of this DefDef */ + def allParamSyms(ddef: DefDef)(using Context): List[Symbol] = + ddef.paramss.flatten.map(_.symbol) + + /** Does this argument list end with an argument of the form : _* ? */ + def isWildcardStarArgList(trees: List[Tree])(using Context): Boolean = + trees.nonEmpty && isWildcardStarArg(trees.last) + + /** Is the argument a wildcard argument of the form `_` or `x @ _`? + */ + def isWildcardArg(tree: Tree): Boolean = unbind(tree) match { + case Ident(nme.WILDCARD) => true + case _ => false + } + + /** Does this list contain a named argument tree? */ + def hasNamedArg(args: List[Any]): Boolean = args exists isNamedArg + val isNamedArg: Any => Boolean = (arg: Any) => arg.isInstanceOf[Trees.NamedArg[_]] + + /** Is this pattern node a catch-all (wildcard or variable) pattern? */ + def isDefaultCase(cdef: CaseDef): Boolean = cdef match { + case CaseDef(pat, EmptyTree, _) => isWildcardArg(pat) + case _ => false + } + + /** Does this CaseDef catch Throwable? */ + def catchesThrowable(cdef: CaseDef)(using Context): Boolean = + catchesAllOf(cdef, defn.ThrowableType) + + /** Does this CaseDef catch everything of a certain Type? */ + def catchesAllOf(cdef: CaseDef, threshold: Type)(using Context): Boolean = + isDefaultCase(cdef) || + cdef.guard.isEmpty && { + unbind(cdef.pat) match { + case Typed(Ident(nme.WILDCARD), tpt) => threshold <:< tpt.typeOpt + case _ => false + } + } + + /** Is this case guarded? 
*/ + def isGuardedCase(cdef: CaseDef): Boolean = cdef.guard ne EmptyTree + + /** Is this parameter list a using clause? */ + def isUsingClause(params: ParamClause)(using Context): Boolean = params match + case ValDefs(vparam :: _) => + val sym = vparam.symbol + if sym.exists then sym.is(Given) else vparam.mods.is(Given) + case _ => + false + + def isUsingOrTypeParamClause(params: ParamClause)(using Context): Boolean = params match + case TypeDefs(_) => true + case _ => isUsingClause(params) + + def isTypeParamClause(params: ParamClause)(using Context): Boolean = params match + case TypeDefs(_) => true + case _ => false + + private val languageSubCategories = Set(nme.experimental, nme.deprecated) + + /** If `path` looks like a language import, `Some(name)` where name + * is `experimental` if that sub-module is imported, and the empty + * term name otherwise. + */ + def languageImport(path: Tree): Option[TermName] = path match + case Select(p1, name: TermName) if languageSubCategories.contains(name) => + languageImport(p1) match + case Some(EmptyTermName) => Some(name) + case _ => None + case p1: RefTree if p1.name == nme.language => + p1.qualifier match + case EmptyTree => Some(EmptyTermName) + case p2: RefTree if p2.name == nme.scala => + p2.qualifier match + case EmptyTree => Some(EmptyTermName) + case Ident(nme.ROOTPKG) => Some(EmptyTermName) + case _ => None + case _ => None + case _ => None + + /** The underlying pattern ignoring any bindings */ + def unbind(x: Tree): Tree = unsplice(x) match { + case Bind(_, y) => unbind(y) + case y => y + } + + /** The largest subset of {NoInits, PureInterface} that a + * trait or class with these parents can have as flags. 
+ */ + def parentsKind(parents: List[Tree])(using Context): FlagSet = parents match { + case Nil => NoInitsInterface + case Apply(_, _ :: _) :: _ => EmptyFlags + case _ :: parents1 => parentsKind(parents1) + } + + /** Checks whether predicate `p` is true for all result parts of this expression, + * where we zoom into Ifs, Matches, and Blocks. + */ + def forallResults(tree: Tree, p: Tree => Boolean): Boolean = tree match { + case If(_, thenp, elsep) => forallResults(thenp, p) && forallResults(elsep, p) + case Match(_, cases) => cases forall (c => forallResults(c.body, p)) + case Block(_, expr) => forallResults(expr, p) + case _ => p(tree) + } +} + +trait UntypedTreeInfo extends TreeInfo[Untyped] { self: Trees.Instance[Untyped] => + import untpd._ + + /** The underlying tree when stripping any TypedSplice or Parens nodes */ + override def unsplice(tree: Tree): Tree = tree match { + case TypedSplice(tree1) => tree1 + case Parens(tree1) => unsplice(tree1) + case _ => tree + } + + def functionWithUnknownParamType(tree: Tree): Option[Tree] = tree match { + case Function(args, _) => + if (args.exists { + case ValDef(_, tpt, _) => tpt.isEmpty + case _ => false + }) Some(tree) + else None + case Match(EmptyTree, _) => + Some(tree) + case Block(Nil, expr) => + functionWithUnknownParamType(expr) + case _ => + None + } + + def isFunctionWithUnknownParamType(tree: Tree): Boolean = + functionWithUnknownParamType(tree).isDefined + + def isFunction(tree: Tree): Boolean = tree match + case Function(_, _) | Match(EmptyTree, _) => true + case Block(Nil, expr) => isFunction(expr) + case _ => false + + /** Is `tree` a context function or closure, possibly nested in a block? 
*/ + def isContextualClosure(tree: Tree)(using Context): Boolean = unsplice(tree) match { + case tree: FunctionWithMods => tree.mods.is(Given) + case Function((param: untpd.ValDef) :: _, _) => param.mods.is(Given) + case Closure(_, meth, _) => true + case Block(Nil, expr) => isContextualClosure(expr) + case Block(DefDef(nme.ANON_FUN, params :: _, _, _) :: Nil, cl: Closure) => + if params.isEmpty then + cl.tpt.eq(untpd.ContextualEmptyTree) || defn.isContextFunctionType(cl.tpt.typeOpt) + else + isUsingClause(params) + case _ => false + } + + /** The largest subset of {NoInits, PureInterface} that a + * trait or class enclosing this statement can have as flags. + */ + private def defKind(tree: Tree)(using Context): FlagSet = unsplice(tree) match { + case EmptyTree | _: Import => NoInitsInterface + case tree: TypeDef => if (tree.isClassDef) NoInits else NoInitsInterface + case tree: DefDef => + if tree.unforcedRhs == EmptyTree + && tree.paramss.forall { + case ValDefs(vparams) => vparams.forall(_.rhs.isEmpty) + case _ => true + } + then + NoInitsInterface + else if tree.mods.is(Given) && tree.paramss.isEmpty then + EmptyFlags // might become a lazy val: TODO: check whether we need to suppress NoInits once we have new lazy val impl + else + NoInits + case tree: ValDef => if (tree.unforcedRhs == EmptyTree) NoInitsInterface else EmptyFlags + case _ => EmptyFlags + } + + /** The largest subset of {NoInits, PureInterface} that a + * trait or class with this body can have as flags. 
+ */ + def bodyKind(body: List[Tree])(using Context): FlagSet = + body.foldLeft(NoInitsInterface)((fs, stat) => fs & defKind(stat)) + + /** Info of a variable in a pattern: The named tree and its type */ + type VarInfo = (NameTree, Tree) + + /** An extractor for trees of the form `id` or `id: T` */ + object IdPattern { + def unapply(tree: Tree)(using Context): Option[VarInfo] = tree match { + case id: Ident if id.name != nme.WILDCARD => Some(id, TypeTree()) + case Typed(id: Ident, tpt) => Some((id, tpt)) + case _ => None + } + } + + /** Under pureFunctions: A builder and extractor for `=> T`, which is an alias for `{*}-> T`. + * Only trees of the form `=> T` are matched; trees written directly as `{*}-> T` + * are ignored by the extractor. + */ + object ImpureByNameTypeTree: + + def apply(tp: ByNameTypeTree)(using Context): untpd.CapturingTypeTree = + untpd.CapturingTypeTree( + untpd.captureRoot.withSpan(tp.span.startPos) :: Nil, tp) + + def unapply(tp: Tree)(using Context): Option[ByNameTypeTree] = tp match + case untpd.CapturingTypeTree(id @ Select(_, nme.CAPTURE_ROOT) :: Nil, bntp: ByNameTypeTree) + if id.span == bntp.span.startPos => Some(bntp) + case _ => None + end ImpureByNameTypeTree +} + +trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => + import TreeInfo._ + import tpd._ + + /** The purity level of this statement. + * @return Pure if statement has no side effects + * Idempotent if running the statement a second time has no side effects + * Impure otherwise + */ + def statPurity(tree: Tree)(using Context): PurityLevel = unsplice(tree) match { + case EmptyTree + | TypeDef(_, _) + | Import(_, _) + | DefDef(_, _, _, _) => + Pure + case vdef @ ValDef(_, _, _) => + if (vdef.symbol.flags is Mutable) Impure else exprPurity(vdef.rhs) `min` Pure + case _ => + Impure + // TODO: It seem like this should be exprPurity(tree) + // But if we do that the repl/vars test break. Need to figure out why that's the case. 
+ } + + /** The purity level of this expression. See docs for PurityLevel for what that means + * + * Note that purity and idempotency are treated differently. + * References to modules and lazy vals are impure (side-effecting) both because + * side-effecting code may be executed and because the first reference + * takes a different code path than all to follow; but they are idempotent + * because running the expression a second time gives the cached result. + */ + def exprPurity(tree: Tree)(using Context): PurityLevel = unsplice(tree) match { + case EmptyTree + | This(_) + | Super(_, _) + | Literal(_) => + PurePath + case Ident(_) => + refPurity(tree) + case Select(qual, _) => + if (tree.symbol.is(Erased)) Pure + else refPurity(tree) `min` exprPurity(qual) + case New(_) | Closure(_, _, _) => + Pure + case TypeApply(fn, _) => + if (fn.symbol.is(Erased) || fn.symbol == defn.QuotedTypeModule_of || fn.symbol == defn.Predef_classOf) Pure else exprPurity(fn) + case Apply(fn, args) => + if isPureApply(tree, fn) then + minOf(exprPurity(fn), args.map(exprPurity)) `min` Pure + else if fn.symbol.is(Erased) then + Pure + else if fn.symbol.isStableMember /* && fn.symbol.is(Lazy) */ then + minOf(exprPurity(fn), args.map(exprPurity)) `min` Idempotent + else + Impure + case Typed(expr, _) => + exprPurity(expr) + case Block(stats, expr) => + minOf(exprPurity(expr), stats.map(statPurity)) + case Inlined(_, bindings, expr) => + minOf(exprPurity(expr), bindings.map(statPurity)) + case NamedArg(_, expr) => + exprPurity(expr) + case _ => + Impure + } + + private def minOf(l0: PurityLevel, ls: List[PurityLevel]) = ls.foldLeft(l0)(_ `min` _) + + def isPurePath(tree: Tree)(using Context): Boolean = tree.tpe match { + case tpe: ConstantType => exprPurity(tree) >= Pure + case _ => exprPurity(tree) == PurePath + } + + def isPureExpr(tree: Tree)(using Context): Boolean = + exprPurity(tree) >= Pure + + def isIdempotentPath(tree: Tree)(using Context): Boolean = tree.tpe match { + case tpe: 
ConstantType => exprPurity(tree) >= Idempotent + case _ => exprPurity(tree) >= IdempotentPath + } + + def isIdempotentExpr(tree: Tree)(using Context): Boolean = + exprPurity(tree) >= Idempotent + + def isPureBinding(tree: Tree)(using Context): Boolean = statPurity(tree) >= Pure + + /** Is the application `tree` with function part `fn` known to be pure? + * Function value and arguments can still be impure. + */ + def isPureApply(tree: Tree, fn: Tree)(using Context): Boolean = + def isKnownPureOp(sym: Symbol) = + sym.owner.isPrimitiveValueClass + || sym.owner == defn.StringClass + || defn.pureMethods.contains(sym) + tree.tpe.isInstanceOf[ConstantType] && tree.symbol != NoSymbol && isKnownPureOp(tree.symbol) // A constant expression with pure arguments is pure. + || fn.symbol.isStableMember && !fn.symbol.is(Lazy) // constructors of no-inits classes are stable + + /** The purity level of this reference. + * @return + * PurePath if reference is (nonlazy and stable) + * or to a parameterized function + * or its type is a constant type + * IdempotentPath if reference is lazy and stable + * Impure otherwise + * @DarkDimius: need to make sure that lazy accessor methods have Lazy and Stable + * flags set. 
+ */ + def refPurity(tree: Tree)(using Context): PurityLevel = { + val sym = tree.symbol + if (!tree.hasType) Impure + else if !tree.tpe.widen.isParameterless then PurePath + else if sym.is(Erased) then PurePath + else if tree.tpe.isInstanceOf[ConstantType] then PurePath + else if (!sym.isStableMember) Impure + else if (sym.is(Module)) + if (sym.moduleClass.isNoInitsRealClass) PurePath else IdempotentPath + else if (sym.is(Lazy)) IdempotentPath + else if sym.isAllOf(InlineParam) then Impure + else PurePath + } + + def isPureRef(tree: Tree)(using Context): Boolean = + refPurity(tree) == PurePath + def isIdempotentRef(tree: Tree)(using Context): Boolean = + refPurity(tree) >= IdempotentPath + + /** (1) If `tree` is a constant expression, its value as a Literal, + * or `tree` itself otherwise. + * + * Note: Demanding idempotency instead of purity in literalize is strictly speaking too loose. + * Example + * + * object O { final val x = 42; println("43") } + * O.x + * + * Strictly speaking we can't replace `O.x` with `42`. But this would make + * most expressions non-constant. Maybe we can change the spec to accept this + * kind of eliding behavior. Or else enforce true purity in the compiler. + * The choice will be affected by what we will do with `inline` and with + * Singleton type bounds (see SIP 23). Presumably + * + * object O1 { val x: Singleton = 42; println("43") } + * object O2 { inline val x = 42; println("43") } + * + * should behave differently. + * + * O1.x should have the same effect as { println("43"); 42 } + * + * whereas + * + * O2.x = 42 + * + * Revisit this issue once we have standardized on `inline`. Then we can demand + * purity of the prefix unless the selection goes to a inline val. + * + * Note: This method should be applied to all term tree nodes that are not literals, + * that can be idempotent, and that can have constant types. 
So far, only nodes + * of the following classes qualify: + * + * Ident + * Select + * TypeApply + * + * (2) A primitive unary operator expression `pre.op` where `op` is one of `+`, `-`, `~`, `!` + * that has a constant type `ConstantType(v)` but that is not a constant expression + * (i.e. `pre` has side-effects) is translated to + * + * { pre; v } + * + * (3) An expression `pre.getClass[..]()` that has a constant type `ConstantType(v)` but where + * `pre` has side-effects is translated to: + * + * { pre; v } + * + * This avoids the situation where we have a Select node that does not have a symbol. + */ + def constToLiteral(tree: Tree)(using Context): Tree = { + assert(!tree.isType) + val tree1 = ConstFold(tree) + tree1.tpe.widenTermRefExpr.dealias.normalized match { + case ConstantType(Constant(_: Type)) if tree.isInstanceOf[Block] => + // We can't rewrite `{ class A; classOf[A] }` to `classOf[A]`, so we leave + // blocks returning a class literal alone, even if they're idempotent. + tree1 + case ConstantType(value) => + def dropOp(t: Tree): Tree = t match + case Select(pre, _) if t.tpe.isInstanceOf[ConstantType] => + // it's a primitive unary operator + pre + case Apply(TypeApply(Select(pre, nme.getClass_), _), Nil) => + pre + case _ => + tree1 + + val countsAsPure = + if dropOp(tree1).symbol.isInlineVal + then isIdempotentExpr(tree1) + else isPureExpr(tree1) + + if countsAsPure then Literal(value).withSpan(tree.span) + else + val pre = dropOp(tree1) + if pre eq tree1 then tree1 + else + // it's a primitive unary operator or getClass call; + // Simplify `pre.op` to `{ pre; v }` where `v` is the value of `pre.op` + Block(pre :: Nil, Literal(value)).withSpan(tree.span) + case _ => tree1 + } + } + + def isExtMethodApply(tree: Tree)(using Context): Boolean = methPart(tree) match + case Inlined(call, _, _) => isExtMethodApply(call) + case tree @ Select(qual, nme.apply) => tree.symbol.is(ExtensionMethod) || isExtMethodApply(qual) + case tree => 
tree.symbol.is(ExtensionMethod) + + /** Is symbol potentially a getter of a mutable variable? + */ + def mayBeVarGetter(sym: Symbol)(using Context): Boolean = { + def maybeGetterType(tpe: Type): Boolean = tpe match { + case _: ExprType => true + case tpe: MethodType => tpe.isImplicitMethod + case tpe: PolyType => maybeGetterType(tpe.resultType) + case _ => false + } + sym.owner.isClass && !sym.isStableMember && maybeGetterType(sym.info) + } + + /** Is tree a reference to a mutable variable, or to a potential getter + * that has a setter in the same class? + */ + def isVariableOrGetter(tree: Tree)(using Context): Boolean = { + def sym = tree.symbol + def isVar = sym.is(Mutable) + def isGetter = + mayBeVarGetter(sym) && sym.owner.info.member(sym.name.asTermName.setterName).exists + + unsplice(tree) match { + case Ident(_) => isVar + case Select(_, _) => isVar || isGetter + case Apply(_, _) => + methPart(tree) match { + case Select(qual, nme.apply) => qual.tpe.member(nme.update).exists + case _ => false + } + case _ => false + } + } + + /** Is tree a `this` node which belongs to `enclClass`? 
*/ + def isSelf(tree: Tree, enclClass: Symbol)(using Context): Boolean = unsplice(tree) match { + case This(_) => tree.symbol == enclClass + case _ => false + } + + /** Strips layers of `.asInstanceOf[T]` / `_.$asInstanceOf[T]()` from an expression */ + def stripCast(tree: Tree)(using Context): Tree = { + def isCast(sel: Tree) = sel.symbol.isTypeCast + unsplice(tree) match { + case TypeApply(sel @ Select(inner, _), _) if isCast(sel) => + stripCast(inner) + case Apply(TypeApply(sel @ Select(inner, _), _), Nil) if isCast(sel) => + stripCast(inner) + case t => + t + } + } + + /** The type arguments of a possibly curried call */ + def typeArgss(tree: Tree): List[List[Tree]] = + @tailrec + def loop(tree: Tree, argss: List[List[Tree]]): List[List[Tree]] = tree match + case TypeApply(fn, args) => loop(fn, args :: argss) + case Apply(fn, args) => loop(fn, argss) + case _ => argss + loop(tree, Nil) + + /** The term arguments of a possibly curried call */ + def termArgss(tree: Tree): List[List[Tree]] = + @tailrec + def loop(tree: Tree, argss: List[List[Tree]]): List[List[Tree]] = tree match + case Apply(fn, args) => loop(fn, args :: argss) + case TypeApply(fn, args) => loop(fn, argss) + case _ => argss + loop(tree, Nil) + + /** The type and term arguments of a possibly curried call, in the order they are given */ + def allArgss(tree: Tree): List[List[Tree]] = + @tailrec + def loop(tree: Tree, argss: List[List[Tree]]): List[List[Tree]] = tree match + case tree: GenericApply => loop(tree.fun, tree.args :: argss) + case _ => argss + loop(tree, Nil) + + /** The function part of a possibly curried call. 
Unlike `methPart` this one does + * not decompose blocks + */ + def funPart(tree: Tree): Tree = tree match + case tree: GenericApply => funPart(tree.fun) + case tree => tree + + /** Decompose a template body into parameters and other statements */ + def decomposeTemplateBody(body: List[Tree])(using Context): (List[Tree], List[Tree]) = + body.partition { + case stat: TypeDef => stat.symbol is Flags.Param + case stat: ValOrDefDef => + stat.symbol.is(Flags.ParamAccessor) && !stat.symbol.isSetter + case _ => false + } + + /** An extractor for closures, either contained in a block or standalone. + */ + object closure { + def unapply(tree: Tree): Option[(List[Tree], Tree, Tree)] = tree match { + case Block(_, expr) => unapply(expr) + case Closure(env, meth, tpt) => Some(env, meth, tpt) + case Typed(expr, _) => unapply(expr) + case _ => None + } + } + + /** An extractor for def of a closure contained the block of the closure. */ + object closureDef { + def unapply(tree: Tree)(using Context): Option[DefDef] = tree match { + case Block((meth : DefDef) :: Nil, closure: Closure) if meth.symbol == closure.meth.symbol => + Some(meth) + case Block(Nil, expr) => + unapply(expr) + case _ => + None + } + } + + /** If tree is a closure, its body, otherwise tree itself */ + def closureBody(tree: Tree)(using Context): Tree = tree match { + case closureDef(meth) => meth.rhs + case _ => tree + } + + /** The variables defined by a pattern, in reverse order of their appearance. */ + def patVars(tree: Tree)(using Context): List[Symbol] = { + val acc = new TreeAccumulator[List[Symbol]] { + def apply(syms: List[Symbol], tree: Tree)(using Context) = tree match { + case Bind(_, body) => apply(tree.symbol :: syms, body) + case Annotated(tree, id @ Ident(tpnme.BOUNDTYPE_ANNOT)) => apply(id.symbol :: syms, tree) + case _ => foldOver(syms, tree) + } + } + acc(Nil, tree) + } + + /** Is this pattern node a catch-all or type-test pattern? 
*/ + def isCatchCase(cdef: CaseDef)(using Context): Boolean = cdef match { + case CaseDef(Typed(Ident(nme.WILDCARD), tpt), EmptyTree, _) => + isSimpleThrowable(tpt.tpe) + case CaseDef(Bind(_, Typed(Ident(nme.WILDCARD), tpt)), EmptyTree, _) => + isSimpleThrowable(tpt.tpe) + case _ => + isDefaultCase(cdef) + } + + private def isSimpleThrowable(tp: Type)(using Context): Boolean = tp match { + case tp @ TypeRef(pre, _) => + (pre == NoPrefix || pre.typeSymbol.isStatic) && + (tp.symbol derivesFrom defn.ThrowableClass) && !tp.symbol.is(Trait) + case _ => + false + } + + /** The symbols defined locally in a statement list */ + def localSyms(stats: List[Tree])(using Context): List[Symbol] = + val locals = new mutable.ListBuffer[Symbol] + for stat <- stats do + if stat.isDef && stat.symbol.exists then locals += stat.symbol + locals.toList + + /** If `tree` is a DefTree, the symbol defined by it, otherwise NoSymbol */ + def definedSym(tree: Tree)(using Context): Symbol = + if (tree.isDef) tree.symbol else NoSymbol + + /** Going from child to parent, the path of tree nodes that starts + * with a definition of symbol `sym` and ends with `root`, or Nil + * if no such path exists. + * Pre: `sym` must have a position. + */ + def defPath(sym: Symbol, root: Tree)(using Context): List[Tree] = trace.onDebug(s"defpath($sym with position ${sym.span}, ${root.show})") { + require(sym.span.exists, sym) + object accum extends TreeAccumulator[List[Tree]] { + def apply(x: List[Tree], tree: Tree)(using Context): List[Tree] = + if (tree.span.contains(sym.span)) + if (definedSym(tree) == sym) tree :: x + else { + val x1 = foldOver(x, tree) + if (x1 ne x) tree :: x1 else x1 + } + else x + } + accum(Nil, root) + } + + /** The top level classes in this tree, including only those module classes that + * are not a linked class of some other class in the result. 
+ */ + def topLevelClasses(tree: Tree)(using Context): List[ClassSymbol] = tree match { + case PackageDef(_, stats) => stats.flatMap(topLevelClasses) + case tdef: TypeDef if tdef.symbol.isClass => tdef.symbol.asClass :: Nil + case _ => Nil + } + + /** The tree containing only the top-level classes and objects matching either `cls` or its companion object */ + def sliceTopLevel(tree: Tree, cls: ClassSymbol)(using Context): List[Tree] = tree match { + case PackageDef(pid, stats) => + val slicedStats = stats.flatMap(sliceTopLevel(_, cls)) + val isEffectivelyEmpty = slicedStats.forall(_.isInstanceOf[Import]) + if isEffectivelyEmpty then Nil + else cpy.PackageDef(tree)(pid, slicedStats) :: Nil + case tdef: TypeDef => + val sym = tdef.symbol + assert(sym.isClass) + if (cls == sym || cls == sym.linkedClass) tdef :: Nil + else Nil + case vdef: ValDef => + val sym = vdef.symbol + assert(sym.is(Module)) + if (cls == sym.companionClass || cls == sym.moduleClass) vdef :: Nil + else Nil + case tree => + tree :: Nil + } + + /** The statement sequence that contains a definition of `sym`, or Nil + * if none was found. + * For a tree to be found, The symbol must have a position and its definition + * tree must be reachable from come tree stored in an enclosing context. + */ + def definingStats(sym: Symbol)(using Context): List[Tree] = + if (!sym.span.exists || (ctx eq NoContext) || (ctx.compilationUnit eq NoCompilationUnit)) Nil + else defPath(sym, ctx.compilationUnit.tpdTree) match { + case defn :: encl :: _ => + def verify(stats: List[Tree]) = + if (stats exists (definedSym(_) == sym)) stats else Nil + encl match { + case Block(stats, _) => verify(stats) + case encl: Template => verify(encl.body) + case PackageDef(_, stats) => verify(stats) + case _ => Nil + } + case nil => + Nil + } + + /** If `tree` is an instance of `TupleN[...](e1, ..., eN)`, the arguments `e1, ..., eN` + * otherwise the empty list. 
+ */ + def tupleArgs(tree: Tree)(using Context): List[Tree] = tree match { + case Block(Nil, expr) => tupleArgs(expr) + case Inlined(_, Nil, expr) => tupleArgs(expr) + case Apply(fn: NameTree, args) + if fn.name == nme.apply && + fn.symbol.owner.is(Module) && + defn.isTupleClass(fn.symbol.owner.companionClass) => args + case _ => Nil + } + + /** The qualifier part of a Select or Ident. + * For an Ident, this is the `This` of the current class. + */ + def qualifier(tree: Tree)(using Context): Tree = tree match { + case Select(qual, _) => qual + case tree: Ident => desugarIdentPrefix(tree) + case _ => This(ctx.owner.enclosingClass.asClass) + } + + /** Is this a (potentially applied) selection of a member of a structural type + * that is not a member of an underlying class or trait? + */ + def isStructuralTermSelectOrApply(tree: Tree)(using Context): Boolean = { + def isStructuralTermSelect(tree: Select) = + def hasRefinement(qualtpe: Type): Boolean = qualtpe.dealias match + case RefinedType(parent, rname, rinfo) => + rname == tree.name || hasRefinement(parent) + case tp: TypeProxy => + hasRefinement(tp.superType) + case tp: AndType => + hasRefinement(tp.tp1) || hasRefinement(tp.tp2) + case tp: OrType => + hasRefinement(tp.tp1) || hasRefinement(tp.tp2) + case _ => + false + !tree.symbol.exists + && tree.isTerm + && { + val qualType = tree.qualifier.tpe + hasRefinement(qualType) && !qualType.derivesFrom(defn.PolyFunctionClass) + } + def loop(tree: Tree): Boolean = tree match + case TypeApply(fun, _) => + loop(fun) + case Apply(fun, _) => + loop(fun) + case tree: Select => + isStructuralTermSelect(tree) + case _ => + false + loop(tree) + } + + /** Return a pair consisting of (supercall, rest) + * + * - supercall: the superclass call, excluding trait constr calls + * + * The supercall is always the first statement (if it exists) + */ + final def splitAtSuper(constrStats: List[Tree])(implicit ctx: Context): (List[Tree], List[Tree]) = + constrStats.toList match { + case 
(sc: Apply) :: rest if sc.symbol.isConstructor => (sc :: Nil, rest) + case (block @ Block(_, sc: Apply)) :: rest if sc.symbol.isConstructor => (block :: Nil, rest) + case stats => (Nil, stats) + } + + /** Structural tree comparison (since == on trees is reference equality). + * For the moment, only Ident, Select, Literal, Apply and TypeApply are supported + */ + extension (t1: Tree) { + def === (t2: Tree)(using Context): Boolean = (t1, t2) match { + case (t1: Ident, t2: Ident) => + t1.symbol == t2.symbol + case (t1 @ Select(q1, _), t2 @ Select(q2, _)) => + t1.symbol == t2.symbol && q1 === q2 + case (Literal(c1), Literal(c2)) => + c1 == c2 + case (Apply(f1, as1), Apply(f2, as2)) => + f1 === f2 && as1.corresponds(as2)(_ === _) + case (TypeApply(f1, ts1), TypeApply(f2, ts2)) => + f1 === f2 && ts1.tpes.corresponds(ts2.tpes)(_ =:= _) + case _ => + false + } + def hash(using Context): Int = + t1.getClass.hashCode * 37 + { + t1 match { + case t1: Ident => t1.symbol.hashCode + case t1 @ Select(q1, _) => t1.symbol.hashCode * 41 + q1.hash + case Literal(c1) => c1.hashCode + case Apply(f1, as1) => as1.foldLeft(f1.hash)((h, arg) => h * 41 + arg.hash) + case TypeApply(f1, ts1) => ts1.foldLeft(f1.hash)((h, arg) => h * 41 + arg.tpe.hash) + case _ => t1.hashCode + } + } + } + + def assertAllPositioned(tree: Tree)(using Context): Unit = + tree.foreachSubTree { + case t: WithoutTypeOrPos[_] => + case t => assert(t.span.exists, i"$t") + } + + /** Extractors for quotes */ + object Quoted { + /** Extracts the content of a quoted tree. + * The result can be the contents of a term or type quote, which + * will return a term or type tree respectively. 
+ */ + def unapply(tree: tpd.Apply)(using Context): Option[tpd.Tree] = + if tree.symbol == defn.QuotedRuntime_exprQuote then + // quoted.runtime.Expr.quote[T]() + Some(tree.args.head) + else if tree.symbol == defn.QuotedTypeModule_of then + // quoted.Type.of[](quotes) + val TypeApply(_, body :: _) = tree.fun: @unchecked + Some(body) + else None + } + + /** Extractors for splices */ + object Spliced { + /** Extracts the content of a spliced expression tree. + * The result can be the contents of a term splice, which + * will return a term tree. + */ + def unapply(tree: tpd.Apply)(using Context): Option[tpd.Tree] = + if tree.symbol.isExprSplice then Some(tree.args.head) else None + } + + /** Extractors for type splices */ + object SplicedType { + /** Extracts the content of a spliced type tree. + * The result can be the contents of a type splice, which + * will return a type tree. + */ + def unapply(tree: tpd.Select)(using Context): Option[tpd.Tree] = + if tree.symbol.isTypeSplice then Some(tree.qualifier) else None + } + + /** Extractor for not-null assertions. + * A not-null assertion for reference `x` has the form `x.$asInstanceOf$[x.type & T]`. 
+ */ + object AssertNotNull : + def apply(tree: tpd.Tree, tpnn: Type)(using Context): tpd.Tree = + tree.select(defn.Any_typeCast).appliedToType(AndType(tree.tpe, tpnn)) + + def unapply(tree: tpd.TypeApply)(using Context): Option[tpd.Tree] = tree match + case TypeApply(Select(qual: RefTree, nme.asInstanceOfPM), arg :: Nil) => + arg.tpe match + case AndType(ref, nn1) if qual.tpe eq ref => + qual.tpe.widen match + case OrNull(nn2) if nn1 eq nn2 => + Some(qual) + case _ => None + case _ => None + case _ => None + end AssertNotNull + + object ConstantValue { + def unapply(tree: Tree)(using Context): Option[Any] = + tree match + case Typed(expr, _) => unapply(expr) + case Inlined(_, Nil, expr) => unapply(expr) + case Block(Nil, expr) => unapply(expr) + case _ => + tree.tpe.widenTermRefExpr.normalized match + case ConstantType(Constant(x)) => Some(x) + case _ => None + } +} + +object TreeInfo { + /** A purity level is represented as a bitset (expressed as an Int) */ + class PurityLevel(val x: Int) extends AnyVal { + /** `this` contains the bits of `that` */ + def >= (that: PurityLevel): Boolean = (x & that.x) == that.x + + /** The intersection of the bits of `this` and `that` */ + def min(that: PurityLevel): PurityLevel = new PurityLevel(x & that.x) + } + + /** An expression is a stable path. Requires that expression is at least idempotent */ + val Path: PurityLevel = new PurityLevel(4) + + /** The expression has no side effects */ + val Pure: PurityLevel = new PurityLevel(3) + + /** Running the expression a second time has no side effects. Implied by `Pure`. 
*/ + val Idempotent: PurityLevel = new PurityLevel(1) + + val Impure: PurityLevel = new PurityLevel(0) + + /** A stable path that is evaluated without side effects */ + val PurePath: PurityLevel = new PurityLevel(Pure.x | Path.x) + + /** A stable path that is also idempotent */ + val IdempotentPath: PurityLevel = new PurityLevel(Idempotent.x | Path.x) +} diff --git a/tests/pos-with-compiler-cc/dotc/ast/TreeMapWithImplicits.scala b/tests/pos-with-compiler-cc/dotc/ast/TreeMapWithImplicits.scala new file mode 100644 index 000000000000..caf8d68442f6 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/ast/TreeMapWithImplicits.scala @@ -0,0 +1,82 @@ +package dotty.tools.dotc +package ast + +import Trees._ +import core.Contexts._ +import core.ContextOps.enter +import core.Flags._ +import core.Symbols._ +import core.TypeError + +/** A TreeMap that maintains the necessary infrastructure to support + * contextual implicit searches (type-scope implicits are supported anyway). + * + * This incudes implicits defined in scope as well as imported implicits. 
+ */ +class TreeMapWithImplicits extends tpd.TreeMapWithPreciseStatContexts { + import tpd._ + + def transformSelf(vd: ValDef)(using Context): ValDef = + cpy.ValDef(vd)(tpt = transform(vd.tpt)) + + private def nestedScopeCtx(defs: List[Tree])(using Context): Context = { + val nestedCtx = ctx.fresh.setNewScope + defs foreach { + case d: DefTree if d.symbol.isOneOf(GivenOrImplicitVal) => nestedCtx.enter(d.symbol) + case _ => + } + nestedCtx + } + + private def patternScopeCtx(pattern: Tree)(using Context): Context = { + val nestedCtx = ctx.fresh.setNewScope + new TreeTraverser { + def traverse(tree: Tree)(using Context): Unit = { + tree match { + case d: DefTree if d.symbol.isOneOf(GivenOrImplicitVal) => + nestedCtx.enter(d.symbol) + case _ => + } + traverseChildren(tree) + } + }.traverse(pattern) + nestedCtx + } + + override def transform(tree: Tree)(using Context): Tree = { + try tree match { + case Block(stats, expr) => + super.transform(tree)(using nestedScopeCtx(stats)) + case tree: DefDef => + inContext(localCtx(tree)) { + cpy.DefDef(tree)( + tree.name, + transformParamss(tree.paramss), + transform(tree.tpt), + transform(tree.rhs)(using nestedScopeCtx(tree.paramss.flatten))) + } + case impl @ Template(constr, parents, self, _) => + cpy.Template(tree)( + transformSub(constr), + transform(parents)(using ctx.superCallContext), + Nil, + transformSelf(self), + transformStats(impl.body, tree.symbol)) + case tree: CaseDef => + val patCtx = patternScopeCtx(tree.pat)(using ctx) + cpy.CaseDef(tree)( + transform(tree.pat), + transform(tree.guard)(using patCtx), + transform(tree.body)(using patCtx) + ) + case _ => + super.transform(tree) + } + catch { + case ex: TypeError => + report.error(ex, tree.srcPos) + tree + } + } +} + diff --git a/tests/pos-with-compiler-cc/dotc/ast/TreeTypeMap.scala b/tests/pos-with-compiler-cc/dotc/ast/TreeTypeMap.scala new file mode 100644 index 000000000000..3b250118f9b3 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/ast/TreeTypeMap.scala 
@@ -0,0 +1,232 @@ +package dotty.tools +package dotc +package ast + +import core._ +import Types._, Contexts._, Flags._ +import Symbols._, Annotations._, Trees._, Symbols._, Constants.Constant +import Decorators._ +import dotty.tools.dotc.transform.SymUtils._ +import language.experimental.pureFunctions + +/** A map that applies three functions and a substitution together to a tree and + * makes sure they are coordinated so that the result is well-typed. The functions are + * @param typeMap A function from Type to Type that gets applied to the + * type of every tree node and to all locally defined symbols, + * followed by the substitution [substFrom := substTo]. + * @param treeMap A transformer that translates all encountered subtrees in + * prefix traversal orders + * @param oldOwners Previous owners. If a top-level local symbol in the mapped tree + * has one of these as an owner, the owner is replaced by the corresponding + * symbol in `newOwners`. + * @param newOwners New owners, replacing previous owners. + * @param substFrom The symbols that need to be substituted. + * @param substTo The substitution targets. + * + * The reason the substitution is broken out from the rest of the type map is + * that all symbols have to be substituted at the same time. If we do not do this, + * we risk data races on named types. Example: Say we have `outer#1.inner#2` and we + * have two substitutions S1 = [outer#1 := outer#3], S2 = [inner#2 := inner#4] where + * hashtags precede symbol ids. If we do S1 first, we get outer#2.inner#3. If we then + * do S2 we get outer#2.inner#4. But that means that the named type outer#2.inner + * gets two different denotations in the same period. Hence, if -Yno-double-bindings is + * set, we would get a data race assertion error. + */ +class TreeTypeMap( + val typeMap: Type -> Type = IdentityTypeMap, + val treeMap: tpd.Tree -> tpd.Tree = identity[tpd.Tree](_), // !cc! 
need explicit instantiation of default argument + val oldOwners: List[Symbol] = Nil, + val newOwners: List[Symbol] = Nil, + val substFrom: List[Symbol] = Nil, + val substTo: List[Symbol] = Nil, + cpy: tpd.TreeCopier = tpd.cpy)(using DetachedContext) extends tpd.TreeMap(cpy) { + import tpd._ + + def copy( + typeMap: Type -> Type, + treeMap: tpd.Tree -> tpd.Tree, + oldOwners: List[Symbol], + newOwners: List[Symbol], + substFrom: List[Symbol], + substTo: List[Symbol])(using Context): TreeTypeMap = + new TreeTypeMap(typeMap, treeMap, oldOwners, newOwners, substFrom, substTo) + + /** If `sym` is one of `oldOwners`, replace by corresponding symbol in `newOwners` */ + def mapOwner(sym: Symbol): Symbol = sym.subst(oldOwners, newOwners) + + /** Replace occurrences of `This(oldOwner)` in some prefix of a type + * by the corresponding `This(newOwner)`. + */ + private val mapOwnerThis = new TypeMap with cc.CaptureSet.IdempotentCaptRefMap { + private def mapPrefix(from: List[Symbol], to: List[Symbol], tp: Type): Type = from match { + case Nil => tp + case (cls: ClassSymbol) :: from1 => mapPrefix(from1, to.tail, tp.substThis(cls, to.head.thisType)) + case _ :: from1 => mapPrefix(from1, to.tail, tp) + } + def apply(tp: Type): Type = tp match { + case tp: NamedType => tp.derivedSelect(mapPrefix(oldOwners, newOwners, tp.prefix)) + case _ => mapOver(tp) + } + } + + def mapType(tp: Type): Type = + mapOwnerThis(typeMap(tp).substSym(substFrom, substTo)) + + private def updateDecls(prevStats: List[Tree], newStats: List[Tree]): Unit = + if (prevStats.isEmpty) assert(newStats.isEmpty) + else { + prevStats.head match { + case pdef: MemberDef => + val prevSym = pdef.symbol + val newSym = newStats.head.symbol + val newCls = newSym.owner.asClass + if (prevSym != newSym) newCls.replace(prevSym, newSym) + case _ => + } + updateDecls(prevStats.tail, newStats.tail) + } + + def transformInlined(tree: tpd.Inlined)(using Context): tpd.Tree = + val Inlined(call, bindings, expanded) = tree + val 
(tmap1, bindings1) = transformDefs(bindings) + val expanded1 = tmap1.transform(expanded) + cpy.Inlined(tree)(call, bindings1, expanded1) + + override def transform(tree: tpd.Tree)(using Context): tpd.Tree = treeMap(tree) match { + case impl @ Template(constr, parents, self, _) => + val tmap = withMappedSyms(localSyms(impl :: self :: Nil)) + cpy.Template(impl)( + constr = tmap.transformSub(constr), + parents = parents.mapconserve(transform), + self = tmap.transformSub(self), + body = impl.body mapconserve + (tmap.transform(_)(using ctx.withOwner(mapOwner(impl.symbol.owner)))) + ).withType(tmap.mapType(impl.tpe)) + case tree1 => + tree1.withType(mapType(tree1.tpe)) match { + case id: Ident if tpd.needsSelect(id.tpe) => + ref(id.tpe.asInstanceOf[TermRef]).withSpan(id.span) + case ddef @ DefDef(name, paramss, tpt, _) => + val (tmap1, paramss1) = transformAllParamss(paramss) + val res = cpy.DefDef(ddef)(name, paramss1, tmap1.transform(tpt), tmap1.transform(ddef.rhs)) + res.symbol.setParamssFromDefs(paramss1) + res.symbol.transformAnnotations { + case ann: BodyAnnotation => ann.derivedAnnotation(transform(ann.tree)) + case ann => ann + } + res + case tdef @ LambdaTypeTree(tparams, body) => + val (tmap1, tparams1) = transformDefs(tparams) + cpy.LambdaTypeTree(tdef)(tparams1, tmap1.transform(body)) + case blk @ Block(stats, expr) => + val (tmap1, stats1) = transformDefs(stats) + val expr1 = tmap1.transform(expr) + cpy.Block(blk)(stats1, expr1) + case inlined: Inlined => + transformInlined(inlined) + case cdef @ CaseDef(pat, guard, rhs) => + val tmap = withMappedSyms(patVars(pat)) + val pat1 = tmap.transform(pat) + val guard1 = tmap.transform(guard) + val rhs1 = tmap.transform(rhs) + cpy.CaseDef(cdef)(pat1, guard1, rhs1) + case labeled @ Labeled(bind, expr) => + val tmap = withMappedSyms(bind.symbol :: Nil) + val bind1 = tmap.transformSub(bind) + val expr1 = tmap.transform(expr) + cpy.Labeled(labeled)(bind1, expr1) + case tree @ Hole(_, _, args, content, tpt) => + val args1 
= args.mapConserve(transform) + val content1 = transform(content) + val tpt1 = transform(tpt) + cpy.Hole(tree)(args = args1, content = content1, tpt = tpt1) + case lit @ Literal(Constant(tpe: Type)) => + cpy.Literal(lit)(Constant(mapType(tpe))) + case tree1 => + super.transform(tree1) + } + } + + override def transformStats(trees: List[tpd.Tree], exprOwner: Symbol)(using Context): List[Tree] = + transformDefs(trees)._2 + + def transformDefs[TT <: tpd.Tree](trees: List[TT])(using Context): (TreeTypeMap, List[TT]) = { + val tmap = withMappedSyms(tpd.localSyms(trees)) + (tmap, tmap.transformSub(trees)) + } + + private def transformAllParamss(paramss: List[ParamClause]): (TreeTypeMap, List[ParamClause]) = paramss match + case params :: paramss1 => + val (tmap1, params1: ParamClause) = ((params: @unchecked) match + case ValDefs(vparams) => transformDefs(vparams) + case TypeDefs(tparams) => transformDefs(tparams) + ): @unchecked + val (tmap2, paramss2) = tmap1.transformAllParamss(paramss1) + (tmap2, params1 :: paramss2) + case nil => + (this, paramss) + + def apply[ThisTree <: tpd.Tree](tree: ThisTree): ThisTree = transform(tree).asInstanceOf[ThisTree] + + def apply(annot: Annotation): Annotation = annot.derivedAnnotation(apply(annot.tree)) + + /** The current tree map composed with a substitution [from -> to] */ + def withSubstitution(from: List[Symbol], to: List[Symbol]): TreeTypeMap = + if (from eq to) this + else { + // assert that substitution stays idempotent, assuming its parts are + // TODO: It might be better to cater for the asserted-away conditions, by + // setting up a proper substitution abstraction with a compose operator that + // guarantees idempotence. But this might be too inefficient in some cases. + // We'll cross that bridge when we need to. 
+ assert(!from.exists(substTo contains _)) + assert(!to.exists(substFrom contains _)) + assert(!from.exists(newOwners contains _)) + assert(!to.exists(oldOwners contains _)) + copy( + typeMap, + treeMap, + from ++ oldOwners, + to ++ newOwners, + from ++ substFrom, + to ++ substTo) + } + + /** Apply `typeMap` and `ownerMap` to given symbols `syms` + * and return a treemap that contains the substitution + * between original and mapped symbols. + */ + def withMappedSyms(syms: List[Symbol]): TreeTypeMap = + withMappedSyms(syms, mapSymbols(syms, this)) + + /** The tree map with the substitution between originals `syms` + * and mapped symbols `mapped`. Also goes into mapped classes + * and substitutes their declarations. + */ + def withMappedSyms(syms: List[Symbol], mapped: List[Symbol]): TreeTypeMap = + if syms eq mapped then this + else + val substMap = withSubstitution(syms, mapped) + lazy val origCls = mapped.zip(syms).filter(_._1.isClass).toMap + mapped.filter(_.isClass).foldLeft(substMap) { (tmap, cls) => + val origDcls = cls.info.decls.toList.filterNot(_.is(TypeParam)) + val tmap0 = tmap.withSubstitution(origCls(cls).typeParams, cls.typeParams) + val mappedDcls = mapSymbols(origDcls, tmap0, mapAlways = true) + val tmap1 = tmap.withMappedSyms( + origCls(cls).typeParams ::: origDcls, + cls.typeParams ::: mappedDcls) + origDcls.lazyZip(mappedDcls).foreach(cls.asClass.replace) + tmap1 + } + + override def toString = + def showSyms(syms: List[Symbol]) = + syms.map(sym => s"$sym#${sym.id}").mkString(", ") + s"""TreeTypeMap( + |typeMap = $typeMap + |treeMap = $treeMap + |oldOwners = ${showSyms(oldOwners)} + |newOwners = ${showSyms(newOwners)} + |substFrom = ${showSyms(substFrom)} + |substTo = ${showSyms(substTo)}""".stripMargin +} diff --git a/tests/pos-with-compiler-cc/dotc/ast/Trees.scala b/tests/pos-with-compiler-cc/dotc/ast/Trees.scala new file mode 100644 index 000000000000..0b1842603316 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/ast/Trees.scala @@ -0,0 
+1,1787 @@ +package dotty.tools +package dotc +package ast + +import core._ +import Types._, Names._, NameOps._, Flags._, util.Spans._, Contexts._, Constants._ +import typer.{ ConstFold, ProtoTypes } +import SymDenotations._, Symbols._, Denotations._, StdNames._, Comments._ +import collection.mutable.ListBuffer +import printing.Printer +import printing.Texts.Text +import util.{Stats, Attachment, Property, SourceFile, NoSource, SrcPos, SourcePosition} +import config.Config +import config.Printers.overload +import annotation.internal.sharable +import annotation.unchecked.uncheckedVariance +import annotation.constructorOnly +import compiletime.uninitialized +import Decorators._ +import annotation.retains +import language.experimental.pureFunctions + +object Trees { + + type Untyped = Type | Null + + /** The total number of created tree nodes, maintained if Stats.enabled */ + @sharable var ntrees: Int = 0 + + /** Property key for trees with documentation strings attached */ + val DocComment: Property.StickyKey[Comments.Comment] = Property.StickyKey() + + /** Property key for backquoted identifiers and definitions */ + val Backquoted: Property.StickyKey[Unit] = Property.StickyKey() + + /** Trees take a parameter indicating what the type of their `tpe` field + * is. Two choices: `Type` or `Untyped`. + * Untyped trees have type `Tree[Untyped]`. + * + * Tree typing uses a copy-on-write implementation: + * + * - You can never observe a `tpe` which is `null` (throws an exception) + * - So when creating a typed tree with `withType` we can re-use + * the existing tree transparently, assigning its `tpe` field. + * - It is impossible to embed untyped trees in typed ones. + * - Typed trees can be embedded in untyped ones provided they are rooted + * in a TypedSplice node. + * - Type checking an untyped tree should remove all embedded `TypedSplice` + * nodes. 
+ */ + abstract class Tree[+T <: Untyped](implicit @constructorOnly src: SourceFile) + extends Positioned, SrcPos, Product, Attachment.Container, printing.Showable { + + if (Stats.enabled) ntrees += 1 + + /** The type constructor at the root of the tree */ + type ThisTree[T <: Untyped] <: Tree[T] + + protected var myTpe: T @uncheckedVariance = uninitialized + + /** Destructively set the type of the tree. This should be called only when it is known that + * it is safe under sharing to do so. One use-case is in the withType method below + * which implements copy-on-write. Another use-case is in method interpolateAndAdapt in Typer, + * where we overwrite with a simplified version of the type itself. + */ + private[dotc] def overwriteType(tpe: T @uncheckedVariance): Unit = + myTpe = tpe + + /** The type of the tree. In case of an untyped tree, + * an UnAssignedTypeException is thrown. (Overridden by empty trees) + */ + final def tpe: T = + if myTpe == null then throw UnAssignedTypeException(this) + myTpe.uncheckedNN + + /** Copy `tpe` attribute from tree `from` into this tree, independently + * whether it is null or not. + final def copyAttr[U <: Untyped](from: Tree[U]): ThisTree[T] = { + val t1 = this.withSpan(from.span) + val t2 = + if (from.myTpe != null) t1.withType(from.myTpe.asInstanceOf[Type]) + else t1 + t2.asInstanceOf[ThisTree[T]] + } + */ + + /** Return a typed tree that's isomorphic to this tree, but has given + * type. 
(Overridden by empty trees) + */ + def withType(tpe: Type)(using Context): ThisTree[Type] = { + if (tpe.isInstanceOf[ErrorType]) + assert(!Config.checkUnreportedErrors || + ctx.reporter.errorsReported || + ctx.settings.YshowPrintErrors.value + // under -Yshow-print-errors, errors might arise during printing, but they do not count as reported + ) + else if (Config.checkTreesConsistent) + checkChildrenTyped(productIterator) + withTypeUnchecked(tpe) + } + + /** Check that typed trees don't refer to untyped ones, except if + * - the parent tree is an import, or + * - the child tree is an identifier, or + * - errors were reported + */ + private def checkChildrenTyped(it: Iterator[Any])(using Context): Unit = + if (!this.isInstanceOf[Import[?]]) + while (it.hasNext) + it.next() match { + case x: Ident[?] => // untyped idents are used in a number of places in typed trees + case x: Tree[?] => + assert(x.hasType || ctx.reporter.errorsReported, + s"$this has untyped child $x") + case xs: List[?] => checkChildrenTyped(xs.iterator) + case _ => + } + + def withTypeUnchecked(tpe: Type): ThisTree[Type] = { + val tree = + (if (myTpe == null || + (myTpe.asInstanceOf[AnyRef] eq tpe.asInstanceOf[AnyRef])) this + else cloneIn(source)).asInstanceOf[Tree[Type]] + tree overwriteType tpe + tree.asInstanceOf[ThisTree[Type]] + } + + /** Does the tree have its type field set? Note: this operation is not + * referentially transparent, because it can observe the withType + * modifications. Should be used only in special circumstances (we + * need it for printing trees with optional type info). + */ + final def hasType: Boolean = myTpe != null + + final def typeOpt: Type = myTpe match + case tp: Type => tp + case null => NoType + + /** The denotation referred to by this tree. + * Defined for `DenotingTree`s and `ProxyTree`s, NoDenotation for other + * kinds of trees + */ + def denot(using Context): Denotation = NoDenotation + + /** Shorthand for `denot.symbol`. 
*/ + final def symbol(using Context): Symbol = denot.symbol + + /** Does this tree represent a type? */ + def isType: Boolean = false + + /** Does this tree represent a term? */ + def isTerm: Boolean = false + + /** Is this a legal part of a pattern which is not at the same time a term? */ + def isPattern: Boolean = false + + /** Does this tree define a new symbol that is not defined elsewhere? */ + def isDef: Boolean = false + + /** Is this tree either the empty tree or the empty ValDef or an empty type ident? */ + def isEmpty: Boolean = false + + /** Convert tree to a list. Gives a singleton list, except + * for thickets which return their element trees. + */ + def toList: List[Tree[T]] = this :: Nil + + /** if this tree is the empty tree, the alternative, else this tree */ + inline def orElse[U >: T <: Untyped](inline that: Tree[U]): Tree[U] = + if (this eq genericEmptyTree) that else this + + /** The number of nodes in this tree */ + def treeSize: Int = { + var s = 1 + def addSize(elem: Any): Unit = elem match { + case t: Tree[?] => s += t.treeSize + case ts: List[?] => ts foreach addSize + case _ => + } + productIterator foreach addSize + s + } + + /** If this is a thicket, perform `op` on each of its trees + * otherwise, perform `op` ion tree itself. + */ + def foreachInThicket(op: Tree[T] => Unit): Unit = op(this) + + override def toText(printer: Printer): Text = printer.toText(this) + + def sameTree(that: Tree[?]): Boolean = { + def isSame(x: Any, y: Any): Boolean = + x.asInstanceOf[AnyRef].eq(y.asInstanceOf[AnyRef]) || { + x match { + case x: Tree[?] => + y match { + case y: Tree[?] => x.sameTree(y) + case _ => false + } + case x: List[?] => + y match { + case y: List[?] 
=> x.corresponds(y)(isSame) + case _ => false + } + case _ => + false + } + } + this.getClass == that.getClass && { + val it1 = this.productIterator + val it2 = that.productIterator + it1.corresponds(it2)(isSame) + } + } + + override def hashCode(): Int = System.identityHashCode(this) + override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef] + } + + class UnAssignedTypeException[T <: Untyped](tree: Tree[T]) extends RuntimeException { + override def getMessage: String = s"type of $tree is not assigned" + } + + type LazyTree[+T <: Untyped] = Tree[T] | Lazy[Tree[T]] + type LazyTreeList[+T <: Untyped] = List[Tree[T]] | Lazy[List[Tree[T]]] + + // ------ Categories of trees ----------------------------------- + + /** Instances of this class are trees for which isType is definitely true. + * Note that some trees have isType = true without being TypTrees (e.g. Ident, Annotated) + */ + trait TypTree[+T <: Untyped] extends Tree[T] { + type ThisTree[+T <: Untyped] <: TypTree[T] + override def isType: Boolean = true + } + + /** Instances of this class are trees for which isTerm is definitely true. + * Note that some trees have isTerm = true without being TermTrees (e.g. Ident, Annotated) + */ + trait TermTree[+T <: Untyped] extends Tree[T] { + type ThisTree[+T <: Untyped] <: TermTree[T] + override def isTerm: Boolean = true + } + + /** Instances of this class are trees which are not terms but are legal + * parts of patterns. 
+ */ + trait PatternTree[+T <: Untyped] extends Tree[T] { + type ThisTree[+T <: Untyped] <: PatternTree[T] + override def isPattern: Boolean = true + } + + /** Tree's denotation can be derived from its type */ + abstract class DenotingTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends Tree[T] { + type ThisTree[+T <: Untyped] <: DenotingTree[T] + override def denot(using Context): Denotation = typeOpt.stripped match + case tpe: NamedType => tpe.denot + case tpe: ThisType => tpe.cls.denot + case _ => NoDenotation + } + + /** Tree's denot/isType/isTerm properties come from a subtree + * identified by `forwardTo`. + */ + abstract class ProxyTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends Tree[T] { + type ThisTree[+T <: Untyped] <: ProxyTree[T] + def forwardTo: Tree[T] + override def denot(using Context): Denotation = forwardTo.denot + override def isTerm: Boolean = forwardTo.isTerm + override def isType: Boolean = forwardTo.isType + } + + /** Tree has a name */ + abstract class NameTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends DenotingTree[T] { + type ThisTree[+T <: Untyped] <: NameTree[T] + def name: Name + } + + /** Tree refers by name to a denotation */ + abstract class RefTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends NameTree[T] { + type ThisTree[+T <: Untyped] <: RefTree[T] + def qualifier: Tree[T] + override def isType: Boolean = name.isTypeName + override def isTerm: Boolean = name.isTermName + } + + /** Tree defines a new symbol */ + trait DefTree[+T <: Untyped] extends DenotingTree[T] { + type ThisTree[+T <: Untyped] <: DefTree[T] + + private var myMods: untpd.Modifiers | Null = uninitialized + + private[dotc] def rawMods: untpd.Modifiers = + if (myMods == null) untpd.EmptyModifiers else myMods.uncheckedNN + + def withAnnotations(annots: List[untpd.Tree]): ThisTree[Untyped] = withMods(rawMods.withAnnotations(annots)) + + def withMods(mods: untpd.Modifiers): 
ThisTree[Untyped] = { + val tree = if (myMods == null || (myMods == mods)) this else cloneIn(source) + tree.setMods(mods) + tree.asInstanceOf[ThisTree[Untyped]] + } + + def withFlags(flags: FlagSet): ThisTree[Untyped] = withMods(untpd.Modifiers(flags)) + def withAddedFlags(flags: FlagSet): ThisTree[Untyped] = withMods(rawMods | flags) + + /** Destructively update modifiers. To be used with care. */ + def setMods(mods: untpd.Modifiers): Unit = myMods = mods + + override def isDef: Boolean = true + def namedType: NamedType = tpe.asInstanceOf[NamedType] + } + + extension (mdef: untpd.DefTree) def mods: untpd.Modifiers = mdef.rawMods + + sealed trait WithEndMarker[+T <: Untyped]: + self: PackageDef[T] | NamedDefTree[T] => + + import WithEndMarker.* + + final def endSpan(using Context): Span = + if hasEndMarker then + val realName = srcName.stripModuleClassSuffix.lastPart + span.withStart(span.end - realName.length) + else + NoSpan + + /** The name in source code that represents this construct, + * and is the name that the user must write to create a valid + * end marker. + * e.g. a constructor definition is terminated in the source + * code by `end this`, so it's `srcName` should return `this`. 
+ */ + protected def srcName(using Context): Name + + final def withEndMarker(): self.type = + self.withAttachment(HasEndMarker, ()) + + final def withEndMarker(copyFrom: WithEndMarker[?]): self.type = + if copyFrom.hasEndMarker then + this.withEndMarker() + else + this + + final def dropEndMarker(): self.type = + self.removeAttachment(HasEndMarker) + this + + protected def hasEndMarker: Boolean = self.hasAttachment(HasEndMarker) + + object WithEndMarker: + /** Property key that signals the tree was terminated + * with an `end` marker in the source code + */ + private val HasEndMarker: Property.StickyKey[Unit] = Property.StickyKey() + + end WithEndMarker + + abstract class NamedDefTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) + extends NameTree[T] with DefTree[T] with WithEndMarker[T] { + type ThisTree[+T <: Untyped] <: NamedDefTree[T] + + protected def srcName(using Context): Name = + if name == nme.CONSTRUCTOR then nme.this_ + else if symbol.isPackageObject then symbol.owner.name + else name + + /** The position of the name defined by this definition. + * This is a point position if the definition is synthetic, or a range position + * if the definition comes from source. + * It might also be that the definition does not have a position (for instance when synthesized by + * a calling chain from `viewExists`), in that case the return position is NoSpan. + * Overridden in Bind + */ + def nameSpan(using Context): Span = + if (span.exists) { + val point = span.point + if (rawMods.is(Synthetic) || span.isSynthetic || name.toTermName == nme.ERROR) Span(point) + else { + val realName = srcName.stripModuleClassSuffix.lastPart + Span(point, point + realName.length, point) + } + } + else span + + /** The source position of the name defined by this definition. + * This is a point position if the definition is synthetic, or a range position + * if the definition comes from source. 
+ */ + def namePos(using Context): SourcePosition = source.atSpan(nameSpan) + } + + /** Tree defines a new symbol and carries modifiers. + * The position of a MemberDef contains only the defined identifier or pattern. + * The envelope of a MemberDef contains the whole definition and has its point + * on the opening keyword (or the next token after that if keyword is missing). + */ + abstract class MemberDef[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends NamedDefTree[T] { + type ThisTree[+T <: Untyped] <: MemberDef[T] + + def rawComment: Option[Comment] = getAttachment(DocComment) + + def setComment(comment: Option[Comment]): this.type = { + comment.map(putAttachment(DocComment, _)) + this + } + + def name: Name + } + + /** A ValDef or DefDef tree */ + abstract class ValOrDefDef[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends MemberDef[T] with WithLazyField[Tree[T]] { + type ThisTree[+T <: Untyped] <: ValOrDefDef[T] + def name: TermName + def tpt: Tree[T] + def unforcedRhs: LazyTree[T] = unforced + def rhs(using Context): Tree[T] = forceIfLazy + } + + trait ValOrTypeDef[+T <: Untyped] extends MemberDef[T]: + type ThisTree[+T <: Untyped] <: ValOrTypeDef[T] + + type ParamClause[T <: Untyped] = List[ValDef[T]] | List[TypeDef[T]] + + // ----------- Tree case classes ------------------------------------ + + /** name */ + case class Ident[+T <: Untyped] private[ast] (name: Name)(implicit @constructorOnly src: SourceFile) + extends RefTree[T] { + type ThisTree[+T <: Untyped] = Ident[T] + def qualifier: Tree[T] = genericEmptyTree + + def isBackquoted: Boolean = hasAttachment(Backquoted) + } + + class SearchFailureIdent[+T <: Untyped] private[ast] (name: Name, expl: -> String)(implicit @constructorOnly src: SourceFile) + extends Ident[T](name) { + def explanation = expl + override def toString: String = s"SearchFailureIdent($explanation)" + } + + /** qualifier.name, or qualifier#name, if qualifier is a type */ + case class Select[+T <: 
Untyped] private[ast] (qualifier: Tree[T], name: Name)(implicit @constructorOnly src: SourceFile) + extends RefTree[T] { + type ThisTree[+T <: Untyped] = Select[T] + + override def denot(using Context): Denotation = typeOpt match + case ConstantType(_) if ConstFold.foldedUnops.contains(name) => + // Recover the denotation of a constant-folded selection + qualifier.typeOpt.member(name).atSignature(Signature.NotAMethod, name) + case _ => + super.denot + + def nameSpan(using Context): Span = + if span.exists then + val point = span.point + if name.toTermName == nme.ERROR then + Span(point) + else if qualifier.span.start > span.start then // right associative + val realName = name.stripModuleClassSuffix.lastPart + Span(span.start, span.start + realName.length, point) + else + Span(point, span.end, point) + else span + } + + class SelectWithSig[+T <: Untyped] private[ast] (qualifier: Tree[T], name: Name, val sig: Signature)(implicit @constructorOnly src: SourceFile) + extends Select[T](qualifier, name) { + override def toString: String = s"SelectWithSig($qualifier, $name, $sig)" + } + + /** qual.this */ + case class This[+T <: Untyped] private[ast] (qual: untpd.Ident)(implicit @constructorOnly src: SourceFile) + extends DenotingTree[T] with TermTree[T] { + type ThisTree[+T <: Untyped] = This[T] + // Denotation of a This tree is always the underlying class; needs correction for modules. 
+ override def denot(using Context): Denotation = + typeOpt match { + case tpe @ TermRef(pre, _) if tpe.symbol.is(Module) => + tpe.symbol.moduleClass.denot.asSeenFrom(pre) + case _ => + super.denot + } + } + + /** C.super[mix], where qual = C.this */ + case class Super[+T <: Untyped] private[ast] (qual: Tree[T], mix: untpd.Ident)(implicit @constructorOnly src: SourceFile) + extends ProxyTree[T] with TermTree[T] { + type ThisTree[+T <: Untyped] = Super[T] + def forwardTo: Tree[T] = qual + } + + abstract class GenericApply[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends ProxyTree[T] with TermTree[T] { + type ThisTree[+T <: Untyped] <: GenericApply[T] + val fun: Tree[T] + val args: List[Tree[T]] + def forwardTo: Tree[T] = fun + } + + object GenericApply: + def unapply[T <: Untyped](tree: Tree[T]): Option[(Tree[T], List[Tree[T]])] = tree match + case tree: GenericApply[T] => Some((tree.fun, tree.args)) + case _ => None + + /** The kind of application */ + enum ApplyKind: + case Regular // r.f(x) + case Using // r.f(using x) + case InfixTuple // r f (x1, ..., xN) where N != 1; needs to be treated specially for an error message in typedApply + + /** fun(args) */ + case class Apply[+T <: Untyped] private[ast] (fun: Tree[T], args: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + extends GenericApply[T] { + type ThisTree[+T <: Untyped] = Apply[T] + + def setApplyKind(kind: ApplyKind) = + putAttachment(untpd.KindOfApply, kind) + this + + /** The kind of this application. Works reliably only for untyped trees; typed trees + * are under no obligation to update it correctly. 
+ */ + def applyKind: ApplyKind = + attachmentOrElse(untpd.KindOfApply, ApplyKind.Regular) + } + + /** fun[args] */ + case class TypeApply[+T <: Untyped] private[ast] (fun: Tree[T], args: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + extends GenericApply[T] { + type ThisTree[+T <: Untyped] = TypeApply[T] + } + + /** const */ + case class Literal[+T <: Untyped] private[ast] (const: Constant)(implicit @constructorOnly src: SourceFile) + extends Tree[T] with TermTree[T] { + type ThisTree[+T <: Untyped] = Literal[T] + } + + /** new tpt, but no constructor call */ + case class New[+T <: Untyped] private[ast] (tpt: Tree[T])(implicit @constructorOnly src: SourceFile) + extends Tree[T] with TermTree[T] { + type ThisTree[+T <: Untyped] = New[T] + } + + /** expr : tpt */ + case class Typed[+T <: Untyped] private[ast] (expr: Tree[T], tpt: Tree[T])(implicit @constructorOnly src: SourceFile) + extends ProxyTree[T] with TermTree[T] { + type ThisTree[+T <: Untyped] = Typed[T] + def forwardTo: Tree[T] = expr + } + + /** name = arg, in a parameter list */ + case class NamedArg[+T <: Untyped] private[ast] (name: Name, arg: Tree[T])(implicit @constructorOnly src: SourceFile) + extends Tree[T] { + type ThisTree[+T <: Untyped] = NamedArg[T] + } + + /** name = arg, outside a parameter list */ + case class Assign[+T <: Untyped] private[ast] (lhs: Tree[T], rhs: Tree[T])(implicit @constructorOnly src: SourceFile) + extends TermTree[T] { + type ThisTree[+T <: Untyped] = Assign[T] + } + + /** { stats; expr } */ + case class Block[+T <: Untyped] private[ast] (stats: List[Tree[T]], expr: Tree[T])(implicit @constructorOnly src: SourceFile) + extends Tree[T] { + type ThisTree[+T <: Untyped] = Block[T] + override def isType: Boolean = expr.isType + override def isTerm: Boolean = !isType // this will classify empty trees as terms, which is necessary + } + + /** if cond then thenp else elsep */ + case class If[+T <: Untyped] private[ast] (cond: Tree[T], thenp: Tree[T], elsep: 
Tree[T])(implicit @constructorOnly src: SourceFile) + extends TermTree[T] { + type ThisTree[+T <: Untyped] = If[T] + def isInline = false + } + class InlineIf[+T <: Untyped] private[ast] (cond: Tree[T], thenp: Tree[T], elsep: Tree[T])(implicit @constructorOnly src: SourceFile) + extends If(cond, thenp, elsep) { + override def isInline = true + override def toString = s"InlineIf($cond, $thenp, $elsep)" + } + + /** A closure with an environment and a reference to a method. + * @param env The captured parameters of the closure + * @param meth A ref tree that refers to the method of the closure. + * The first (env.length) parameters of that method are filled + * with env values. + * @param tpt Either EmptyTree or a TypeTree. If tpt is EmptyTree the type + * of the closure is a function type, otherwise it is the type + * given in `tpt`, which must be a SAM type. + */ + case class Closure[+T <: Untyped] private[ast] (env: List[Tree[T]], meth: Tree[T], tpt: Tree[T])(implicit @constructorOnly src: SourceFile) + extends TermTree[T] { + type ThisTree[+T <: Untyped] = Closure[T] + } + + /** selector match { cases } */ + case class Match[+T <: Untyped] private[ast] (selector: Tree[T], cases: List[CaseDef[T]])(implicit @constructorOnly src: SourceFile) + extends TermTree[T] { + type ThisTree[+T <: Untyped] = Match[T] + def isInline = false + } + class InlineMatch[+T <: Untyped] private[ast] (selector: Tree[T], cases: List[CaseDef[T]])(implicit @constructorOnly src: SourceFile) + extends Match(selector, cases) { + override def isInline = true + override def toString = s"InlineMatch($selector, $cases)" + } + + /** case pat if guard => body */ + case class CaseDef[+T <: Untyped] private[ast] (pat: Tree[T], guard: Tree[T], body: Tree[T])(implicit @constructorOnly src: SourceFile) + extends Tree[T] { + type ThisTree[+T <: Untyped] = CaseDef[T] + } + + /** label[tpt]: { expr } */ + case class Labeled[+T <: Untyped] private[ast] (bind: Bind[T], expr: Tree[T])(implicit @constructorOnly 
src: SourceFile) + extends NameTree[T] { + type ThisTree[+T <: Untyped] = Labeled[T] + def name: Name = bind.name + } + + /** return expr + * where `from` refers to the method or label from which the return takes place + * After program transformations this is not necessarily the enclosing method, because + * closures can intervene. + */ + case class Return[+T <: Untyped] private[ast] (expr: Tree[T], from: Tree[T] = genericEmptyTree)(implicit @constructorOnly src: SourceFile) + extends TermTree[T] { + type ThisTree[+T <: Untyped] = Return[T] + } + + /** while (cond) { body } */ + case class WhileDo[+T <: Untyped] private[ast] (cond: Tree[T], body: Tree[T])(implicit @constructorOnly src: SourceFile) + extends TermTree[T] { + type ThisTree[+T <: Untyped] = WhileDo[T] + } + + /** try block catch cases finally finalizer */ + case class Try[+T <: Untyped] private[ast] (expr: Tree[T], cases: List[CaseDef[T]], finalizer: Tree[T])(implicit @constructorOnly src: SourceFile) + extends TermTree[T] { + type ThisTree[+T <: Untyped] = Try[T] + } + + /** Seq(elems) + * @param tpt The element type of the sequence. + */ + case class SeqLiteral[+T <: Untyped] private[ast] (elems: List[Tree[T]], elemtpt: Tree[T])(implicit @constructorOnly src: SourceFile) + extends Tree[T] { + type ThisTree[+T <: Untyped] = SeqLiteral[T] + } + + /** Array(elems) */ + class JavaSeqLiteral[+T <: Untyped] private[ast] (elems: List[Tree[T]], elemtpt: Tree[T])(implicit @constructorOnly src: SourceFile) + extends SeqLiteral(elems, elemtpt) { + override def toString: String = s"JavaSeqLiteral($elems, $elemtpt)" + } + + /** A tree representing inlined code. + * + * @param call Info about the original call that was inlined + * Until PostTyper, this is the full call, afterwards only + * a reference to the toplevel class from which the call was inlined. + * @param bindings Bindings for proxies to be used in the inlined code + * @param expansion The inlined tree, minus bindings. 
+ * + * The full inlined code is equivalent to + * + * { bindings; expansion } + * + * The reason to keep `bindings` separate is because they are typed in a + * different context: `bindings` represent the arguments to the inlined + * call, whereas `expansion` represents the body of the inlined function. + */ + case class Inlined[+T <: Untyped] private[ast] (call: tpd.Tree, bindings: List[MemberDef[T]], expansion: Tree[T])(implicit @constructorOnly src: SourceFile) + extends Tree[T] { + type ThisTree[+T <: Untyped] = Inlined[T] + override def isTerm = expansion.isTerm + override def isType = expansion.isType + } + + /** A type tree that represents an existing or inferred type */ + case class TypeTree[+T <: Untyped]()(implicit @constructorOnly src: SourceFile) + extends DenotingTree[T] with TypTree[T] { + type ThisTree[+T <: Untyped] = TypeTree[T] + override def isEmpty: Boolean = !hasType + override def toString: String = + s"TypeTree${if (hasType) s"[$typeOpt]" else ""}" + } + + /** A type tree whose type is inferred. These trees appear in two contexts + * - as an argument of a TypeApply. In that case its type is always a TypeVar + * - as a (result-)type of an inferred ValDef or DefDef. + * Every TypeVar is created as the type of one InferredTypeTree. 
+ */ + class InferredTypeTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends TypeTree[T] + + /** ref.type */ + case class SingletonTypeTree[+T <: Untyped] private[ast] (ref: Tree[T])(implicit @constructorOnly src: SourceFile) + extends DenotingTree[T] with TypTree[T] { + type ThisTree[+T <: Untyped] = SingletonTypeTree[T] + } + + /** tpt { refinements } */ + case class RefinedTypeTree[+T <: Untyped] private[ast] (tpt: Tree[T], refinements: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + extends ProxyTree[T] with TypTree[T] { + type ThisTree[+T <: Untyped] = RefinedTypeTree[T] + def forwardTo: Tree[T] = tpt + } + + /** tpt[args] */ + case class AppliedTypeTree[+T <: Untyped] private[ast] (tpt: Tree[T], args: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + extends ProxyTree[T] with TypTree[T] { + type ThisTree[+T <: Untyped] = AppliedTypeTree[T] + def forwardTo: Tree[T] = tpt + } + + /** [typeparams] -> tpt + * + * Note: the type of such a tree is not necessarily a `HKTypeLambda`, it can + * also be a `TypeBounds` where the upper bound is an `HKTypeLambda`, and the + * lower bound is either a reference to `Nothing` or an `HKTypeLambda`, + * this happens because these trees are typed by `HKTypeLambda#fromParams` which + * makes sure to move bounds outside of the type lambda itself to simplify their + * handling in the compiler. + * + * You may ask: why not normalize the trees too? That way, + * + * LambdaTypeTree(X, TypeBoundsTree(A, B)) + * + * would become, + * + * TypeBoundsTree(LambdaTypeTree(X, A), LambdaTypeTree(X, B)) + * + * which would maintain consistency between a tree and its type. The problem + * with this definition is that the same tree `X` appears twice, therefore + * we'd have to create two symbols for it which makes it harder to relate the + * source code written by the user with the trees used by the compiler (for + * example, to make "find all references" work in the IDE). 
+ */ + case class LambdaTypeTree[+T <: Untyped] private[ast] (tparams: List[TypeDef[T]], body: Tree[T])(implicit @constructorOnly src: SourceFile) + extends TypTree[T] { + type ThisTree[+T <: Untyped] = LambdaTypeTree[T] + } + + case class TermLambdaTypeTree[+T <: Untyped] private[ast] (params: List[ValDef[T]], body: Tree[T])(implicit @constructorOnly src: SourceFile) + extends TypTree[T] { + type ThisTree[+T <: Untyped] = TermLambdaTypeTree[T] + } + + /** [bound] selector match { cases } */ + case class MatchTypeTree[+T <: Untyped] private[ast] (bound: Tree[T], selector: Tree[T], cases: List[CaseDef[T]])(implicit @constructorOnly src: SourceFile) + extends TypTree[T] { + type ThisTree[+T <: Untyped] = MatchTypeTree[T] + } + + /** => T */ + case class ByNameTypeTree[+T <: Untyped] private[ast] (result: Tree[T])(implicit @constructorOnly src: SourceFile) + extends TypTree[T] { + type ThisTree[+T <: Untyped] = ByNameTypeTree[T] + } + + /** >: lo <: hi + * >: lo <: hi = alias for RHS of bounded opaque type + */ + case class TypeBoundsTree[+T <: Untyped] private[ast] (lo: Tree[T], hi: Tree[T], alias: Tree[T])(implicit @constructorOnly src: SourceFile) + extends TypTree[T] { + type ThisTree[+T <: Untyped] = TypeBoundsTree[T] + } + + /** name @ body */ + case class Bind[+T <: Untyped] private[ast] (name: Name, body: Tree[T])(implicit @constructorOnly src: SourceFile) + extends NamedDefTree[T] with PatternTree[T] { + type ThisTree[+T <: Untyped] = Bind[T] + override def isType: Boolean = name.isTypeName + override def isTerm: Boolean = name.isTermName + + override def nameSpan(using Context): Span = + if span.exists then Span(span.start, span.start + name.toString.length) else span + } + + /** tree_1 | ... 
| tree_n */ + case class Alternative[+T <: Untyped] private[ast] (trees: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + extends PatternTree[T] { + type ThisTree[+T <: Untyped] = Alternative[T] + } + + /** The typed translation of `extractor(patterns)` in a pattern. The translation has the following + * components: + * + * @param fun is `extractor.unapply` (or, for backwards compatibility, `extractor.unapplySeq`) + * possibly with type parameters + * @param implicits Any implicit parameters passed to the unapply after the selector + * @param patterns The argument patterns in the pattern match. + * + * It is typed with same type as first `fun` argument + * Given a match selector `sel` a pattern UnApply(fun, implicits, patterns) is roughly translated as follows + * + * val result = fun(sel)(implicits) + * if (result.isDefined) "match patterns against result" + */ + case class UnApply[+T <: Untyped] private[ast] (fun: Tree[T], implicits: List[Tree[T]], patterns: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + extends ProxyTree[T] with PatternTree[T] { + type ThisTree[+T <: Untyped] = UnApply[T] + def forwardTo = fun + } + + /** mods val name: tpt = rhs */ + case class ValDef[+T <: Untyped] private[ast] (name: TermName, tpt: Tree[T], private var preRhs: LazyTree[T])(implicit @constructorOnly src: SourceFile) + extends ValOrDefDef[T], ValOrTypeDef[T] { + type ThisTree[+T <: Untyped] = ValDef[T] + assert(isEmpty || (tpt ne genericEmptyTree)) + def unforced: LazyTree[T] = preRhs + protected def force(x: Tree[T @uncheckedVariance]): Unit = preRhs = x + } + + /** mods def name[tparams](vparams_1)...(vparams_n): tpt = rhs */ + case class DefDef[+T <: Untyped] private[ast] (name: TermName, + paramss: List[ParamClause[T]], tpt: Tree[T], private var preRhs: LazyTree[T])(implicit @constructorOnly src: SourceFile) + extends ValOrDefDef[T] { + type ThisTree[+T <: Untyped] = DefDef[T] + assert(tpt ne genericEmptyTree) + def unforced: LazyTree[T] = preRhs + 
protected def force(x: Tree[T @uncheckedVariance]): Unit = preRhs = x + + def leadingTypeParams(using Context): List[TypeDef[T]] = paramss match + case (tparams @ (tparam: TypeDef[_]) :: _) :: _ => tparams.asInstanceOf[List[TypeDef[T]]] + case _ => Nil + + def trailingParamss(using Context): List[ParamClause[T]] = paramss match + case ((tparam: TypeDef[_]) :: _) :: paramss1 => paramss1 + case _ => paramss + + def termParamss(using Context): List[List[ValDef[T]]] = + (if ctx.erasedTypes then paramss else untpd.termParamssIn(paramss)) + .asInstanceOf[List[List[ValDef[T]]]] + } + + /** mods class name template or + * mods trait name template or + * mods type name = rhs or + * mods type name >: lo <: hi, if rhs = TypeBoundsTree(lo, hi) or + * mods type name >: lo <: hi = rhs if rhs = TypeBoundsTree(lo, hi, alias) and opaque in mods + */ + case class TypeDef[+T <: Untyped] private[ast] (name: TypeName, rhs: Tree[T])(implicit @constructorOnly src: SourceFile) + extends MemberDef[T], ValOrTypeDef[T] { + type ThisTree[+T <: Untyped] = TypeDef[T] + + /** Is this a definition of a class? */ + def isClassDef: Boolean = rhs.isInstanceOf[Template[?]] + + def isBackquoted: Boolean = hasAttachment(Backquoted) + } + + /** extends parents { self => body } + * @param parentsOrDerived A list of parents followed by a list of derived classes, + * if this is of class untpd.DerivingTemplate. + * Typed templates only have parents. 
+ */ + case class Template[+T <: Untyped] private[ast] (constr: DefDef[T], parentsOrDerived: List[Tree[T]], self: ValDef[T], private var preBody: LazyTreeList[T])(implicit @constructorOnly src: SourceFile) + extends DefTree[T] with WithLazyField[List[Tree[T]]] { + type ThisTree[+T <: Untyped] = Template[T] + def unforcedBody: LazyTreeList[T] = unforced + def unforced: LazyTreeList[T] = preBody + protected def force(x: List[Tree[T @uncheckedVariance]]): Unit = preBody = x + def body(using Context): List[Tree[T]] = forceIfLazy + + def parents: List[Tree[T]] = parentsOrDerived // overridden by DerivingTemplate + def derived: List[untpd.Tree] = Nil // overridden by DerivingTemplate + } + + + abstract class ImportOrExport[+T <: Untyped](implicit @constructorOnly src: SourceFile) + extends DenotingTree[T] { + type ThisTree[+T <: Untyped] <: ImportOrExport[T] + val expr: Tree[T] + val selectors: List[untpd.ImportSelector] + } + + /** import expr.selectors + * where a selector is either an untyped `Ident`, `name` or + * an untyped thicket consisting of `name` and `rename`. + */ + case class Import[+T <: Untyped] private[ast] (expr: Tree[T], selectors: List[untpd.ImportSelector])(implicit @constructorOnly src: SourceFile) + extends ImportOrExport[T] { + type ThisTree[+T <: Untyped] = Import[T] + } + + /** export expr.selectors + * where a selector is either an untyped `Ident`, `name` or + * an untyped thicket consisting of `name` and `rename`. 
+ */ + case class Export[+T <: Untyped] private[ast] (expr: Tree[T], selectors: List[untpd.ImportSelector])(implicit @constructorOnly src: SourceFile) + extends ImportOrExport[T] { + type ThisTree[+T <: Untyped] = Export[T] + } + + /** package pid { stats } */ + case class PackageDef[+T <: Untyped] private[ast] (pid: RefTree[T], stats: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + extends ProxyTree[T] with WithEndMarker[T] { + type ThisTree[+T <: Untyped] = PackageDef[T] + def forwardTo: RefTree[T] = pid + protected def srcName(using Context): Name = pid.name + } + + /** arg @annot */ + case class Annotated[+T <: Untyped] private[ast] (arg: Tree[T], annot: Tree[T])(implicit @constructorOnly src: SourceFile) + extends ProxyTree[T] { + type ThisTree[+T <: Untyped] = Annotated[T] + def forwardTo: Tree[T] = arg + } + + trait WithoutTypeOrPos[+T <: Untyped] extends Tree[T] { + override def withTypeUnchecked(tpe: Type): ThisTree[Type] = this.asInstanceOf[ThisTree[Type]] + override def span: Span = NoSpan + override def span_=(span: Span): Unit = {} + } + + /** Temporary class that results from translation of ModuleDefs + * (and possibly other statements). + * The contained trees will be integrated when transformed with + * a `transform(List[Tree])` call. 
+ */ + case class Thicket[+T <: Untyped](trees: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + extends Tree[T] with WithoutTypeOrPos[T] { + myTpe = NoType.asInstanceOf[T] + type ThisTree[+T <: Untyped] = Thicket[T] + + def mapElems[U >: T <: Untyped](op: Tree[T] => Tree[U]): Thicket[U] = { + val newTrees = trees.mapConserve(op) + if (trees eq newTrees) + this + else + Thicket[U](newTrees)(source).asInstanceOf[this.type] + } + + override def foreachInThicket(op: Tree[T] => Unit): Unit = + trees foreach (_.foreachInThicket(op)) + + override def isEmpty: Boolean = trees.isEmpty + override def toList: List[Tree[T]] = flatten(trees) + override def toString: String = if (isEmpty) "EmptyTree" else "Thicket(" + trees.mkString(", ") + ")" + override def span: Span = + def combine(s: Span, ts: List[Tree[T]]): Span = ts match + case t :: ts1 => combine(s.union(t.span), ts1) + case nil => s + combine(NoSpan, trees) + + override def withSpan(span: Span): this.type = + mapElems(_.withSpan(span)).asInstanceOf[this.type] + } + + class EmptyTree[T <: Untyped] extends Thicket(Nil)(NoSource) { + // assert(uniqueId != 1492) + override def withSpan(span: Span) = throw AssertionError("Cannot change span of EmptyTree") + } + + class EmptyValDef[T <: Untyped] extends ValDef[T]( + nme.WILDCARD, genericEmptyTree[T], genericEmptyTree[T])(NoSource) with WithoutTypeOrPos[T] { + myTpe = NoType.asInstanceOf[T] + setMods(untpd.Modifiers(PrivateLocal)) + override def isEmpty: Boolean = true + override def withSpan(span: Span) = throw AssertionError("Cannot change span of EmptyValDef") + } + + @sharable val theEmptyTree = new EmptyTree[Type]() + @sharable val theEmptyValDef = new EmptyValDef[Type]() + + def genericEmptyValDef[T <: Untyped]: ValDef[T] = theEmptyValDef.asInstanceOf[ValDef[T]] + def genericEmptyTree[T <: Untyped]: Thicket[T] = theEmptyTree.asInstanceOf[Thicket[T]] + + /** Tree that replaces a level 1 splices in pickled (level 0) quotes. 
+ * It is only used when picking quotes (will never be in a TASTy file). + * + * @param isTermHole If this hole is a term, otherwise it is a type hole. + * @param idx The index of the hole in it's enclosing level 0 quote. + * @param args The arguments of the splice to compute its content + * @param content Lambda that computes the content of the hole. This tree is empty when in a quote pickle. + * @param tpt Type of the hole + */ + case class Hole[+T <: Untyped](isTermHole: Boolean, idx: Int, args: List[Tree[T]], content: Tree[T], tpt: Tree[T])(implicit @constructorOnly src: SourceFile) extends Tree[T] { + type ThisTree[+T <: Untyped] <: Hole[T] + override def isTerm: Boolean = isTermHole + override def isType: Boolean = !isTermHole + } + + def flatten[T <: Untyped](trees: List[Tree[T]]): List[Tree[T]] = { + def recur(buf: ListBuffer[Tree[T]] | Null, remaining: List[Tree[T]]): ListBuffer[Tree[T]] | Null = + remaining match { + case Thicket(elems) :: remaining1 => + var buf1 = buf + if (buf1 == null) { + buf1 = new ListBuffer[Tree[T]] + var scanned = trees + while (scanned `ne` remaining) { + buf1 += scanned.head + scanned = scanned.tail + } + } + recur(recur(buf1, elems), remaining1) + case tree :: remaining1 => + if (buf != null) buf += tree + recur(buf, remaining1) + case nil => + buf + } + val buf = recur(null, trees) + if (buf != null) buf.toList else trees + } + + // ----- Lazy trees and tree sequences + + /** A tree that can have a lazy field + * The field is represented by some private `var` which is + * accessed by `unforced` and `force`. Forcing the field will + * set the `var` to the underlying value. + */ + trait WithLazyField[+T <: AnyRef] { + def unforced: T | Lazy[T] + protected def force(x: T @uncheckedVariance): Unit + def forceIfLazy(using Context): T = unforced match { + case lzy: Lazy[T @unchecked] => + val x = lzy.complete + force(x) + x + case x: T @ unchecked => x + } + } + + /** A base trait for lazy tree fields. 
+ * These can be instantiated with Lazy instances which + * can delay tree construction until the field is first demanded. + */ + trait Lazy[+T <: AnyRef] { + def complete(using Context): T + } + + // ----- Generic Tree Instances, inherited from `tpt` and `untpd`. + + abstract class Instance[T <: Untyped] { inst => + + type Tree = Trees.Tree[T] + type TypTree = Trees.TypTree[T] + type TermTree = Trees.TermTree[T] + type PatternTree = Trees.PatternTree[T] + type DenotingTree = Trees.DenotingTree[T] + type ProxyTree = Trees.ProxyTree[T] + type NameTree = Trees.NameTree[T] + type RefTree = Trees.RefTree[T] + type DefTree = Trees.DefTree[T] + type NamedDefTree = Trees.NamedDefTree[T] + type MemberDef = Trees.MemberDef[T] + type ValOrDefDef = Trees.ValOrDefDef[T] + type ValOrTypeDef = Trees.ValOrTypeDef[T] + type LazyTree = Trees.LazyTree[T] + type LazyTreeList = Trees.LazyTreeList[T] + type ParamClause = Trees.ParamClause[T] + + type Ident = Trees.Ident[T] + type SearchFailureIdent = Trees.SearchFailureIdent[T] + type Select = Trees.Select[T] + type SelectWithSig = Trees.SelectWithSig[T] + type This = Trees.This[T] + type Super = Trees.Super[T] + type Apply = Trees.Apply[T] + type TypeApply = Trees.TypeApply[T] + type GenericApply = Trees.GenericApply[T] + type Literal = Trees.Literal[T] + type New = Trees.New[T] + type Typed = Trees.Typed[T] + type NamedArg = Trees.NamedArg[T] + type Assign = Trees.Assign[T] + type Block = Trees.Block[T] + type If = Trees.If[T] + type InlineIf = Trees.InlineIf[T] + type Closure = Trees.Closure[T] + type Match = Trees.Match[T] + type InlineMatch = Trees.InlineMatch[T] + type CaseDef = Trees.CaseDef[T] + type Labeled = Trees.Labeled[T] + type Return = Trees.Return[T] + type WhileDo = Trees.WhileDo[T] + type Try = Trees.Try[T] + type SeqLiteral = Trees.SeqLiteral[T] + type JavaSeqLiteral = Trees.JavaSeqLiteral[T] + type Inlined = Trees.Inlined[T] + type TypeTree = Trees.TypeTree[T] + type InferredTypeTree = Trees.InferredTypeTree[T] + 
type SingletonTypeTree = Trees.SingletonTypeTree[T] + type RefinedTypeTree = Trees.RefinedTypeTree[T] + type AppliedTypeTree = Trees.AppliedTypeTree[T] + type LambdaTypeTree = Trees.LambdaTypeTree[T] + type TermLambdaTypeTree = Trees.TermLambdaTypeTree[T] + type MatchTypeTree = Trees.MatchTypeTree[T] + type ByNameTypeTree = Trees.ByNameTypeTree[T] + type TypeBoundsTree = Trees.TypeBoundsTree[T] + type Bind = Trees.Bind[T] + type Alternative = Trees.Alternative[T] + type UnApply = Trees.UnApply[T] + type ValDef = Trees.ValDef[T] + type DefDef = Trees.DefDef[T] + type TypeDef = Trees.TypeDef[T] + type Template = Trees.Template[T] + type Import = Trees.Import[T] + type Export = Trees.Export[T] + type ImportOrExport = Trees.ImportOrExport[T] + type PackageDef = Trees.PackageDef[T] + type Annotated = Trees.Annotated[T] + type Thicket = Trees.Thicket[T] + + type Hole = Trees.Hole[T] + + @sharable val EmptyTree: Thicket = genericEmptyTree + @sharable val EmptyValDef: ValDef = genericEmptyValDef + @sharable val ContextualEmptyTree: Thicket = new EmptyTree() // an empty tree marking a contextual closure + + // ----- Auxiliary creation methods ------------------ + + def Thicket(): Thicket = EmptyTree + def Thicket(x1: Tree, x2: Tree)(implicit src: SourceFile): Thicket = new Thicket(x1 :: x2 :: Nil) + def Thicket(x1: Tree, x2: Tree, x3: Tree)(implicit src: SourceFile): Thicket = new Thicket(x1 :: x2 :: x3 :: Nil) + def Thicket(xs: List[Tree])(implicit src: SourceFile) = new Thicket(xs) + + def flatTree(xs: List[Tree])(implicit src: SourceFile): Tree = flatten(xs) match { + case x :: Nil => x + case ys => Thicket(ys) + } + + // ----- Helper classes for copying, transforming, accumulating ----------------- + + val cpy: TreeCopier + + /** A class for copying trees. The copy methods avoid creating a new tree + * If all arguments stay the same. + * + * Note: Some of the copy methods take a context. 
+ * These are exactly those methods that are overridden in TypedTreeCopier + * so that they selectively retype themselves. Retyping needs a context. + */ + abstract class TreeCopier { + protected def postProcess(tree: Tree, copied: untpd.Tree): copied.ThisTree[T] + protected def postProcess(tree: Tree, copied: untpd.MemberDef): copied.ThisTree[T] + + /** Source of the copied tree */ + protected def sourceFile(tree: Tree): SourceFile = tree.source + + protected def finalize(tree: Tree, copied: untpd.Tree): copied.ThisTree[T] = + Stats.record(s"TreeCopier.finalize/${tree.getClass == copied.getClass}") + postProcess(tree, copied.withSpan(tree.span).withAttachmentsFrom(tree)) + + protected def finalize(tree: Tree, copied: untpd.MemberDef): copied.ThisTree[T] = + Stats.record(s"TreeCopier.finalize/${tree.getClass == copied.getClass}") + postProcess(tree, copied.withSpan(tree.span).withAttachmentsFrom(tree)) + + def Ident(tree: Tree)(name: Name)(using Context): Ident = tree match { + case tree: Ident if name == tree.name => tree + case _ => finalize(tree, untpd.Ident(name)(sourceFile(tree))) + } + def Select(tree: Tree)(qualifier: Tree, name: Name)(using Context): Select = tree match { + case tree: SelectWithSig => + if ((qualifier eq tree.qualifier) && (name == tree.name)) tree + else finalize(tree, SelectWithSig(qualifier, name, tree.sig)(sourceFile(tree))) + case tree: Select if (qualifier eq tree.qualifier) && (name == tree.name) => tree + case _ => finalize(tree, untpd.Select(qualifier, name)(sourceFile(tree))) + } + /** Copy Ident or Select trees */ + def Ref(tree: RefTree)(name: Name)(using Context): RefTree = tree match { + case Ident(_) => Ident(tree)(name) + case Select(qual, _) => Select(tree)(qual, name) + } + def This(tree: Tree)(qual: untpd.Ident)(using Context): This = tree match { + case tree: This if (qual eq tree.qual) => tree + case _ => finalize(tree, untpd.This(qual)(sourceFile(tree))) + } + def Super(tree: Tree)(qual: Tree, mix: untpd.Ident)(using 
Context): Super = tree match { + case tree: Super if (qual eq tree.qual) && (mix eq tree.mix) => tree + case _ => finalize(tree, untpd.Super(qual, mix)(sourceFile(tree))) + } + def Apply(tree: Tree)(fun: Tree, args: List[Tree])(using Context): Apply = tree match { + case tree: Apply if (fun eq tree.fun) && (args eq tree.args) => tree + case _ => finalize(tree, untpd.Apply(fun, args)(sourceFile(tree))) + //.ensuring(res => res.uniqueId != 2213, s"source = $tree, ${tree.uniqueId}, ${tree.span}") + } + def TypeApply(tree: Tree)(fun: Tree, args: List[Tree])(using Context): TypeApply = tree match { + case tree: TypeApply if (fun eq tree.fun) && (args eq tree.args) => tree + case _ => finalize(tree, untpd.TypeApply(fun, args)(sourceFile(tree))) + } + def Literal(tree: Tree)(const: Constant)(using Context): Literal = tree match { + case tree: Literal if const == tree.const => tree + case _ => finalize(tree, untpd.Literal(const)(sourceFile(tree))) + } + def New(tree: Tree)(tpt: Tree)(using Context): New = tree match { + case tree: New if (tpt eq tree.tpt) => tree + case _ => finalize(tree, untpd.New(tpt)(sourceFile(tree))) + } + def Typed(tree: Tree)(expr: Tree, tpt: Tree)(using Context): Typed = tree match { + case tree: Typed if (expr eq tree.expr) && (tpt eq tree.tpt) => tree + case tree => finalize(tree, untpd.Typed(expr, tpt)(sourceFile(tree))) + } + def NamedArg(tree: Tree)(name: Name, arg: Tree)(using Context): NamedArg = tree match { + case tree: NamedArg if (name == tree.name) && (arg eq tree.arg) => tree + case _ => finalize(tree, untpd.NamedArg(name, arg)(sourceFile(tree))) + } + def Assign(tree: Tree)(lhs: Tree, rhs: Tree)(using Context): Assign = tree match { + case tree: Assign if (lhs eq tree.lhs) && (rhs eq tree.rhs) => tree + case _ => finalize(tree, untpd.Assign(lhs, rhs)(sourceFile(tree))) + } + def Block(tree: Tree)(stats: List[Tree], expr: Tree)(using Context): Block = tree match { + case tree: Block if (stats eq tree.stats) && (expr eq tree.expr) => 
tree + case _ => finalize(tree, untpd.Block(stats, expr)(sourceFile(tree))) + } + def If(tree: Tree)(cond: Tree, thenp: Tree, elsep: Tree)(using Context): If = tree match { + case tree: If if (cond eq tree.cond) && (thenp eq tree.thenp) && (elsep eq tree.elsep) => tree + case tree: InlineIf => finalize(tree, untpd.InlineIf(cond, thenp, elsep)(sourceFile(tree))) + case _ => finalize(tree, untpd.If(cond, thenp, elsep)(sourceFile(tree))) + } + def Closure(tree: Tree)(env: List[Tree], meth: Tree, tpt: Tree)(using Context): Closure = tree match { + case tree: Closure if (env eq tree.env) && (meth eq tree.meth) && (tpt eq tree.tpt) => tree + case _ => finalize(tree, untpd.Closure(env, meth, tpt)(sourceFile(tree))) + } + def Match(tree: Tree)(selector: Tree, cases: List[CaseDef])(using Context): Match = tree match { + case tree: Match if (selector eq tree.selector) && (cases eq tree.cases) => tree + case tree: InlineMatch => finalize(tree, untpd.InlineMatch(selector, cases)(sourceFile(tree))) + case _ => finalize(tree, untpd.Match(selector, cases)(sourceFile(tree))) + } + def CaseDef(tree: Tree)(pat: Tree, guard: Tree, body: Tree)(using Context): CaseDef = tree match { + case tree: CaseDef if (pat eq tree.pat) && (guard eq tree.guard) && (body eq tree.body) => tree + case _ => finalize(tree, untpd.CaseDef(pat, guard, body)(sourceFile(tree))) + } + def Labeled(tree: Tree)(bind: Bind, expr: Tree)(using Context): Labeled = tree match { + case tree: Labeled if (bind eq tree.bind) && (expr eq tree.expr) => tree + case _ => finalize(tree, untpd.Labeled(bind, expr)(sourceFile(tree))) + } + def Return(tree: Tree)(expr: Tree, from: Tree)(using Context): Return = tree match { + case tree: Return if (expr eq tree.expr) && (from eq tree.from) => tree + case _ => finalize(tree, untpd.Return(expr, from)(sourceFile(tree))) + } + def WhileDo(tree: Tree)(cond: Tree, body: Tree)(using Context): WhileDo = tree match { + case tree: WhileDo if (cond eq tree.cond) && (body eq tree.body) => 
tree + case _ => finalize(tree, untpd.WhileDo(cond, body)(sourceFile(tree))) + } + def Try(tree: Tree)(expr: Tree, cases: List[CaseDef], finalizer: Tree)(using Context): Try = tree match { + case tree: Try if (expr eq tree.expr) && (cases eq tree.cases) && (finalizer eq tree.finalizer) => tree + case _ => finalize(tree, untpd.Try(expr, cases, finalizer)(sourceFile(tree))) + } + def SeqLiteral(tree: Tree)(elems: List[Tree], elemtpt: Tree)(using Context): SeqLiteral = tree match { + case tree: JavaSeqLiteral => + if ((elems eq tree.elems) && (elemtpt eq tree.elemtpt)) tree + else finalize(tree, untpd.JavaSeqLiteral(elems, elemtpt)) + case tree: SeqLiteral if (elems eq tree.elems) && (elemtpt eq tree.elemtpt) => tree + case _ => finalize(tree, untpd.SeqLiteral(elems, elemtpt)(sourceFile(tree))) + } + def Inlined(tree: Tree)(call: tpd.Tree, bindings: List[MemberDef], expansion: Tree)(using Context): Inlined = tree match { + case tree: Inlined if (call eq tree.call) && (bindings eq tree.bindings) && (expansion eq tree.expansion) => tree + case _ => finalize(tree, untpd.Inlined(call, bindings, expansion)(sourceFile(tree))) + } + def SingletonTypeTree(tree: Tree)(ref: Tree)(using Context): SingletonTypeTree = tree match { + case tree: SingletonTypeTree if (ref eq tree.ref) => tree + case _ => finalize(tree, untpd.SingletonTypeTree(ref)(sourceFile(tree))) + } + def RefinedTypeTree(tree: Tree)(tpt: Tree, refinements: List[Tree])(using Context): RefinedTypeTree = tree match { + case tree: RefinedTypeTree if (tpt eq tree.tpt) && (refinements eq tree.refinements) => tree + case _ => finalize(tree, untpd.RefinedTypeTree(tpt, refinements)(sourceFile(tree))) + } + def AppliedTypeTree(tree: Tree)(tpt: Tree, args: List[Tree])(using Context): AppliedTypeTree = tree match { + case tree: AppliedTypeTree if (tpt eq tree.tpt) && (args eq tree.args) => tree + case _ => finalize(tree, untpd.AppliedTypeTree(tpt, args)(sourceFile(tree))) + } + def LambdaTypeTree(tree: Tree)(tparams: 
List[TypeDef], body: Tree)(using Context): LambdaTypeTree = tree match { + case tree: LambdaTypeTree if (tparams eq tree.tparams) && (body eq tree.body) => tree + case _ => finalize(tree, untpd.LambdaTypeTree(tparams, body)(sourceFile(tree))) + } + def TermLambdaTypeTree(tree: Tree)(params: List[ValDef], body: Tree)(using Context): TermLambdaTypeTree = tree match { + case tree: TermLambdaTypeTree if (params eq tree.params) && (body eq tree.body) => tree + case _ => finalize(tree, untpd.TermLambdaTypeTree(params, body)(sourceFile(tree))) + } + def MatchTypeTree(tree: Tree)(bound: Tree, selector: Tree, cases: List[CaseDef])(using Context): MatchTypeTree = tree match { + case tree: MatchTypeTree if (bound eq tree.bound) && (selector eq tree.selector) && (cases eq tree.cases) => tree + case _ => finalize(tree, untpd.MatchTypeTree(bound, selector, cases)(sourceFile(tree))) + } + def ByNameTypeTree(tree: Tree)(result: Tree)(using Context): ByNameTypeTree = tree match { + case tree: ByNameTypeTree if (result eq tree.result) => tree + case _ => finalize(tree, untpd.ByNameTypeTree(result)(sourceFile(tree))) + } + def TypeBoundsTree(tree: Tree)(lo: Tree, hi: Tree, alias: Tree)(using Context): TypeBoundsTree = tree match { + case tree: TypeBoundsTree if (lo eq tree.lo) && (hi eq tree.hi) && (alias eq tree.alias) => tree + case _ => finalize(tree, untpd.TypeBoundsTree(lo, hi, alias)(sourceFile(tree))) + } + def Bind(tree: Tree)(name: Name, body: Tree)(using Context): Bind = tree match { + case tree: Bind if (name eq tree.name) && (body eq tree.body) => tree + case _ => finalize(tree, untpd.Bind(name, body)(sourceFile(tree))) + } + def Alternative(tree: Tree)(trees: List[Tree])(using Context): Alternative = tree match { + case tree: Alternative if (trees eq tree.trees) => tree + case _ => finalize(tree, untpd.Alternative(trees)(sourceFile(tree))) + } + def UnApply(tree: Tree)(fun: Tree, implicits: List[Tree], patterns: List[Tree])(using Context): UnApply = tree match { + case 
tree: UnApply if (fun eq tree.fun) && (implicits eq tree.implicits) && (patterns eq tree.patterns) => tree + case _ => finalize(tree, untpd.UnApply(fun, implicits, patterns)(sourceFile(tree))) + } + def ValDef(tree: Tree)(name: TermName, tpt: Tree, rhs: LazyTree)(using Context): ValDef = tree match { + case tree: ValDef if (name == tree.name) && (tpt eq tree.tpt) && (rhs eq tree.unforcedRhs) => tree + case _ => finalize(tree, untpd.ValDef(name, tpt, rhs)(sourceFile(tree))) + } + def DefDef(tree: Tree)(name: TermName, paramss: List[ParamClause], tpt: Tree, rhs: LazyTree)(using Context): DefDef = tree match { + case tree: DefDef if (name == tree.name) && (paramss eq tree.paramss) && (tpt eq tree.tpt) && (rhs eq tree.unforcedRhs) => tree + case _ => finalize(tree, untpd.DefDef(name, paramss, tpt, rhs)(sourceFile(tree))) + } + def TypeDef(tree: Tree)(name: TypeName, rhs: Tree)(using Context): TypeDef = tree match { + case tree: TypeDef if (name == tree.name) && (rhs eq tree.rhs) => tree + case _ => finalize(tree, untpd.TypeDef(name, rhs)(sourceFile(tree))) + } + def Template(tree: Tree)(constr: DefDef, parents: List[Tree], derived: List[untpd.Tree], self: ValDef, body: LazyTreeList)(using Context): Template = tree match { + case tree: Template if (constr eq tree.constr) && (parents eq tree.parents) && (derived eq tree.derived) && (self eq tree.self) && (body eq tree.unforcedBody) => tree + case tree => finalize(tree, untpd.Template(constr, parents, derived, self, body)(sourceFile(tree))) + } + def Import(tree: Tree)(expr: Tree, selectors: List[untpd.ImportSelector])(using Context): Import = tree match { + case tree: Import if (expr eq tree.expr) && (selectors eq tree.selectors) => tree + case _ => finalize(tree, untpd.Import(expr, selectors)(sourceFile(tree))) + } + def Export(tree: Tree)(expr: Tree, selectors: List[untpd.ImportSelector])(using Context): Export = tree match { + case tree: Export if (expr eq tree.expr) && (selectors eq tree.selectors) => tree + case _ 
=> finalize(tree, untpd.Export(expr, selectors)(sourceFile(tree))) + } + def PackageDef(tree: Tree)(pid: RefTree, stats: List[Tree])(using Context): PackageDef = tree match { + case tree: PackageDef if (pid eq tree.pid) && (stats eq tree.stats) => tree + case _ => finalize(tree, untpd.PackageDef(pid, stats)(sourceFile(tree))) + } + def Annotated(tree: Tree)(arg: Tree, annot: Tree)(using Context): Annotated = tree match { + case tree: Annotated if (arg eq tree.arg) && (annot eq tree.annot) => tree + case _ => finalize(tree, untpd.Annotated(arg, annot)(sourceFile(tree))) + } + def Thicket(tree: Tree)(trees: List[Tree])(using Context): Thicket = tree match { + case tree: Thicket if (trees eq tree.trees) => tree + case _ => finalize(tree, untpd.Thicket(trees)(sourceFile(tree))) + } + def Hole(tree: Tree)(isTerm: Boolean, idx: Int, args: List[Tree], content: Tree, tpt: Tree)(using Context): Hole = tree match { + case tree: Hole if isTerm == tree.isTerm && idx == tree.idx && args.eq(tree.args) && content.eq(tree.content) && tpt.eq(tree.tpt) => tree + case _ => finalize(tree, untpd.Hole(isTerm, idx, args, content, tpt)(sourceFile(tree))) + } + + // Copier methods with default arguments; these demand that the original tree + // is of the same class as the copy. We only include trees with more than 2 elements here. 
+ def If(tree: If)(cond: Tree = tree.cond, thenp: Tree = tree.thenp, elsep: Tree = tree.elsep)(using Context): If = + If(tree: Tree)(cond, thenp, elsep) + def Closure(tree: Closure)(env: List[Tree] = tree.env, meth: Tree = tree.meth, tpt: Tree = tree.tpt)(using Context): Closure = + Closure(tree: Tree)(env, meth, tpt) + def CaseDef(tree: CaseDef)(pat: Tree = tree.pat, guard: Tree = tree.guard, body: Tree = tree.body)(using Context): CaseDef = + CaseDef(tree: Tree)(pat, guard, body) + def Try(tree: Try)(expr: Tree = tree.expr, cases: List[CaseDef] = tree.cases, finalizer: Tree = tree.finalizer)(using Context): Try = + Try(tree: Tree)(expr, cases, finalizer) + def UnApply(tree: UnApply)(fun: Tree = tree.fun, implicits: List[Tree] = tree.implicits, patterns: List[Tree] = tree.patterns)(using Context): UnApply = + UnApply(tree: Tree)(fun, implicits, patterns) + def ValDef(tree: ValDef)(name: TermName = tree.name, tpt: Tree = tree.tpt, rhs: LazyTree = tree.unforcedRhs)(using Context): ValDef = + ValDef(tree: Tree)(name, tpt, rhs) + def DefDef(tree: DefDef)(name: TermName = tree.name, paramss: List[ParamClause] = tree.paramss, tpt: Tree = tree.tpt, rhs: LazyTree = tree.unforcedRhs)(using Context): DefDef = + DefDef(tree: Tree)(name, paramss, tpt, rhs) + def TypeDef(tree: TypeDef)(name: TypeName = tree.name, rhs: Tree = tree.rhs)(using Context): TypeDef = + TypeDef(tree: Tree)(name, rhs) + def Template(tree: Template)(constr: DefDef = tree.constr, parents: List[Tree] = tree.parents, derived: List[untpd.Tree] = tree.derived, self: ValDef = tree.self, body: LazyTreeList = tree.unforcedBody)(using Context): Template = + Template(tree: Tree)(constr, parents, derived, self, body) + def Hole(tree: Hole)(isTerm: Boolean = tree.isTerm, idx: Int = tree.idx, args: List[Tree] = tree.args, content: Tree = tree.content, tpt: Tree = tree.tpt)(using Context): Hole = + Hole(tree: Tree)(isTerm, idx, args, content, tpt) + + } + + /** Hook to indicate that a transform of some subtree should 
be skipped */ + protected def skipTransform(tree: Tree)(using Context): Boolean = false + + /** For untyped trees, this is just the identity. + * For typed trees, a context derived form `ctx` that records `call` as the + * innermost enclosing call for which the inlined version is currently + * processed. + */ + protected def inlineContext(call: tpd.Tree)(using Context): Context = ctx + + /** The context to use when mapping or accumulating over a tree */ + def localCtx(tree: Tree)(using Context): Context + + /** The context to use when transforming a tree. + * It ensures that the source is correct, and that the local context is used if + * that's necessary for transforming the whole tree. + * TODO: ensure transform is always called with the correct context as argument + * @see https://github.com/lampepfl/dotty/pull/13880#discussion_r836395977 + */ + def transformCtx(tree: Tree)(using Context): Context = + val sourced = + if tree.source.exists && tree.source != ctx.source + then ctx.withSource(tree.source) + else ctx + tree match + case t: (MemberDef | PackageDef | LambdaTypeTree | TermLambdaTypeTree) => + localCtx(t)(using sourced) + case _ => + sourced + + abstract class TreeMap(val cpy: TreeCopier = inst.cpy) { self: TreeMap @retains(caps.cap) => + def transform(tree: Tree)(using Context): Tree = { + inContext(transformCtx(tree)) { + Stats.record(s"TreeMap.transform/$getClass") + if (skipTransform(tree)) tree + else tree match { + case Ident(name) => + tree + case Select(qualifier, name) => + cpy.Select(tree)(transform(qualifier), name) + case This(qual) => + tree + case Super(qual, mix) => + cpy.Super(tree)(transform(qual), mix) + case Apply(fun, args) => + cpy.Apply(tree)(transform(fun), transform(args)) + case TypeApply(fun, args) => + cpy.TypeApply(tree)(transform(fun), transform(args)) + case Literal(const) => + tree + case New(tpt) => + cpy.New(tree)(transform(tpt)) + case Typed(expr, tpt) => + cpy.Typed(tree)(transform(expr), transform(tpt)) + case 
NamedArg(name, arg) => + cpy.NamedArg(tree)(name, transform(arg)) + case Assign(lhs, rhs) => + cpy.Assign(tree)(transform(lhs), transform(rhs)) + case blk: Block => + transformBlock(blk) + case If(cond, thenp, elsep) => + cpy.If(tree)(transform(cond), transform(thenp), transform(elsep)) + case Closure(env, meth, tpt) => + cpy.Closure(tree)(transform(env), transform(meth), transform(tpt)) + case Match(selector, cases) => + cpy.Match(tree)(transform(selector), transformSub(cases)) + case CaseDef(pat, guard, body) => + cpy.CaseDef(tree)(transform(pat), transform(guard), transform(body)) + case Labeled(bind, expr) => + cpy.Labeled(tree)(transformSub(bind), transform(expr)) + case Return(expr, from) => + cpy.Return(tree)(transform(expr), transformSub(from)) + case WhileDo(cond, body) => + cpy.WhileDo(tree)(transform(cond), transform(body)) + case Try(block, cases, finalizer) => + cpy.Try(tree)(transform(block), transformSub(cases), transform(finalizer)) + case SeqLiteral(elems, elemtpt) => + cpy.SeqLiteral(tree)(transform(elems), transform(elemtpt)) + case Inlined(call, bindings, expansion) => + cpy.Inlined(tree)(call, transformSub(bindings), transform(expansion)(using inlineContext(call))) + case TypeTree() => + tree + case SingletonTypeTree(ref) => + cpy.SingletonTypeTree(tree)(transform(ref)) + case RefinedTypeTree(tpt, refinements) => + cpy.RefinedTypeTree(tree)(transform(tpt), transformSub(refinements)) + case AppliedTypeTree(tpt, args) => + cpy.AppliedTypeTree(tree)(transform(tpt), transform(args)) + case LambdaTypeTree(tparams, body) => + cpy.LambdaTypeTree(tree)(transformSub(tparams), transform(body)) + case TermLambdaTypeTree(params, body) => + cpy.TermLambdaTypeTree(tree)(transformSub(params), transform(body)) + case MatchTypeTree(bound, selector, cases) => + cpy.MatchTypeTree(tree)(transform(bound), transform(selector), transformSub(cases)) + case ByNameTypeTree(result) => + cpy.ByNameTypeTree(tree)(transform(result)) + case TypeBoundsTree(lo, hi, alias) => + 
cpy.TypeBoundsTree(tree)(transform(lo), transform(hi), transform(alias)) + case Bind(name, body) => + cpy.Bind(tree)(name, transform(body)) + case Alternative(trees) => + cpy.Alternative(tree)(transform(trees)) + case UnApply(fun, implicits, patterns) => + cpy.UnApply(tree)(transform(fun), transform(implicits), transform(patterns)) + case EmptyValDef => + tree + case tree @ ValDef(name, tpt, _) => + val tpt1 = transform(tpt) + val rhs1 = transform(tree.rhs) + cpy.ValDef(tree)(name, tpt1, rhs1) + case tree @ DefDef(name, paramss, tpt, _) => + cpy.DefDef(tree)(name, transformParamss(paramss), transform(tpt), transform(tree.rhs)) + case tree @ TypeDef(name, rhs) => + cpy.TypeDef(tree)(name, transform(rhs)) + case tree @ Template(constr, parents, self, _) if tree.derived.isEmpty => + cpy.Template(tree)(transformSub(constr), transform(tree.parents), Nil, transformSub(self), transformStats(tree.body, tree.symbol)) + case Import(expr, selectors) => + cpy.Import(tree)(transform(expr), selectors) + case Export(expr, selectors) => + cpy.Export(tree)(transform(expr), selectors) + case PackageDef(pid, stats) => + cpy.PackageDef(tree)(transformSub(pid), transformStats(stats, ctx.owner)) + case Annotated(arg, annot) => + cpy.Annotated(tree)(transform(arg), transform(annot)) + case Thicket(trees) => + val trees1 = transform(trees) + if (trees1 eq trees) tree else Thicket(trees1) + case tree @ Hole(_, _, args, content, tpt) => + cpy.Hole(tree)(args = transform(args), content = transform(content), tpt = transform(tpt)) + case _ => + transformMoreCases(tree) + } + } + } + + def transformStats(trees: List[Tree], exprOwner: Symbol)(using Context): List[Tree] = + transform(trees) + def transformBlock(blk: Block)(using Context): Block = + cpy.Block(blk)(transformStats(blk.stats, ctx.owner), transform(blk.expr)) + def transform(trees: List[Tree])(using Context): List[Tree] = + flatten(trees mapConserve (transform(_))) + def transformSub[Tr <: Tree](tree: Tr)(using Context): Tr = + 
transform(tree).asInstanceOf[Tr] + def transformSub[Tr <: Tree](trees: List[Tr])(using Context): List[Tr] = + transform(trees).asInstanceOf[List[Tr]] + def transformParams(params: ParamClause)(using Context): ParamClause = + transform(params).asInstanceOf[ParamClause] + def transformParamss(paramss: List[ParamClause])(using Context): List[ParamClause] = + paramss.mapConserve(transformParams) + + protected def transformMoreCases(tree: Tree)(using Context): Tree = { + assert(ctx.reporter.errorsReported) + tree + } + } + + abstract class TreeAccumulator[X] { self: TreeAccumulator[X] @retains(caps.cap) => + // Ties the knot of the traversal: call `foldOver(x, tree))` to dive in the `tree` node. + def apply(x: X, tree: Tree)(using Context): X + + def apply(x: X, trees: List[Tree])(using Context): X = + def fold(x: X, trees: List[Tree]): X = trees match + case tree :: rest => fold(apply(x, tree), rest) + case Nil => x + fold(x, trees) + + def foldOver(x: X, tree: Tree)(using Context): X = + if (tree.source != ctx.source && tree.source.exists) + foldOver(x, tree)(using ctx.withSource(tree.source)) + else { + Stats.record(s"TreeAccumulator.foldOver/$getClass") + tree match { + case Ident(name) => + x + case Select(qualifier, name) => + this(x, qualifier) + case This(qual) => + x + case Super(qual, mix) => + this(x, qual) + case Apply(fun, args) => + this(this(x, fun), args) + case TypeApply(fun, args) => + this(this(x, fun), args) + case Literal(const) => + x + case New(tpt) => + this(x, tpt) + case Typed(expr, tpt) => + this(this(x, expr), tpt) + case NamedArg(name, arg) => + this(x, arg) + case Assign(lhs, rhs) => + this(this(x, lhs), rhs) + case Block(stats, expr) => + this(this(x, stats), expr) + case If(cond, thenp, elsep) => + this(this(this(x, cond), thenp), elsep) + case Closure(env, meth, tpt) => + this(this(this(x, env), meth), tpt) + case Match(selector, cases) => + this(this(x, selector), cases) + case CaseDef(pat, guard, body) => + this(this(this(x, pat), 
guard), body) + case Labeled(bind, expr) => + this(this(x, bind), expr) + case Return(expr, from) => + this(this(x, expr), from) + case WhileDo(cond, body) => + this(this(x, cond), body) + case Try(block, handler, finalizer) => + this(this(this(x, block), handler), finalizer) + case SeqLiteral(elems, elemtpt) => + this(this(x, elems), elemtpt) + case Inlined(call, bindings, expansion) => + this(this(x, bindings), expansion)(using inlineContext(call)) + case TypeTree() => + x + case SingletonTypeTree(ref) => + this(x, ref) + case RefinedTypeTree(tpt, refinements) => + this(this(x, tpt), refinements) + case AppliedTypeTree(tpt, args) => + this(this(x, tpt), args) + case LambdaTypeTree(tparams, body) => + inContext(localCtx(tree)) { + this(this(x, tparams), body) + } + case TermLambdaTypeTree(params, body) => + inContext(localCtx(tree)) { + this(this(x, params), body) + } + case MatchTypeTree(bound, selector, cases) => + this(this(this(x, bound), selector), cases) + case ByNameTypeTree(result) => + this(x, result) + case TypeBoundsTree(lo, hi, alias) => + this(this(this(x, lo), hi), alias) + case Bind(name, body) => + this(x, body) + case Alternative(trees) => + this(x, trees) + case UnApply(fun, implicits, patterns) => + this(this(this(x, fun), implicits), patterns) + case tree @ ValDef(_, tpt, _) => + inContext(localCtx(tree)) { + this(this(x, tpt), tree.rhs) + } + case tree @ DefDef(_, paramss, tpt, _) => + inContext(localCtx(tree)) { + this(this(paramss.foldLeft(x)(apply), tpt), tree.rhs) + } + case TypeDef(_, rhs) => + inContext(localCtx(tree)) { + this(x, rhs) + } + case tree @ Template(constr, parents, self, _) if tree.derived.isEmpty => + this(this(this(this(x, constr), parents), self), tree.body) + case Import(expr, _) => + this(x, expr) + case Export(expr, _) => + this(x, expr) + case PackageDef(pid, stats) => + this(this(x, pid), stats)(using localCtx(tree)) + case Annotated(arg, annot) => + this(this(x, arg), annot) + case Thicket(ts) => + this(x, ts) + 
case Hole(_, _, args, content, tpt) => + this(this(this(x, args), content), tpt) + case _ => + foldMoreCases(x, tree) + } + } + + def foldMoreCases(x: X, tree: Tree)(using Context): X = { + assert(ctx.reporter.hasUnreportedErrors + || ctx.reporter.errorsReported + || ctx.mode.is(Mode.Interactive), tree) + // In interactive mode, errors might come from previous runs. + // In case of errors it may be that typed trees point to untyped ones. + // The IDE can still traverse inside such trees, either in the run where errors + // are reported, or in subsequent ones. + x + } + } + + abstract class TreeTraverser extends TreeAccumulator[Unit] { + def traverse(tree: Tree)(using Context): Unit + def traverse(trees: List[Tree])(using Context) = apply((), trees) + def apply(x: Unit, tree: Tree)(using Context): Unit = traverse(tree) + protected def traverseChildren(tree: Tree)(using Context): Unit = foldOver((), tree) + } + + /** Fold `f` over all tree nodes, in depth-first, prefix order */ + class DeepFolder[X](f: (X, Tree) => X) extends TreeAccumulator[X] { + def apply(x: X, tree: Tree)(using Context): X = foldOver(f(x, tree), tree) + } + + /** Fold `f` over all tree nodes, in depth-first, prefix order, but don't visit + * subtrees where `f` returns a different result for the root, i.e. `f(x, root) ne x`. 
+ */ + class ShallowFolder[X](f: (X, Tree) => X) extends TreeAccumulator[X] { + def apply(x: X, tree: Tree)(using Context): X = { + val x1 = f(x, tree) + if (x1.asInstanceOf[AnyRef] ne x.asInstanceOf[AnyRef]) x1 + else foldOver(x1, tree) + } + } + + def rename(tree: NameTree, newName: Name)(using Context): tree.ThisTree[T] = { + tree match { + case tree: Ident => cpy.Ident(tree)(newName) + case tree: Select => cpy.Select(tree)(tree.qualifier, newName) + case tree: Bind => cpy.Bind(tree)(newName, tree.body) + case tree: ValDef => cpy.ValDef(tree)(name = newName.asTermName) + case tree: DefDef => cpy.DefDef(tree)(name = newName.asTermName) + case tree: TypeDef => cpy.TypeDef(tree)(name = newName.asTypeName) + } + }.asInstanceOf[tree.ThisTree[T]] + + object TypeDefs: + def unapply(xs: List[Tree]): Option[List[TypeDef]] = xs match + case (x: TypeDef) :: _ => Some(xs.asInstanceOf[List[TypeDef]]) + case _ => None + + object ValDefs: + def unapply(xs: List[Tree]): Option[List[ValDef]] = xs match + case Nil => Some(Nil) + case (x: ValDef) :: _ => Some(xs.asInstanceOf[List[ValDef]]) + case _ => None + + def termParamssIn(paramss: List[ParamClause]): List[List[ValDef]] = paramss match + case ValDefs(vparams) :: paramss1 => + val paramss2 = termParamssIn(paramss1) + if paramss2 eq paramss1 then paramss.asInstanceOf[List[List[ValDef]]] + else vparams :: paramss2 + case _ :: paramss1 => + termParamssIn(paramss1) + case nil => + Nil + + /** If `tparams` is non-empty, add it to the left `paramss`, merging + * it with a leading type parameter list of `paramss`, if one exists. 
+ */ + def joinParams(tparams: List[TypeDef], paramss: List[ParamClause]): List[ParamClause] = + if tparams.isEmpty then paramss + else paramss match + case TypeDefs(tparams1) :: paramss1 => (tparams ++ tparams1) :: paramss1 + case _ => tparams :: paramss + + def isTermOnly(paramss: List[ParamClause]): Boolean = paramss match + case Nil => true + case params :: paramss1 => + params match + case (param: untpd.TypeDef) :: _ => false + case _ => isTermOnly(paramss1) + + def asTermOnly(paramss: List[ParamClause]): List[List[ValDef]] = + assert(isTermOnly(paramss)) + paramss.asInstanceOf[List[List[ValDef]]] + + /** Delegate to FunProto or FunProtoTyped depending on whether the prefix is `untpd` or `tpd`. */ + protected def FunProto(args: List[Tree], resType: Type)(using Context): ProtoTypes.FunProto + + /** Construct the application `$receiver.$method[$targs]($args)` using overloading resolution + * to find a matching overload of `$method` if necessary. + * This is useful when overloading resolution needs to be performed in a phase after typer. + * Note that this will not perform any kind of implicit search. + * + * @param expectedType An expected type of the application used to guide overloading resolution + */ + def applyOverloaded( + receiver: tpd.Tree, method: TermName, args: List[Tree], targs: List[Type], + expectedType: Type)(using parentCtx: Context): tpd.Tree = { + given ctx: Context = parentCtx.retractMode(Mode.ImplicitsEnabled) + import dotty.tools.dotc.ast.tpd.TreeOps + + val typer = ctx.typer + val proto = FunProto(args, expectedType) + val denot = receiver.tpe.member(method) + if !denot.exists then + overload.println(i"members = ${receiver.tpe.decls}") + report.error(em"no member $receiver . 
$method", receiver.srcPos) + val selected = + if (denot.isOverloaded) { + def typeParamCount(tp: Type) = tp.widen match { + case tp: PolyType => tp.paramInfos.length + case _ => 0 + } + val allAlts = denot.alternatives + .map(denot => TermRef(receiver.tpe, denot.symbol)) + .filter(tr => typeParamCount(tr) == targs.length) + .filter { _.widen match { + case MethodTpe(_, _, x: MethodType) => !x.isImplicitMethod + case _ => true + }} + val alternatives = ctx.typer.resolveOverloaded(allAlts, proto) + assert(alternatives.size == 1, + i"${if (alternatives.isEmpty) "no" else "multiple"} overloads available for " + + i"$method on ${receiver.tpe.widenDealiasKeepAnnots} with targs: $targs%, %; args: $args%, %; expectedType: $expectedType." + + i"all alternatives: ${allAlts.map(_.symbol.showDcl).mkString(", ")}\n" + + i"matching alternatives: ${alternatives.map(_.symbol.showDcl).mkString(", ")}.") // this is parsed from bytecode tree. there's nothing user can do about it + alternatives.head + } + else TermRef(receiver.tpe, denot.symbol) + val fun = receiver.select(selected).appliedToTypes(targs) + + val apply = untpd.Apply(fun, args) + typer.ApplyTo(apply, fun, selected, proto, expectedType) + } + + + def resolveConstructor(atp: Type, args: List[Tree])(using Context): tpd.Tree = { + val targs = atp.argTypes + withoutMode(Mode.PatternOrTypeBits) { + applyOverloaded(tpd.New(atp.typeConstructor), nme.CONSTRUCTOR, args, targs, atp) + } + } + } +} diff --git a/tests/pos-with-compiler-cc/dotc/ast/tpd.scala b/tests/pos-with-compiler-cc/dotc/ast/tpd.scala new file mode 100644 index 000000000000..f778824a18d3 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/ast/tpd.scala @@ -0,0 +1,1546 @@ +package dotty.tools +package dotc +package ast + +import dotty.tools.dotc.transform.{ExplicitOuter, Erasure} +import typer.ProtoTypes +import transform.SymUtils._ +import transform.TypeUtils._ +import core._ +import Scopes.newScope +import util.Spans._, Types._, Contexts._, Constants._, Names._, 
Flags._, NameOps._ +import Symbols._, StdNames._, Annotations._, Trees._, Symbols._ +import Decorators._, DenotTransformers._ +import collection.{immutable, mutable} +import util.{Property, SourceFile} +import NameKinds.{TempResultName, OuterSelectName} +import typer.ConstFold + +import scala.annotation.tailrec +import scala.collection.mutable.ListBuffer +import language.experimental.pureFunctions + +/** Some creators for typed trees */ +object tpd extends Trees.Instance[Type] with TypedTreeInfo { + + private def ta(using Context) = ctx.typeAssigner + + def Ident(tp: NamedType)(using Context): Ident = + ta.assignType(untpd.Ident(tp.name), tp) + + def Select(qualifier: Tree, name: Name)(using Context): Select = + ta.assignType(untpd.Select(qualifier, name), qualifier) + + def Select(qualifier: Tree, tp: NamedType)(using Context): Select = + untpd.Select(qualifier, tp.name).withType(tp) + + def This(cls: ClassSymbol)(using Context): This = + untpd.This(untpd.Ident(cls.name)).withType(cls.thisType) + + def Super(qual: Tree, mix: untpd.Ident, mixinClass: Symbol)(using Context): Super = + ta.assignType(untpd.Super(qual, mix), qual, mixinClass) + + def Super(qual: Tree, mixName: TypeName, mixinClass: Symbol = NoSymbol)(using Context): Super = + Super(qual, if (mixName.isEmpty) untpd.EmptyTypeIdent else untpd.Ident(mixName), mixinClass) + + def Apply(fn: Tree, args: List[Tree])(using Context): Apply = fn match + case Block(Nil, expr) => + Apply(expr, args) + case _: RefTree | _: GenericApply | _: Inlined | _: Hole => + ta.assignType(untpd.Apply(fn, args), fn, args) + + def TypeApply(fn: Tree, args: List[Tree])(using Context): TypeApply = fn match + case Block(Nil, expr) => + TypeApply(expr, args) + case _: RefTree | _: GenericApply => + ta.assignType(untpd.TypeApply(fn, args), fn, args) + + def Literal(const: Constant)(using Context): Literal = + ta.assignType(untpd.Literal(const)) + + def unitLiteral(using Context): Literal = + Literal(Constant(())) + + def 
nullLiteral(using Context): Literal = + Literal(Constant(null)) + + def New(tpt: Tree)(using Context): New = + ta.assignType(untpd.New(tpt), tpt) + + def New(tp: Type)(using Context): New = New(TypeTree(tp)) + + def Typed(expr: Tree, tpt: Tree)(using Context): Typed = + ta.assignType(untpd.Typed(expr, tpt), tpt) + + def NamedArg(name: Name, arg: Tree)(using Context): NamedArg = + ta.assignType(untpd.NamedArg(name, arg), arg) + + def Assign(lhs: Tree, rhs: Tree)(using Context): Assign = + ta.assignType(untpd.Assign(lhs, rhs)) + + def Block(stats: List[Tree], expr: Tree)(using Context): Block = + ta.assignType(untpd.Block(stats, expr), stats, expr) + + /** Join `stats` in front of `expr` creating a new block if necessary */ + def seq(stats: List[Tree], expr: Tree)(using Context): Tree = + if (stats.isEmpty) expr + else expr match { + case Block(_, _: Closure) => + Block(stats, expr) // leave closures in their own block + case Block(estats, eexpr) => + cpy.Block(expr)(stats ::: estats, eexpr).withType(ta.avoidingType(eexpr, stats)) + case _ => + Block(stats, expr) + } + + def If(cond: Tree, thenp: Tree, elsep: Tree)(using Context): If = + ta.assignType(untpd.If(cond, thenp, elsep), thenp, elsep) + + def InlineIf(cond: Tree, thenp: Tree, elsep: Tree)(using Context): If = + ta.assignType(untpd.InlineIf(cond, thenp, elsep), thenp, elsep) + + def Closure(env: List[Tree], meth: Tree, tpt: Tree)(using Context): Closure = + ta.assignType(untpd.Closure(env, meth, tpt), meth, tpt) + + /** A function def + * + * vparams => expr + * + * gets expanded to + * + * { def $anonfun(vparams) = expr; Closure($anonfun) } + * + * where the closure's type is the target type of the expression (FunctionN, unless + * otherwise specified). 
+ */ + def Closure(meth: TermSymbol, rhsFn: List[List[Tree]] => Tree, targs: List[Tree] = Nil, targetType: Type = NoType)(using Context): Block = { + val targetTpt = if (targetType.exists) TypeTree(targetType) else EmptyTree + val call = + if (targs.isEmpty) Ident(TermRef(NoPrefix, meth)) + else TypeApply(Ident(TermRef(NoPrefix, meth)), targs) + Block( + DefDef(meth, rhsFn) :: Nil, + Closure(Nil, call, targetTpt)) + } + + /** A closure whose anonymous function has the given method type */ + def Lambda(tpe: MethodType, rhsFn: List[Tree] => Tree)(using Context): Block = { + val meth = newAnonFun(ctx.owner, tpe) + Closure(meth, tss => rhsFn(tss.head).changeOwner(ctx.owner, meth)) + } + + def CaseDef(pat: Tree, guard: Tree, body: Tree)(using Context): CaseDef = + ta.assignType(untpd.CaseDef(pat, guard, body), pat, body) + + def Match(selector: Tree, cases: List[CaseDef])(using Context): Match = + ta.assignType(untpd.Match(selector, cases), selector, cases) + + def InlineMatch(selector: Tree, cases: List[CaseDef])(using Context): Match = + ta.assignType(untpd.InlineMatch(selector, cases), selector, cases) + + def Labeled(bind: Bind, expr: Tree)(using Context): Labeled = + ta.assignType(untpd.Labeled(bind, expr)) + + def Labeled(sym: TermSymbol, expr: Tree)(using Context): Labeled = + Labeled(Bind(sym, EmptyTree), expr) + + def Return(expr: Tree, from: Tree)(using Context): Return = + ta.assignType(untpd.Return(expr, from)) + + def Return(expr: Tree, from: Symbol)(using Context): Return = + Return(expr, Ident(from.termRef)) + + def WhileDo(cond: Tree, body: Tree)(using Context): WhileDo = + ta.assignType(untpd.WhileDo(cond, body)) + + def Try(block: Tree, cases: List[CaseDef], finalizer: Tree)(using Context): Try = + ta.assignType(untpd.Try(block, cases, finalizer), block, cases) + + def SeqLiteral(elems: List[Tree], elemtpt: Tree)(using Context): SeqLiteral = + ta.assignType(untpd.SeqLiteral(elems, elemtpt), elems, elemtpt) + + def JavaSeqLiteral(elems: List[Tree], 
elemtpt: Tree)(using Context): JavaSeqLiteral = + ta.assignType(untpd.JavaSeqLiteral(elems, elemtpt), elems, elemtpt).asInstanceOf[JavaSeqLiteral] + + def Inlined(call: Tree, bindings: List[MemberDef], expansion: Tree)(using Context): Inlined = + ta.assignType(untpd.Inlined(call, bindings, expansion), bindings, expansion) + + def TypeTree(tp: Type, inferred: Boolean = false)(using Context): TypeTree = + (if inferred then untpd.InferredTypeTree() else untpd.TypeTree()).withType(tp) + + def SingletonTypeTree(ref: Tree)(using Context): SingletonTypeTree = + ta.assignType(untpd.SingletonTypeTree(ref), ref) + + def RefinedTypeTree(parent: Tree, refinements: List[Tree], refineCls: ClassSymbol)(using Context): Tree = + ta.assignType(untpd.RefinedTypeTree(parent, refinements), parent, refinements, refineCls) + + def AppliedTypeTree(tycon: Tree, args: List[Tree])(using Context): AppliedTypeTree = + ta.assignType(untpd.AppliedTypeTree(tycon, args), tycon, args) + + def ByNameTypeTree(result: Tree)(using Context): ByNameTypeTree = + ta.assignType(untpd.ByNameTypeTree(result), result) + + def LambdaTypeTree(tparams: List[TypeDef], body: Tree)(using Context): LambdaTypeTree = + ta.assignType(untpd.LambdaTypeTree(tparams, body), tparams, body) + + def MatchTypeTree(bound: Tree, selector: Tree, cases: List[CaseDef])(using Context): MatchTypeTree = + ta.assignType(untpd.MatchTypeTree(bound, selector, cases), bound, selector, cases) + + def TypeBoundsTree(lo: Tree, hi: Tree, alias: Tree = EmptyTree)(using Context): TypeBoundsTree = + ta.assignType(untpd.TypeBoundsTree(lo, hi, alias), lo, hi, alias) + + def Bind(sym: Symbol, body: Tree)(using Context): Bind = + ta.assignType(untpd.Bind(sym.name, body), sym) + + /** A pattern corresponding to `sym: tpe` */ + def BindTyped(sym: TermSymbol, tpe: Type)(using Context): Bind = + Bind(sym, Typed(Underscore(tpe), TypeTree(tpe))) + + def Alternative(trees: List[Tree])(using Context): Alternative = + ta.assignType(untpd.Alternative(trees), 
trees) + + def UnApply(fun: Tree, implicits: List[Tree], patterns: List[Tree], proto: Type)(using Context): UnApply = { + assert(fun.isInstanceOf[RefTree] || fun.isInstanceOf[GenericApply]) + ta.assignType(untpd.UnApply(fun, implicits, patterns), proto) + } + + def ValDef(sym: TermSymbol, rhs: LazyTree = EmptyTree, inferred: Boolean = false)(using Context): ValDef = + ta.assignType(untpd.ValDef(sym.name, TypeTree(sym.info, inferred), rhs), sym) + + def SyntheticValDef(name: TermName, rhs: Tree, flags: FlagSet = EmptyFlags)(using Context): ValDef = + ValDef(newSymbol(ctx.owner, name, Synthetic | flags, rhs.tpe.widen, coord = rhs.span), rhs) + + def DefDef(sym: TermSymbol, paramss: List[List[Symbol]], + resultType: Type, rhs: Tree)(using Context): DefDef = + sym.setParamss(paramss) + ta.assignType( + untpd.DefDef( + sym.name, + paramss.map { + case TypeSymbols(params) => params.map(param => TypeDef(param).withSpan(param.span)) + case TermSymbols(params) => params.map(param => ValDef(param).withSpan(param.span)) + case _ => unreachable() + }, + TypeTree(resultType), + rhs), + sym) + + def DefDef(sym: TermSymbol, rhs: Tree = EmptyTree)(using Context): DefDef = + ta.assignType(DefDef(sym, Function.const(rhs) _), sym) + + /** A DefDef with given method symbol `sym`. + * @rhsFn A function from parameter references + * to the method's right-hand side. + * Parameter symbols are taken from the `rawParamss` field of `sym`, or + * are freshly generated if `rawParamss` is empty. 
+ */ + def DefDef(sym: TermSymbol, rhsFn: List[List[Tree]] => Tree)(using Context): DefDef = + + // Map method type `tp` with remaining parameters stored in rawParamss to + // final result type and all (given or synthesized) parameters + def recur(tp: Type, remaining: List[List[Symbol]]): (Type, List[List[Symbol]]) = tp match + case tp: PolyType => + val (tparams: List[TypeSymbol], remaining1) = remaining match + case tparams :: remaining1 => + assert(tparams.hasSameLengthAs(tp.paramNames) && tparams.head.isType) + (tparams.asInstanceOf[List[TypeSymbol]], remaining1) + case nil => + (newTypeParams(sym, tp.paramNames, EmptyFlags, tp.instantiateParamInfos(_)), Nil) + val (rtp, paramss) = recur(tp.instantiate(tparams.map(_.typeRef)), remaining1) + (rtp, tparams :: paramss) + case tp: MethodType => + val isParamDependent = tp.isParamDependent + val previousParamRefs: ListBuffer[TermRef] = + // It is ok to assign `null` here. + // If `isParamDependent == false`, the value of `previousParamRefs` is not used. 
+ if isParamDependent then mutable.ListBuffer[TermRef]() else (null: ListBuffer[TermRef] | Null).uncheckedNN + + def valueParam(name: TermName, origInfo: Type): TermSymbol = + val maybeImplicit = + if tp.isContextualMethod then Given + else if tp.isImplicitMethod then Implicit + else EmptyFlags + val maybeErased = if tp.isErasedMethod then Erased else EmptyFlags + + def makeSym(info: Type) = newSymbol(sym, name, TermParam | maybeImplicit | maybeErased, info, coord = sym.coord) + + if isParamDependent then + val sym = makeSym(origInfo.substParams(tp, previousParamRefs.toList)) + previousParamRefs += sym.termRef + sym + else makeSym(origInfo) + end valueParam + + val (vparams: List[TermSymbol], remaining1) = + if tp.paramNames.isEmpty then (Nil, remaining) + else remaining match + case vparams :: remaining1 => + assert(vparams.hasSameLengthAs(tp.paramNames) && vparams.head.isTerm) + (vparams.asInstanceOf[List[TermSymbol]], remaining1) + case nil => + (tp.paramNames.lazyZip(tp.paramInfos).map(valueParam), Nil) + val (rtp, paramss) = recur(tp.instantiate(vparams.map(_.termRef)), remaining1) + (rtp, vparams :: paramss) + case _ => + assert(remaining.isEmpty) + (tp.widenExpr, Nil) + end recur + + val (rtp, paramss) = recur(sym.info, sym.rawParamss) + DefDef(sym, paramss, rtp, rhsFn(paramss.nestedMap(ref))) + end DefDef + + def TypeDef(sym: TypeSymbol)(using Context): TypeDef = + ta.assignType(untpd.TypeDef(sym.name, TypeTree(sym.info)), sym) + + def ClassDef(cls: ClassSymbol, constr: DefDef, body: List[Tree], superArgs: List[Tree] = Nil)(using Context): TypeDef = { + val firstParent :: otherParents = cls.info.parents: @unchecked + val superRef = + if (cls.is(Trait)) TypeTree(firstParent) + else { + def isApplicable(ctpe: Type): Boolean = ctpe match { + case ctpe: PolyType => + isApplicable(ctpe.instantiate(firstParent.argTypes)) + case ctpe: MethodType => + (superArgs corresponds ctpe.paramInfos)(_.tpe <:< _) + case _ => + false + } + val constr = 
firstParent.decl(nme.CONSTRUCTOR).suchThat(constr => isApplicable(constr.info)) + New(firstParent, constr.symbol.asTerm, superArgs) + } + ClassDefWithParents(cls, constr, superRef :: otherParents.map(TypeTree(_)), body) + } + + def ClassDefWithParents(cls: ClassSymbol, constr: DefDef, parents: List[Tree], body: List[Tree])(using Context): TypeDef = { + val selfType = + if (cls.classInfo.selfInfo ne NoType) ValDef(newSelfSym(cls)) + else EmptyValDef + def isOwnTypeParam(stat: Tree) = + stat.symbol.is(TypeParam) && stat.symbol.owner == cls + val bodyTypeParams = body filter isOwnTypeParam map (_.symbol) + val newTypeParams = + for (tparam <- cls.typeParams if !(bodyTypeParams contains tparam)) + yield TypeDef(tparam) + val findLocalDummy = FindLocalDummyAccumulator(cls) + val localDummy = body.foldLeft(NoSymbol: Symbol)(findLocalDummy.apply) + .orElse(newLocalDummy(cls)) + val impl = untpd.Template(constr, parents, Nil, selfType, newTypeParams ++ body) + .withType(localDummy.termRef) + ta.assignType(untpd.TypeDef(cls.name, impl), cls) + } + + /** An anonymous class + * + * new parents { forwarders } + * + * where `forwarders` contains forwarders for all functions in `fns`. + * @param parents a non-empty list of class types + * @param fns a non-empty of functions for which forwarders should be defined in the class. + * The class has the same owner as the first function in `fns`. + * Its position is the union of all functions in `fns`. 
+ */ + def AnonClass(parents: List[Type], fns: List[TermSymbol], methNames: List[TermName])(using Context): Block = { + AnonClass(fns.head.owner, parents, fns.map(_.span).reduceLeft(_ union _)) { cls => + def forwarder(fn: TermSymbol, name: TermName) = { + val fwdMeth = fn.copy(cls, name, Synthetic | Method | Final).entered.asTerm + for overridden <- fwdMeth.allOverriddenSymbols do + if overridden.is(Extension) then fwdMeth.setFlag(Extension) + if !overridden.is(Deferred) then fwdMeth.setFlag(Override) + DefDef(fwdMeth, ref(fn).appliedToArgss(_)) + } + fns.lazyZip(methNames).map(forwarder) + } + } + + /** An anonymous class + * + * new parents { body } + * + * with the specified owner and position. + */ + def AnonClass(owner: Symbol, parents: List[Type], coord: Coord)(body: ClassSymbol => List[Tree])(using Context): Block = + val parents1 = + if (parents.head.classSymbol.is(Trait)) { + val head = parents.head.parents.head + if (head.isRef(defn.AnyClass)) defn.AnyRefType :: parents else head :: parents + } + else parents + val cls = newNormalizedClassSymbol(owner, tpnme.ANON_CLASS, Synthetic | Final, parents1, coord = coord) + val constr = newConstructor(cls, Synthetic, Nil, Nil).entered + val cdef = ClassDef(cls, DefDef(constr), body(cls)) + Block(cdef :: Nil, New(cls.typeRef, Nil)) + + def Import(expr: Tree, selectors: List[untpd.ImportSelector])(using Context): Import = + ta.assignType(untpd.Import(expr, selectors), newImportSymbol(ctx.owner, expr)) + + def Export(expr: Tree, selectors: List[untpd.ImportSelector])(using Context): Export = + ta.assignType(untpd.Export(expr, selectors)) + + def PackageDef(pid: RefTree, stats: List[Tree])(using Context): PackageDef = + ta.assignType(untpd.PackageDef(pid, stats), pid) + + def Annotated(arg: Tree, annot: Tree)(using Context): Annotated = + ta.assignType(untpd.Annotated(arg, annot), arg, annot) + + def Throw(expr: Tree)(using Context): Tree = + ref(defn.throwMethod).appliedTo(expr) + + def Hole(isTermHole: Boolean, 
idx: Int, args: List[Tree], content: Tree, tpt: Tree)(using Context): Hole = + ta.assignType(untpd.Hole(isTermHole, idx, args, content, tpt), tpt) + + // ------ Making references ------------------------------------------------------ + + def prefixIsElidable(tp: NamedType)(using Context): Boolean = { + val typeIsElidable = tp.prefix match { + case pre: ThisType => + tp.isType || + pre.cls.isStaticOwner || + tp.symbol.isParamOrAccessor && !pre.cls.is(Trait) && ctx.owner.enclosingClass == pre.cls + // was ctx.owner.enclosingClass.derivesFrom(pre.cls) which was not tight enough + // and was spuriously triggered in case inner class would inherit from outer one + // eg anonymous TypeMap inside TypeMap.andThen + case pre: TermRef => + pre.symbol.is(Module) && pre.symbol.isStatic + case pre => + pre `eq` NoPrefix + } + typeIsElidable || + tp.symbol.is(JavaStatic) || + tp.symbol.hasAnnotation(defn.ScalaStaticAnnot) + } + + def needsSelect(tp: Type)(using Context): Boolean = tp match { + case tp: TermRef => !prefixIsElidable(tp) + case _ => false + } + + /** A tree representing the same reference as the given type */ + def ref(tp: NamedType, needLoad: Boolean = true)(using Context): Tree = + if (tp.isType) TypeTree(tp) + else if (prefixIsElidable(tp)) Ident(tp) + else if (tp.symbol.is(Module) && ctx.owner.isContainedIn(tp.symbol.moduleClass)) + followOuterLinks(This(tp.symbol.moduleClass.asClass)) + else if (tp.symbol hasAnnotation defn.ScalaStaticAnnot) + Ident(tp) + else + val pre = tp.prefix + if (pre.isSingleton) followOuterLinks(singleton(pre.dealias, needLoad)).select(tp) + else + val res = Select(TypeTree(pre), tp) + if needLoad && !res.symbol.isStatic then + throw TypeError(em"cannot establish a reference to $res") + res + + def ref(sym: Symbol)(using Context): Tree = + ref(NamedType(sym.owner.thisType, sym.name, sym.denot)) + + private def followOuterLinks(t: Tree)(using Context) = t match { + case t: This if ctx.erasedTypes && !(t.symbol == 
ctx.owner.enclosingClass || t.symbol.isStaticOwner) => + // after erasure outer paths should be respected + ExplicitOuter.OuterOps(ctx.detach).path(toCls = t.tpe.classSymbol) + case t => + t + } + + def singleton(tp: Type, needLoad: Boolean = true)(using Context): Tree = tp.dealias match { + case tp: TermRef => ref(tp, needLoad) + case tp: ThisType => This(tp.cls) + case tp: SkolemType => singleton(tp.narrow, needLoad) + case SuperType(qual, _) => singleton(qual, needLoad) + case ConstantType(value) => Literal(value) + } + + /** A path that corresponds to the given type `tp`. Error if `tp` is not a refinement + * of an addressable singleton type. + */ + def pathFor(tp: Type)(using Context): Tree = { + def recur(tp: Type): Tree = tp match { + case tp: NamedType => + tp.info match { + case TypeAlias(alias) => recur(alias) + case _: TypeBounds => EmptyTree + case _ => singleton(tp) + } + case tp: TypeProxy => recur(tp.superType) + case _ => EmptyTree + } + recur(tp).orElse { + report.error(em"$tp is not an addressable singleton type") + TypeTree(tp) + } + } + + /** A tree representing a `newXYZArray` operation of the right + * kind for the given element type in `elemTpe`. No type arguments or + * `length` arguments are given. + */ + def newArray(elemTpe: Type, returnTpe: Type, span: Span, dims: JavaSeqLiteral)(using Context): Tree = { + val elemClass = elemTpe.classSymbol + def newArr = + ref(defn.DottyArraysModule).select(defn.newArrayMethod).withSpan(span) + + if (!ctx.erasedTypes) { + assert(!TypeErasure.isGeneric(elemTpe), elemTpe) //needs to be done during typer. 
See Applications.convertNewGenericArray + newArr.appliedToTypeTrees(TypeTree(returnTpe) :: Nil).appliedToTermArgs(clsOf(elemTpe) :: clsOf(returnTpe) :: dims :: Nil).withSpan(span) + } + else // after erasure + newArr.appliedToTermArgs(clsOf(elemTpe) :: clsOf(returnTpe) :: dims :: Nil).withSpan(span) + } + + /** The wrapped array method name for an array of type elemtp */ + def wrapArrayMethodName(elemtp: Type)(using Context): TermName = { + val elemCls = elemtp.classSymbol + if (elemCls.isPrimitiveValueClass) nme.wrapXArray(elemCls.name) + else if (elemCls.derivesFrom(defn.ObjectClass) && !elemCls.isNotRuntimeClass) nme.wrapRefArray + else nme.genericWrapArray + } + + /** A tree representing a `wrapXYZArray(tree)` operation of the right + * kind for the given element type in `elemTpe`. + */ + def wrapArray(tree: Tree, elemtp: Type)(using Context): Tree = + val wrapper = ref(defn.getWrapVarargsArrayModule) + .select(wrapArrayMethodName(elemtp)) + .appliedToTypes(if (elemtp.isPrimitiveValueType) Nil else elemtp :: Nil) + val actualElem = wrapper.tpe.widen.firstParamTypes.head + wrapper.appliedTo(tree.ensureConforms(actualElem)) + + // ------ Creating typed equivalents of trees that exist only in untyped form ------- + + /** new C(args), calling the primary constructor of C */ + def New(tp: Type, args: List[Tree])(using Context): Apply = + New(tp, tp.dealias.typeSymbol.primaryConstructor.asTerm, args) + + /** new C(args), calling given constructor `constr` of C */ + def New(tp: Type, constr: TermSymbol, args: List[Tree])(using Context): Apply = { + val targs = tp.argTypes + val tycon = tp.typeConstructor + New(tycon) + .select(TermRef(tycon, constr)) + .appliedToTypes(targs) + .appliedToTermArgs(args) + } + + /** An object def + * + * object obs extends parents { decls } + * + * gets expanded to + * + * val obj = new obj$ + * class obj$ extends parents { this: obj.type => decls } + * + * (The following no longer applies: + * What's interesting here is that the block 
is well typed + * (because class obj$ is hoistable), but the type of the `obj` val is + * not expressible. What needs to happen in general when + * inferring the type of a val from its RHS, is: if the type contains + * a class that has the val itself as owner, then that class + * is remapped to have the val's owner as owner. Remapping could be + * done by cloning the class with the new owner and substituting + * everywhere in the tree. We know that remapping is safe + * because the only way a local class can appear in the RHS of a val is + * by being hoisted outside of a block, and the necessary checks are + * done at this point already. + * + * On the other hand, for method result type inference, if the type of + * the RHS of a method contains a class owned by the method, this would be + * an error.) + */ + def ModuleDef(sym: TermSymbol, body: List[Tree])(using Context): tpd.Thicket = { + val modcls = sym.moduleClass.asClass + val constrSym = modcls.primaryConstructor orElse newDefaultConstructor(modcls).entered + val constr = DefDef(constrSym.asTerm, EmptyTree) + val clsdef = ClassDef(modcls, constr, body) + val valdef = ValDef(sym, New(modcls.typeRef).select(constrSym).appliedToNone) + Thicket(valdef, clsdef) + } + + /** A `_` with given type */ + def Underscore(tp: Type)(using Context): Ident = untpd.Ident(nme.WILDCARD).withType(tp) + + def defaultValue(tpe: Type)(using Context): Tree = { + val tpw = tpe.widen + + if (tpw isRef defn.IntClass) Literal(Constant(0)) + else if (tpw isRef defn.LongClass) Literal(Constant(0L)) + else if (tpw isRef defn.BooleanClass) Literal(Constant(false)) + else if (tpw isRef defn.CharClass) Literal(Constant('\u0000')) + else if (tpw isRef defn.FloatClass) Literal(Constant(0f)) + else if (tpw isRef defn.DoubleClass) Literal(Constant(0d)) + else if (tpw isRef defn.ByteClass) Literal(Constant(0.toByte)) + else if (tpw isRef defn.ShortClass) Literal(Constant(0.toShort)) + else 
nullLiteral.select(defn.Any_asInstanceOf).appliedToType(tpe) + } + + private class FindLocalDummyAccumulator(cls: ClassSymbol)(using Context) extends TreeAccumulator[Symbol] { + def apply(sym: Symbol, tree: Tree)(using Context) = + if (sym.exists) sym + else if (tree.isDef) { + val owner = tree.symbol.owner + if (owner.isLocalDummy && owner.owner == cls) owner + else if (owner == cls) foldOver(sym, tree) + else sym + } + else foldOver(sym, tree) + } + + /** The owner to be used in a local context when traversing a tree */ + def localOwner(tree: Tree)(using Context): Symbol = + val sym = tree.symbol + (if sym.is(PackageVal) then sym.moduleClass else sym).orElse(ctx.owner) + + /** The local context to use when traversing trees */ + def localCtx(tree: Tree)(using Context): Context = ctx.withOwner(localOwner(tree)) + + override val cpy: TypedTreeCopier = // Type ascription needed to pick up any new members in TreeCopier (currently there are none) + TypedTreeCopier() + + val cpyBetweenPhases: TimeTravellingTreeCopier = TimeTravellingTreeCopier() + + class TypedTreeCopier extends TreeCopier { + def postProcess(tree: Tree, copied: untpd.Tree): copied.ThisTree[Type] = + copied.withTypeUnchecked(tree.tpe) + def postProcess(tree: Tree, copied: untpd.MemberDef): copied.ThisTree[Type] = + copied.withTypeUnchecked(tree.tpe) + + protected val untpdCpy = untpd.cpy + + override def Select(tree: Tree)(qualifier: Tree, name: Name)(using Context): Select = { + val tree1 = untpdCpy.Select(tree)(qualifier, name) + tree match { + case tree: Select if qualifier.tpe eq tree.qualifier.tpe => + tree1.withTypeUnchecked(tree.tpe) + case _ => + val tree2: Select = tree.tpe match { + case tpe: NamedType => + val qualType = qualifier.tpe.widenIfUnstable + if qualType.isExactlyNothing then tree1.withTypeUnchecked(tree.tpe) + else tree1.withType(tpe.derivedSelect(qualType)) + case _ => tree1.withTypeUnchecked(tree.tpe) + } + ConstFold.Select(tree2) + } + } + + override def Apply(tree: Tree)(fun: 
Tree, args: List[Tree])(using Context): Apply = { + val tree1 = untpdCpy.Apply(tree)(fun, args) + tree match { + case tree: Apply + if (fun.tpe eq tree.fun.tpe) && sameTypes(args, tree.args) => + tree1.withTypeUnchecked(tree.tpe) + case _ => ta.assignType(tree1, fun, args) + } + } + + override def TypeApply(tree: Tree)(fun: Tree, args: List[Tree])(using Context): TypeApply = { + val tree1 = untpdCpy.TypeApply(tree)(fun, args) + tree match { + case tree: TypeApply + if (fun.tpe eq tree.fun.tpe) && sameTypes(args, tree.args) => + tree1.withTypeUnchecked(tree.tpe) + case _ => ta.assignType(tree1, fun, args) + } + } + + override def Literal(tree: Tree)(const: Constant)(using Context): Literal = + ta.assignType(untpdCpy.Literal(tree)(const)) + + override def New(tree: Tree)(tpt: Tree)(using Context): New = + ta.assignType(untpdCpy.New(tree)(tpt), tpt) + + override def Typed(tree: Tree)(expr: Tree, tpt: Tree)(using Context): Typed = + ta.assignType(untpdCpy.Typed(tree)(expr, tpt), tpt) + + override def NamedArg(tree: Tree)(name: Name, arg: Tree)(using Context): NamedArg = + ta.assignType(untpdCpy.NamedArg(tree)(name, arg), arg) + + override def Assign(tree: Tree)(lhs: Tree, rhs: Tree)(using Context): Assign = + ta.assignType(untpdCpy.Assign(tree)(lhs, rhs)) + + override def Block(tree: Tree)(stats: List[Tree], expr: Tree)(using Context): Block = { + val tree1 = untpdCpy.Block(tree)(stats, expr) + tree match { + case tree: Block if (expr.tpe eq tree.expr.tpe) && (expr.tpe eq tree.tpe) => + // The last guard is a conservative check: if `tree.tpe` is different from `expr.tpe`, then + // it was computed from widening `expr.tpe`, and tree transforms might cause `expr.tpe.widen` + // to change even if `expr.tpe` itself didn't change, e.g: + // { val s = ...; s } + // If the type of `s` changed, then the type of the block might have changed, even though `expr.tpe` + // will still be `TermRef(NoPrefix, s)` + tree1.withTypeUnchecked(tree.tpe) + case _ => ta.assignType(tree1, 
stats, expr) + } + } + + override def If(tree: Tree)(cond: Tree, thenp: Tree, elsep: Tree)(using Context): If = { + val tree1 = untpdCpy.If(tree)(cond, thenp, elsep) + tree match { + case tree: If if (thenp.tpe eq tree.thenp.tpe) && (elsep.tpe eq tree.elsep.tpe) && + ((tree.tpe eq thenp.tpe) || (tree.tpe eq elsep.tpe)) => + // The last guard is a conservative check similar to the one done in `Block` above, + // if `tree.tpe` is not identical to the type of one of its branch, it might have been + // computed from the widened type of the branches, so the same reasoning than + // in `Block` applies. + tree1.withTypeUnchecked(tree.tpe) + case _ => ta.assignType(tree1, thenp, elsep) + } + } + + override def Closure(tree: Tree)(env: List[Tree], meth: Tree, tpt: Tree)(using Context): Closure = { + val tree1 = untpdCpy.Closure(tree)(env, meth, tpt) + tree match { + case tree: Closure if sameTypes(env, tree.env) && (meth.tpe eq tree.meth.tpe) && (tpt.tpe eq tree.tpt.tpe) => + tree1.withTypeUnchecked(tree.tpe) + case _ => ta.assignType(tree1, meth, tpt) + } + } + + override def Match(tree: Tree)(selector: Tree, cases: List[CaseDef])(using Context): Match = { + val tree1 = untpdCpy.Match(tree)(selector, cases) + tree match { + case tree: Match if sameTypes(cases, tree.cases) => tree1.withTypeUnchecked(tree.tpe) + case _ => ta.assignType(tree1, selector, cases) + } + } + + override def CaseDef(tree: Tree)(pat: Tree, guard: Tree, body: Tree)(using Context): CaseDef = { + val tree1 = untpdCpy.CaseDef(tree)(pat, guard, body) + tree match { + case tree: CaseDef if body.tpe eq tree.body.tpe => tree1.withTypeUnchecked(tree.tpe) + case _ => ta.assignType(tree1, pat, body) + } + } + + override def Labeled(tree: Tree)(bind: Bind, expr: Tree)(using Context): Labeled = + ta.assignType(untpdCpy.Labeled(tree)(bind, expr)) + + override def Return(tree: Tree)(expr: Tree, from: Tree)(using Context): Return = + ta.assignType(untpdCpy.Return(tree)(expr, from)) + + override def WhileDo(tree: 
Tree)(cond: Tree, body: Tree)(using Context): WhileDo = + ta.assignType(untpdCpy.WhileDo(tree)(cond, body)) + + override def Try(tree: Tree)(expr: Tree, cases: List[CaseDef], finalizer: Tree)(using Context): Try = { + val tree1 = untpdCpy.Try(tree)(expr, cases, finalizer) + tree match { + case tree: Try if (expr.tpe eq tree.expr.tpe) && sameTypes(cases, tree.cases) => tree1.withTypeUnchecked(tree.tpe) + case _ => ta.assignType(tree1, expr, cases) + } + } + + override def Inlined(tree: Tree)(call: Tree, bindings: List[MemberDef], expansion: Tree)(using Context): Inlined = { + val tree1 = untpdCpy.Inlined(tree)(call, bindings, expansion) + tree match { + case tree: Inlined if sameTypes(bindings, tree.bindings) && (expansion.tpe eq tree.expansion.tpe) => + tree1.withTypeUnchecked(tree.tpe) + case _ => ta.assignType(tree1, bindings, expansion) + } + } + + override def SeqLiteral(tree: Tree)(elems: List[Tree], elemtpt: Tree)(using Context): SeqLiteral = { + val tree1 = untpdCpy.SeqLiteral(tree)(elems, elemtpt) + tree match { + case tree: SeqLiteral + if sameTypes(elems, tree.elems) && (elemtpt.tpe eq tree.elemtpt.tpe) => + tree1.withTypeUnchecked(tree.tpe) + case _ => + ta.assignType(tree1, elems, elemtpt) + } + } + + override def Annotated(tree: Tree)(arg: Tree, annot: Tree)(using Context): Annotated = { + val tree1 = untpdCpy.Annotated(tree)(arg, annot) + tree match { + case tree: Annotated if (arg.tpe eq tree.arg.tpe) && (annot eq tree.annot) => tree1.withTypeUnchecked(tree.tpe) + case _ => ta.assignType(tree1, arg, annot) + } + } + + override def If(tree: If)(cond: Tree = tree.cond, thenp: Tree = tree.thenp, elsep: Tree = tree.elsep)(using Context): If = + If(tree: Tree)(cond, thenp, elsep) + override def Closure(tree: Closure)(env: List[Tree] = tree.env, meth: Tree = tree.meth, tpt: Tree = tree.tpt)(using Context): Closure = + Closure(tree: Tree)(env, meth, tpt) + override def CaseDef(tree: CaseDef)(pat: Tree = tree.pat, guard: Tree = tree.guard, body: Tree = 
tree.body)(using Context): CaseDef = + CaseDef(tree: Tree)(pat, guard, body) + override def Try(tree: Try)(expr: Tree = tree.expr, cases: List[CaseDef] = tree.cases, finalizer: Tree = tree.finalizer)(using Context): Try = + Try(tree: Tree)(expr, cases, finalizer) + } + + class TimeTravellingTreeCopier extends TypedTreeCopier { + override def Apply(tree: Tree)(fun: Tree, args: List[Tree])(using Context): Apply = + tree match + case tree: Apply + if (tree.fun eq fun) && (tree.args eq args) + && tree.tpe.isInstanceOf[ConstantType] + && isPureExpr(tree) => tree + case _ => + ta.assignType(untpdCpy.Apply(tree)(fun, args), fun, args) + // Note: Reassigning the original type if `fun` and `args` have the same types as before + // does not work here in general: The computed type depends on the widened function type, not + // the function type itself. A tree transform may keep the function type the + // same but its widened type might change. + // However, we keep constant types of pure expressions. This uses the underlying assumptions + // that pure functions yielding a constant will not change in later phases. 
+ + override def TypeApply(tree: Tree)(fun: Tree, args: List[Tree])(using Context): TypeApply = + ta.assignType(untpdCpy.TypeApply(tree)(fun, args), fun, args) + // Same remark as for Apply + + override def Closure(tree: Tree)(env: List[Tree], meth: Tree, tpt: Tree)(using Context): Closure = + ta.assignType(untpdCpy.Closure(tree)(env, meth, tpt), meth, tpt) + + override def Closure(tree: Closure)(env: List[Tree] = tree.env, meth: Tree = tree.meth, tpt: Tree = tree.tpt)(using Context): Closure = + Closure(tree: Tree)(env, meth, tpt) + } + + override def skipTransform(tree: Tree)(using Context): Boolean = tree.tpe.isError + + implicit class TreeOps[ThisTree <: tpd.Tree](private val tree: ThisTree) extends AnyVal { + + def isValue(using Context): Boolean = + tree.isTerm && tree.tpe.widen.isValueType + + def isValueOrPattern(using Context): Boolean = + tree.isValue || tree.isPattern + + def isValueType: Boolean = + tree.isType && tree.tpe.isValueType + + def isInstantiation: Boolean = tree match { + case Apply(Select(New(_), nme.CONSTRUCTOR), _) => true + case _ => false + } + + def shallowFold[T](z: T)(op: (T, tpd.Tree) => T)(using Context): T = + ShallowFolder(op).apply(z, tree) + + def deepFold[T](z: T)(op: (T, tpd.Tree) => T)(using Context): T = + DeepFolder(op).apply(z, tree) + + def find[T](pred: (tpd.Tree) => Boolean)(using Context): Option[tpd.Tree] = + shallowFold[Option[tpd.Tree]](None)((accum, tree) => if (pred(tree)) Some(tree) else accum) + + def subst(from: List[Symbol], to: List[Symbol])(using Context): ThisTree = + TreeTypeMap(substFrom = from, substTo = to).apply(tree) + + /** Change owner from `from` to `to`. If `from` is a weak owner, also change its + * owner to `to`, and continue until a non-weak owner is reached. 
+ */ + def changeOwner(from: Symbol, to: Symbol)(using Context): ThisTree = { + @tailrec def loop(from: Symbol, froms: List[Symbol], tos: List[Symbol]): ThisTree = + if (from.isWeakOwner && !from.owner.isClass) + loop(from.owner, from :: froms, to :: tos) + else + //println(i"change owner ${from :: froms}%, % ==> $tos of $tree") + TreeTypeMap(oldOwners = from :: froms, newOwners = tos).apply(tree) + if (from == to) tree else loop(from, Nil, to :: Nil) + } + + /** + * Set the owner of every definition in this tree which is not itself contained in this + * tree to be `newowner` + */ + def changeNonLocalOwners(newOwner: Symbol)(using Context): Tree = { + val ownerAcc = new TreeAccumulator[immutable.Set[Symbol]] { + def apply(ss: immutable.Set[Symbol], tree: Tree)(using Context) = tree match { + case tree: DefTree => + val sym = tree.symbol + if sym.exists && !sym.owner.is(Package) then ss + sym.owner else ss + case _ => + foldOver(ss, tree) + } + } + val owners = ownerAcc(immutable.Set.empty[Symbol], tree).toList + val newOwners = List.fill(owners.size)(newOwner) + TreeTypeMap(oldOwners = owners, newOwners = newOwners).apply(tree) + } + + /** After phase `trans`, set the owner of every definition in this tree that was formerly + * owner by `from` to `to`. 
+ */ + def changeOwnerAfter(from: Symbol, to: Symbol, trans: DenotTransformer)(using Context): ThisTree = + if (ctx.phase == trans.next) { + val traverser = new TreeTraverser { + def traverse(tree: Tree)(using Context) = tree match { + case tree: DefTree => + val sym = tree.symbol + val prevDenot = atPhase(trans)(sym.denot) + if (prevDenot.effectiveOwner == from.skipWeakOwner) { + val d = sym.copySymDenotation(owner = to) + d.installAfter(trans) + d.transformAfter(trans, d => if (d.owner eq from) d.copySymDenotation(owner = to) else d) + } + if (sym.isWeakOwner) traverseChildren(tree) + case _ => + traverseChildren(tree) + } + } + traverser.traverse(tree) + tree + } + else atPhase(trans.next)(changeOwnerAfter(from, to, trans)) + + /** A select node with the given selector name and a computed type */ + def select(name: Name)(using Context): Select = + Select(tree, name) + + /** A select node with the given selector name such that the designated + * member satisfies predicate `p`. Useful for disambiguating overloaded members. + */ + def select(name: Name, p: Symbol => Boolean)(using Context): Select = + select(tree.tpe.member(name).suchThat(p).symbol) + + /** A select node with the given type */ + def select(tp: NamedType)(using Context): Select = + untpd.Select(tree, tp.name).withType(tp) + + /** A select node that selects the given symbol. Note: Need to make sure this + * is in fact the symbol you would get when you select with the symbol's name, + * otherwise a data race may occur which would be flagged by -Yno-double-bindings. 
+ */ + def select(sym: Symbol)(using Context): Select = { + val tp = + if (sym.isType) { + assert(!sym.is(TypeParam)) + TypeRef(tree.tpe, sym.asType) + } + else + TermRef(tree.tpe, sym.name.asTermName, sym.denot.asSeenFrom(tree.tpe)) + untpd.Select(tree, sym.name).withType(tp) + } + + /** A select node with the given selector name and signature and a computed type */ + def selectWithSig(name: Name, sig: Signature, target: Name)(using Context): Tree = + untpd.SelectWithSig(tree, name, sig).withType(tree.tpe.select(name.asTermName, sig, target)) + + /** A select node with selector name and signature taken from `sym`. + * Note: Use this method instead of select(sym) if the referenced symbol + * might be overridden in the type of the qualifier prefix. See note + * on select(sym: Symbol). + */ + def selectWithSig(sym: Symbol)(using Context): Tree = + selectWithSig(sym.name, sym.signature, sym.targetName) + + /** A unary apply node with given argument: `tree(arg)` */ + def appliedTo(arg: Tree)(using Context): Apply = + appliedToTermArgs(arg :: Nil) + + /** An apply node with given arguments: `tree(arg, args0, ..., argsN)` */ + def appliedTo(arg: Tree, args: Tree*)(using Context): Apply = + appliedToTermArgs(arg :: args.toList) + + /** An apply node with given argument list `tree(args(0), ..., args(args.length - 1))` */ + def appliedToTermArgs(args: List[Tree])(using Context): Apply = + Apply(tree, args) + + /** An applied node that accepts only varargs as arguments */ + def appliedToVarargs(args: List[Tree], tpt: Tree)(using Context): Apply = + appliedTo(repeated(args, tpt)) + + /** An apply or type apply node with given argument list */ + def appliedToArgs(args: List[Tree])(using Context): GenericApply = args match + case arg :: args1 if arg.isType => TypeApply(tree, args) + case _ => Apply(tree, args) + + /** The current tree applied to given argument lists: + * `tree (argss(0)) ... 
(argss(argss.length -1))` + */ + def appliedToArgss(argss: List[List[Tree]])(using Context): Tree = + argss.foldLeft(tree: Tree)(_.appliedToArgs(_)) + + /** The current tree applied to (): `tree()` */ + def appliedToNone(using Context): Apply = Apply(tree, Nil) + + /** The current tree applied to given type argument: `tree[targ]` */ + def appliedToType(targ: Type)(using Context): Tree = + appliedToTypes(targ :: Nil) + + /** The current tree applied to given type arguments: `tree[targ0, ..., targN]` */ + def appliedToTypes(targs: List[Type])(using Context): Tree = + appliedToTypeTrees(targs map (TypeTree(_))) + + /** The current tree applied to given type argument: `tree[targ]` */ + def appliedToTypeTree(targ: Tree)(using Context): Tree = + appliedToTypeTrees(targ :: Nil) + + /** The current tree applied to given type argument list: `tree[targs(0), ..., targs(targs.length - 1)]` */ + def appliedToTypeTrees(targs: List[Tree])(using Context): Tree = + if targs.isEmpty then tree else TypeApply(tree, targs) + + /** Apply to `()` unless tree's widened type is parameterless */ + def ensureApplied(using Context): Tree = + if (tree.tpe.widen.isParameterless) tree else tree.appliedToNone + + /** `tree == that` */ + def equal(that: Tree)(using Context): Tree = + if (that.tpe.widen.isRef(defn.NothingClass)) + Literal(Constant(false)) + else + applyOverloaded(tree, nme.EQ, that :: Nil, Nil, defn.BooleanType) + + /** `tree.isInstanceOf[tp]`, with special treatment of singleton types */ + def isInstance(tp: Type)(using Context): Tree = tp.dealias match { + case ConstantType(c) if c.tag == StringTag => + singleton(tp).equal(tree) + case tp: SingletonType => + if tp.widen.derivesFrom(defn.ObjectClass) then + tree.ensureConforms(defn.ObjectType).select(defn.Object_eq).appliedTo(singleton(tp)) + else + singleton(tp).equal(tree) + case _ => + tree.select(defn.Any_isInstanceOf).appliedToType(tp) + } + + /** tree.asInstanceOf[`tp`] */ + def asInstance(tp: Type)(using Context): Tree = { 
+ assert(tp.isValueType, i"bad cast: $tree.asInstanceOf[$tp]") + tree.select(defn.Any_asInstanceOf).appliedToType(tp) + } + + /** cast tree to `tp`, assuming no exception is raised, i.e the operation is pure */ + def cast(tp: Type)(using Context): Tree = cast(TypeTree(tp)) + + /** cast tree to `tp`, assuming no exception is raised, i.e the operation is pure */ + def cast(tpt: TypeTree)(using Context): Tree = + assert(tpt.tpe.isValueType, i"bad cast: $tree.asInstanceOf[$tpt]") + tree.select(if (ctx.erasedTypes) defn.Any_asInstanceOf else defn.Any_typeCast) + .appliedToTypeTree(tpt) + + /** cast `tree` to `tp` (or its box/unbox/cast equivalent when after + * erasure and value and non-value types are mixed), + * unless tree's type already conforms to `tp`. + */ + def ensureConforms(tp: Type)(using Context): Tree = + if (tree.tpe <:< tp) tree + else if (!ctx.erasedTypes) cast(tp) + else Erasure.Boxing.adaptToType(tree, tp) + + /** `tree ne null` (might need a cast to be type correct) */ + def testNotNull(using Context): Tree = { + // If the receiver is of type `Nothing` or `Null`, add an ascription or cast + // so that the selection succeeds. + // e.g. `null.ne(null)` doesn't type, but `(null: AnyRef).ne(null)` does. + val receiver = + if tree.tpe.isBottomType then + if ctx.explicitNulls then tree.cast(defn.AnyRefType) + else Typed(tree, TypeTree(defn.AnyRefType)) + else tree.ensureConforms(defn.ObjectType) + // also need to cast the null literal to AnyRef in explicit nulls + val nullLit = if ctx.explicitNulls then nullLiteral.cast(defn.AnyRefType) else nullLiteral + receiver.select(defn.Object_ne).appliedTo(nullLit).withSpan(tree.span) + } + + /** If inititializer tree is `_`, the default value of its type, + * otherwise the tree itself. 
+ */ + def wildcardToDefault(using Context): Tree = + if (isWildcardArg(tree)) defaultValue(tree.tpe) else tree + + /** `this && that`, for boolean trees `this`, `that` */ + def and(that: Tree)(using Context): Tree = + tree.select(defn.Boolean_&&).appliedTo(that) + + /** `this || that`, for boolean trees `this`, `that` */ + def or(that: Tree)(using Context): Tree = + tree.select(defn.Boolean_||).appliedTo(that) + + /** The translation of `tree = rhs`. + * This is either the tree as an assignment, or a setter call. + */ + def becomes(rhs: Tree)(using Context): Tree = { + val sym = tree.symbol + if (sym.is(Method)) { + val setter = sym.setter.orElse { + assert(sym.name.isSetterName && sym.info.firstParamTypes.nonEmpty, sym) + sym + } + val qual = tree match { + case id: Ident => desugarIdentPrefix(id) + case Select(qual, _) => qual + } + qual.select(setter).appliedTo(rhs) + } + else Assign(tree, rhs) + } + + /** tree @annot + * + * works differently for type trees and term trees + */ + def annotated(annot: Tree)(using Context): Tree = + if (tree.isTerm) + Typed(tree, TypeTree(AnnotatedType(tree.tpe.widenIfUnstable, Annotation(annot)))) + else + Annotated(tree, annot) + + /** A synthetic select with that will be turned into an outer path by ExplicitOuter. + * @param levels How many outer levels to select + * @param tp The type of the destination of the outer path. 
+ */ + def outerSelect(levels: Int, tp: Type)(using Context): Tree = + untpd.Select(tree, OuterSelectName(EmptyTermName, levels)).withType(SkolemType(tp)) + + /** Replace Inlined nodes and InlineProxy references to underlying arguments */ + def underlyingArgument(using Context): Tree = { + val mapToUnderlying = new MapToUnderlying { + /** Should get the rhs of this binding + * Returns true if the symbol is a val or def generated by eta-expansion/inline + */ + override protected def skipLocal(sym: Symbol): Boolean = + sym.isOneOf(InlineProxy | Synthetic) + } + mapToUnderlying.transform(tree) + } + + /** Replace Ident nodes references to the underlying tree that defined them */ + def underlying(using Context): Tree = MapToUnderlying().transform(tree) + + // --- Higher order traversal methods ------------------------------- + + /** Apply `f` to each subtree of this tree */ + def foreachSubTree(f: Tree => Unit)(using Context): Unit = { + val traverser = new TreeTraverser { + def traverse(tree: Tree)(using Context) = foldOver(f(tree), tree) + } + traverser.traverse(tree) + } + + /** Is there a subtree of this tree that satisfies predicate `p`? */ + def existsSubTree(p: Tree => Boolean)(using Context): Boolean = { + val acc = new TreeAccumulator[Boolean] { + def apply(x: Boolean, t: Tree)(using Context) = x || p(t) || foldOver(x, t) + } + acc(false, tree) + } + + /** All subtrees of this tree that satisfy predicate `p`. 
*/ + def filterSubTrees(f: Tree => Boolean)(using Context): List[Tree] = { + val buf = mutable.ListBuffer[Tree]() + foreachSubTree { tree => if (f(tree)) buf += tree } + buf.toList + } + + /** Set this tree as the `defTree` of its symbol and return this tree */ + def setDefTree(using Context): ThisTree = { + val sym = tree.symbol + if (sym.exists) sym.defTree = tree + tree + } + + def etaExpandCFT(using Context): Tree = + def expand(target: Tree, tp: Type)(using Context): Tree = tp match + case defn.ContextFunctionType(argTypes, resType, isErased) => + val anonFun = newAnonFun( + ctx.owner, + MethodType.companion(isContextual = true, isErased = isErased)(argTypes, resType), + coord = ctx.owner.coord) + def lambdaBody(refss: List[List[Tree]]) = + expand(target.select(nme.apply).appliedToArgss(refss), resType)( + using ctx.withOwner(anonFun)) + Closure(anonFun, lambdaBody) + case _ => + target + expand(tree, tree.tpe.widen) + } + + inline val MapRecursionLimit = 10 + + extension (trees: List[Tree]) + + /** A map that expands to a recursive function. It's equivalent to + * + * flatten(trees.mapConserve(op)) + * + * and falls back to it after `MaxRecursionLimit` recursions. + * Before that it uses a simpler method that uses stackspace + * instead of heap. + * Note `op` is duplicated in the generated code, so it should be + * kept small. 
+ */ + inline def mapInline(inline op: Tree => Tree): List[Tree] = + def recur(trees: List[Tree], count: Int): List[Tree] = + if count > MapRecursionLimit then + // use a slower implementation that avoids stack overflows + flatten(trees.mapConserve(op)) + else trees match + case tree :: rest => + val tree1 = op(tree) + val rest1 = recur(rest, count + 1) + if (tree1 eq tree) && (rest1 eq rest) then trees + else tree1 match + case Thicket(elems1) => elems1 ::: rest1 + case _ => tree1 :: rest1 + case nil => nil + recur(trees, 0) + + /** Transform statements while maintaining import contexts and expression contexts + * in the same way as Typer does. The code addresses additional concerns: + * - be tail-recursive where possible + * - don't re-allocate trees where nothing has changed + */ + inline def mapStatements[T]( + exprOwner: Symbol, + inline op: Tree => Context ?=> Tree, + inline wrapResult: List[Tree] => Context ?=> T)(using Context): T = + @tailrec + def loop(mapped: mutable.ListBuffer[Tree] | Null, unchanged: List[Tree], pending: List[Tree])(using Context): T = + pending match + case stat :: rest => + val statCtx = stat match + case _: DefTree | _: ImportOrExport => ctx + case _ => ctx.exprContext(stat, exprOwner) + val stat1 = op(stat)(using statCtx) + val restCtx = stat match + case stat: Import => ctx.importContext(stat, stat.symbol) + case _ => ctx + if stat1 eq stat then + loop(mapped, unchanged, rest)(using restCtx) + else + val buf = if mapped == null then new mutable.ListBuffer[Tree] else mapped + var xc = unchanged + while xc ne pending do + buf += xc.head + xc = xc.tail + stat1 match + case Thicket(stats1) => buf ++= stats1 + case _ => buf += stat1 + loop(buf, rest, rest)(using restCtx) + case nil => + wrapResult( + if mapped == null then unchanged + else mapped.prependToList(unchanged)) + + loop(null, trees, trees) + end mapStatements + end extension + + /** A treemap that generates the same contexts as the original typer for statements. 
+ * This means: + * - statements that are not definitions get the exprOwner as owner + * - imports are reflected in the contexts of subsequent statements + */ + class TreeMapWithPreciseStatContexts(cpy: TreeCopier = tpd.cpy) extends TreeMap(cpy): + def transformStats[T](trees: List[Tree], exprOwner: Symbol, wrapResult: List[Tree] => Context ?=> T)(using Context): T = + trees.mapStatements(exprOwner, transform(_), wrapResult) + final override def transformStats(trees: List[Tree], exprOwner: Symbol)(using Context): List[Tree] = + transformStats(trees, exprOwner, sameStats) + override def transformBlock(blk: Block)(using Context) = + transformStats(blk.stats, ctx.owner, + stats1 => ctx ?=> cpy.Block(blk)(stats1, transform(blk.expr))) + + val sameStats: List[Tree] => Context ?=> List[Tree] = stats => stats + + /** Map Inlined nodes, NamedArgs, Blocks with no statements and local references to underlying arguments. + * Also drops Inline and Block with no statements. + */ + private class MapToUnderlying extends TreeMap { + override def transform(tree: Tree)(using Context): Tree = tree match { + case tree: Ident if isBinding(tree.symbol) && skipLocal(tree.symbol) => + tree.symbol.defTree match { + case defTree: ValOrDefDef => + val rhs = defTree.rhs + assert(!rhs.isEmpty) + transform(rhs) + case _ => tree + } + case Inlined(_, Nil, arg) => transform(arg) + case Block(Nil, arg) => transform(arg) + case NamedArg(_, arg) => transform(arg) + case tree => super.transform(tree) + } + + /** Should get the rhs of this binding */ + protected def skipLocal(sym: Symbol): Boolean = true + + /** Is this a symbol that of a local val or parameterless def for which we could get the rhs */ + private def isBinding(sym: Symbol)(using Context): Boolean = + sym.isTerm && !sym.is(Param) && !sym.owner.isClass && + !(sym.is(Method) && sym.info.isInstanceOf[MethodOrPoly]) // if is a method it is parameterless + } + + extension (xs: List[tpd.Tree]) + def tpes: List[Type] = xs match { + case x :: 
xs1 => x.tpe :: xs1.tpes + case nil => Nil + } + + /** A trait for loaders that compute trees. Currently implemented just by DottyUnpickler. */ + trait TreeProvider { + protected def computeRootTrees(using Context): List[Tree] + + private var myTrees: List[Tree] | Null = _ + + /** Get trees defined by this provider. Cache them if -Yretain-trees is set. */ + def rootTrees(using Context): List[Tree] = + if (ctx.settings.YretainTrees.value) { + if (myTrees == null) myTrees = computeRootTrees + myTrees.uncheckedNN + } + else computeRootTrees + + /** Get first tree defined by this provider, or EmptyTree if none exists */ + def tree(using Context): Tree = + rootTrees.headOption.getOrElse(EmptyTree) + + /** Is it possible that the tree to load contains a definition of or reference to `id`? */ + def mightContain(id: String)(using Context): Boolean = true + } + + // convert a numeric with a toXXX method + def primitiveConversion(tree: Tree, numericCls: Symbol)(using Context): Tree = { + val mname = "to".concat(numericCls.name) + val conversion = tree.tpe member(mname) + if (conversion.symbol.exists) + tree.select(conversion.symbol.termRef).ensureApplied + else if (tree.tpe.widen isRef numericCls) + tree + else { + report.warning(em"conversion from ${tree.tpe.widen} to ${numericCls.typeRef} will always fail at runtime.") + Throw(New(defn.ClassCastExceptionClass.typeRef, Nil)).withSpan(tree.span) + } + } + + /** A tree that corresponds to `Predef.classOf[$tp]` in source */ + def clsOf(tp: Type)(using Context): Tree = + if ctx.erasedTypes && !tp.isRef(defn.UnitClass) then + Literal(Constant(TypeErasure.erasure(tp))) + else + Literal(Constant(tp)) + + @tailrec + def sameTypes(trees: List[tpd.Tree], trees1: List[tpd.Tree]): Boolean = + if (trees.isEmpty) trees.isEmpty + else if (trees1.isEmpty) trees.isEmpty + else (trees.head.tpe eq trees1.head.tpe) && sameTypes(trees.tail, trees1.tail) + + /** If `tree`'s purity level is less than `level`, let-bind it so that it gets evaluated 
+ * only once. I.e. produce a + * + * { val x = 'tree ; ~within('x) } + * + * instead of otherwise + * + * ~within('tree) + */ + def letBindUnless(level: TreeInfo.PurityLevel, tree: Tree)(within: Tree => Tree)(using Context): Tree = + if (exprPurity(tree) >= level) within(tree) + else { + val vdef = SyntheticValDef(TempResultName.fresh(), tree) + Block(vdef :: Nil, within(Ident(vdef.namedType))) + } + + /** Let bind `tree` unless `tree` is at least idempotent */ + def evalOnce(tree: Tree)(within: Tree => Tree)(using Context): Tree = + letBindUnless(TreeInfo.Idempotent, tree)(within) + + def runtimeCall(name: TermName, args: List[Tree])(using Context): Tree = + Ident(defn.ScalaRuntimeModule.requiredMethod(name).termRef).appliedToTermArgs(args) + + /** An extractor that pulls out type arguments */ + object MaybePoly: + def unapply(tree: Tree): Option[(Tree, List[Tree])] = tree match + case TypeApply(tree, targs) => Some(tree, targs) + case _ => Some(tree, Nil) + + object TypeArgs: + def unapply(ts: List[Tree]): Option[List[Tree]] = + if ts.nonEmpty && ts.head.isType then Some(ts) else None + + /** Split argument clauses into a leading type argument clause if it exists and + * remaining clauses + */ + def splitArgs(argss: List[List[Tree]]): (List[Tree], List[List[Tree]]) = argss match + case TypeArgs(targs) :: argss1 => (targs, argss1) + case _ => (Nil, argss) + + def joinArgs(targs: List[Tree], argss: List[List[Tree]]): List[List[Tree]] = + if targs.isEmpty then argss else targs :: argss + + /** A key to be used in a context property that tracks enclosing inlined calls */ + private val InlinedCalls = Property.Key[List[Tree]]() + + /** A key to be used in a context property that tracks the number of inlined trees */ + private val InlinedTrees = Property.Key[Counter]() + final class Counter { + var count: Int = 0 + } + + /** Record an enclosing inlined call. + * EmptyTree calls (for parameters) cancel the next-enclosing call in the list instead of being added to it. 
+ * We assume parameters are never nested inside parameters. + */ + override def inlineContext(call: Tree)(using Context): Context = { + // We assume enclosingInlineds is already normalized, and only process the new call with the head. + val oldIC = enclosingInlineds + + val newIC = + if call.isEmpty then + oldIC match + case t1 :: ts2 => ts2 + case _ => oldIC + else + call :: oldIC + + val ctx1 = ctx.fresh.setProperty(InlinedCalls, newIC) + if oldIC.isEmpty then ctx1.setProperty(InlinedTrees, new Counter) else ctx1 + } + + /** All enclosing calls that are currently inlined, from innermost to outermost. + */ + def enclosingInlineds(using Context): List[Tree] = + ctx.property(InlinedCalls).getOrElse(Nil) + + /** Record inlined trees */ + def addInlinedTrees(n: Int)(using Context): Unit = + ctx.property(InlinedTrees).foreach(_.count += n) + + /** Check if the limit on the number of inlined trees has been reached */ + def reachedInlinedTreesLimit(using Context): Boolean = + ctx.property(InlinedTrees) match + case Some(c) => c.count > ctx.settings.XmaxInlinedTrees.value + case None => false + + /** The source file where the symbol of the `inline` method referred to by `call` + * is defined + */ + def sourceFile(call: Tree)(using Context): SourceFile = call.symbol.source + + /** Desugar identifier into a select node. Return the tree itself if not possible */ + def desugarIdent(tree: Ident)(using Context): RefTree = { + val qual = desugarIdentPrefix(tree) + if (qual.isEmpty) tree + else qual.select(tree.symbol) + } + + /** Recover identifier prefix (e.g. 
this) if it exists */ + def desugarIdentPrefix(tree: Ident)(using Context): Tree = tree.tpe match { + case TermRef(prefix: TermRef, _) => + prefix.info match + case mt: MethodType if mt.paramInfos.isEmpty && mt.resultType.typeSymbol.is(Module) => + ref(mt.resultType.typeSymbol.sourceModule) + case _ => + ref(prefix) + case TermRef(prefix: ThisType, _) => + This(prefix.cls) + case _ => + EmptyTree + } + + /** + * The symbols that are imported with `expr.name` + * + * @param expr The base of the import statement + * @param name The name that is being imported. + * @return All the symbols that would be imported with `expr.name`. + */ + def importedSymbols(expr: Tree, name: Name)(using Context): List[Symbol] = { + def lookup(name: Name): Symbol = expr.tpe.member(name).symbol + val symbols = + List(lookup(name.toTermName), + lookup(name.toTypeName), + lookup(name.moduleClassName), + lookup(name.sourceModuleName)) + + symbols.map(_.sourceSymbol).filter(_.exists).distinct + } + + /** + * All the symbols that are imported by the first selector of `imp` that matches + * `selectorPredicate`. + * + * @param imp The import statement to analyze + * @param selectorPredicate A test to find the selector to use. + * @return The symbols imported. + */ + def importedSymbols(imp: Import, + selectorPredicate: untpd.ImportSelector -> Boolean = util.common.alwaysTrue) + (using Context): List[Symbol] = + imp.selectors.find(selectorPredicate) match + case Some(sel) => importedSymbols(imp.expr, sel.name) + case _ => Nil + + /** + * The list of select trees that resolve to the same symbols as the ones that are imported + * by `imp`. + */ + def importSelections(imp: Import)(using Context): List[Select] = { + def imported(sym: Symbol, id: untpd.Ident, rename: Option[untpd.Ident]): List[Select] = { + // Give a zero-extent position to the qualifier to prevent it from being included several + // times in results in the language server. 
+ val noPosExpr = focusPositions(imp.expr) + val selectTree = Select(noPosExpr, sym.name).withSpan(id.span) + rename match { + case None => + selectTree :: Nil + case Some(rename) => + // Get the type of the symbol that is actually selected, and construct a select + // node with the new name and the type of the real symbol. + val name = if (sym.name.isTypeName) rename.name.toTypeName else rename.name + val actual = Select(noPosExpr, sym.name) + val renameTree = Select(noPosExpr, name).withSpan(rename.span).withType(actual.tpe) + selectTree :: renameTree :: Nil + } + } + + imp.selectors.flatMap { sel => + if sel.isWildcard then Nil + else + val renamedOpt = sel.renamed match + case renamed: untpd.Ident => Some(renamed) + case untpd.EmptyTree => None + importedSymbols(imp.expr, sel.name).flatMap { sym => + imported(sym, sel.imported, renamedOpt) + } + } + } + + /** Creates the tuple type tree repesentation of the type trees in `ts` */ + def tupleTypeTree(elems: List[Tree])(using Context): Tree = { + val arity = elems.length + if arity <= Definitions.MaxTupleArity then + val tupleTp = defn.TupleType(arity) + if tupleTp != null then + AppliedTypeTree(TypeTree(tupleTp), elems) + else nestedPairsTypeTree(elems) + else nestedPairsTypeTree(elems) + } + + /** Creates the nested pairs type tree repesentation of the type trees in `ts` */ + def nestedPairsTypeTree(ts: List[Tree])(using Context): Tree = + ts.foldRight[Tree](TypeTree(defn.EmptyTupleModule.termRef))((x, acc) => AppliedTypeTree(TypeTree(defn.PairClass.typeRef), x :: acc :: Nil)) + + /** Replaces all positions in `tree` with zero-extent positions */ + private def focusPositions(tree: Tree)(using Context): Tree = { + val transformer = new tpd.TreeMap { + override def transform(tree: Tree)(using Context): Tree = + super.transform(tree).withSpan(tree.span.focus) + } + transformer.transform(tree) + } + + /** Convert a list of trees to a vararg-compatible tree. + * Used to make arguments for methods that accept varargs. 
+ */ + def repeated(trees: List[Tree], tpt: Tree)(using Context): Tree = + ctx.typeAssigner.arrayToRepeated(JavaSeqLiteral(trees, tpt)) + + /** Create a tree representing a list containing all + * the elements of the argument list. A "list of tree to + * tree of list" conversion. + * + * @param trees the elements the list represented by + * the resulting tree should contain. + * @param tpe the type of the elements of the resulting list. + * + */ + def mkList(trees: List[Tree], tpt: Tree)(using Context): Tree = + ref(defn.ListModule).select(nme.apply) + .appliedToTypeTree(tpt) + .appliedToVarargs(trees, tpt) + + + protected def FunProto(args: List[Tree], resType: Type)(using Context) = + ProtoTypes.FunProtoTyped(args, resType)(ctx.typer, ApplyKind.Regular) +} diff --git a/tests/pos-with-compiler-cc/dotc/ast/untpd.scala b/tests/pos-with-compiler-cc/dotc/ast/untpd.scala new file mode 100644 index 000000000000..a6d3bc5a072c --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/ast/untpd.scala @@ -0,0 +1,829 @@ +package dotty.tools +package dotc +package ast + +import core._ +import Types._, Contexts._, Constants._, Names._, Flags._ +import dotty.tools.dotc.typer.ProtoTypes +import Symbols._, StdNames._, Trees._ +import util.{Property, SourceFile, NoSource} +import util.Spans.Span +import annotation.constructorOnly +import annotation.internal.sharable +import Decorators._ +import annotation.retains +import language.experimental.pureFunctions + +object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { + + // ----- Tree cases that exist in untyped form only ------------------ + + abstract class OpTree(implicit @constructorOnly src: SourceFile) extends Tree { + def op: Ident + override def isTerm: Boolean = op.isTerm + override def isType: Boolean = op.isType + } + + /** A typed subtree of an untyped tree needs to be wrapped in a TypedSplice + * @param owner The current owner at the time the tree was defined + * @param isExtensionReceiver The splice was created 
from the receiver `e` in an extension + * method call `e.f(...)` + */ + abstract case class TypedSplice(splice: tpd.Tree)(val owner: Symbol, val isExtensionReceiver: Boolean)(implicit @constructorOnly src: SourceFile) extends ProxyTree { + def forwardTo: tpd.Tree = splice + override def toString = + def ext = if isExtensionReceiver then ", isExtensionReceiver = true" else "" + s"TypedSplice($splice$ext)" + } + + object TypedSplice { + def apply(tree: tpd.Tree, isExtensionReceiver: Boolean = false)(using Context): TypedSplice = + val owner = ctx.owner + given SourceFile = ctx.source + new TypedSplice(tree)(owner, isExtensionReceiver) {} + } + + /** mods object name impl */ + case class ModuleDef(name: TermName, impl: Template)(implicit @constructorOnly src: SourceFile) + extends MemberDef { + type ThisTree[+T <: Untyped] <: Trees.NameTree[T] with Trees.MemberDef[T] with ModuleDef + def withName(name: Name)(using Context): ModuleDef = cpy.ModuleDef(this)(name.toTermName, impl) + } + + /** An untyped template with a derives clause. Derived parents are added to the end + * of the `parents` list. `derivedCount` keeps track of how many there are. 
+ * This representation was chosen because it balances two concerns: + * - maximize overlap between DerivingTemplate and Template for code streamlining + * - keep invariant that elements of untyped trees align with source positions + */ + class DerivingTemplate(constr: DefDef, parentsOrDerived: List[Tree], self: ValDef, preBody: LazyTreeList, derivedCount: Int)(implicit @constructorOnly src: SourceFile) + extends Template(constr, parentsOrDerived, self, preBody) { + override val parents = parentsOrDerived.dropRight(derivedCount) + override val derived = parentsOrDerived.takeRight(derivedCount) + } + + case class ParsedTry(expr: Tree, handler: Tree, finalizer: Tree)(implicit @constructorOnly src: SourceFile) extends TermTree + + case class SymbolLit(str: String)(implicit @constructorOnly src: SourceFile) extends TermTree + + /** An interpolated string + * @param segments a list of two element tickets consisting of string literal and argument tree, + * possibly with a simple string literal as last element of the list + */ + case class InterpolatedString(id: TermName, segments: List[Tree])(implicit @constructorOnly src: SourceFile) + extends TermTree + + /** A function type or closure */ + case class Function(args: List[Tree], body: Tree)(implicit @constructorOnly src: SourceFile) extends Tree { + override def isTerm: Boolean = body.isTerm + override def isType: Boolean = body.isType + } + + /** A function type or closure with `implicit`, `erased`, or `given` modifiers */ + class FunctionWithMods(args: List[Tree], body: Tree, val mods: Modifiers)(implicit @constructorOnly src: SourceFile) + extends Function(args, body) + + /** A polymorphic function type */ + case class PolyFunction(targs: List[Tree], body: Tree)(implicit @constructorOnly src: SourceFile) extends Tree { + override def isTerm = body.isTerm + override def isType = body.isType + } + + /** A function created from a wildcard expression + * @param placeholderParams a list of definitions of synthetic 
parameters. + * @param body the function body where wildcards are replaced by + * references to synthetic parameters. + * This is equivalent to Function, except that forms a special case for the overlapping + * positions tests. + */ + class WildcardFunction(placeholderParams: List[ValDef], body: Tree)(implicit @constructorOnly src: SourceFile) + extends Function(placeholderParams, body) + + case class InfixOp(left: Tree, op: Ident, right: Tree)(implicit @constructorOnly src: SourceFile) extends OpTree + case class PostfixOp(od: Tree, op: Ident)(implicit @constructorOnly src: SourceFile) extends OpTree + case class PrefixOp(op: Ident, od: Tree)(implicit @constructorOnly src: SourceFile) extends OpTree + case class Parens(t: Tree)(implicit @constructorOnly src: SourceFile) extends ProxyTree { + def forwardTo: Tree = t + } + case class Tuple(trees: List[Tree])(implicit @constructorOnly src: SourceFile) extends Tree { + override def isTerm: Boolean = trees.isEmpty || trees.head.isTerm + override def isType: Boolean = !isTerm + } + case class Throw(expr: Tree)(implicit @constructorOnly src: SourceFile) extends TermTree + case class Quote(quoted: Tree)(implicit @constructorOnly src: SourceFile) extends TermTree + case class Splice(expr: Tree)(implicit @constructorOnly src: SourceFile) extends TermTree { + def isInBraces: Boolean = span.end != expr.span.end + } + case class ForYield(enums: List[Tree], expr: Tree)(implicit @constructorOnly src: SourceFile) extends TermTree + case class ForDo(enums: List[Tree], body: Tree)(implicit @constructorOnly src: SourceFile) extends TermTree + case class GenFrom(pat: Tree, expr: Tree, checkMode: GenCheckMode)(implicit @constructorOnly src: SourceFile) extends Tree + case class GenAlias(pat: Tree, expr: Tree)(implicit @constructorOnly src: SourceFile) extends Tree + case class ContextBounds(bounds: TypeBoundsTree, cxBounds: List[Tree])(implicit @constructorOnly src: SourceFile) extends TypTree + case class PatDef(mods: Modifiers, 
pats: List[Tree], tpt: Tree, rhs: Tree)(implicit @constructorOnly src: SourceFile) extends DefTree + case class ExtMethods(paramss: List[ParamClause], methods: List[Tree])(implicit @constructorOnly src: SourceFile) extends Tree + case class Into(tpt: Tree)(implicit @constructorOnly src: SourceFile) extends Tree + case class MacroTree(expr: Tree)(implicit @constructorOnly src: SourceFile) extends Tree + + case class ImportSelector(imported: Ident, renamed: Tree = EmptyTree, bound: Tree = EmptyTree)(implicit @constructorOnly src: SourceFile) extends Tree { + // TODO: Make bound a typed tree? + + /** It's a `given` selector */ + val isGiven: Boolean = imported.name.isEmpty + + /** It's a `given` or `_` selector */ + val isWildcard: Boolean = isGiven || imported.name == nme.WILDCARD + + /** The imported name, EmptyTermName if it's a given selector */ + val name: TermName = imported.name.asInstanceOf[TermName] + + /** The renamed part (which might be `_`), if present, or `name`, if missing */ + val rename: TermName = renamed match + case Ident(rename: TermName) => rename + case _ => name + } + + case class Number(digits: String, kind: NumberKind)(implicit @constructorOnly src: SourceFile) extends TermTree + + enum NumberKind { + case Whole(radix: Int) + case Decimal + case Floating + } + + /** {x1, ..., xN} T (only relevant under captureChecking) */ + case class CapturingTypeTree(refs: List[Tree], parent: Tree)(implicit @constructorOnly src: SourceFile) extends TypTree + + /** Short-lived usage in typer, does not need copy/transform/fold infrastructure */ + case class DependentTypeTree(tp: List[Symbol] -> Context ?-> Type)(implicit @constructorOnly src: SourceFile) extends Tree + + @sharable object EmptyTypeIdent extends Ident(tpnme.EMPTY)(NoSource) with WithoutTypeOrPos[Untyped] { + override def isEmpty: Boolean = true + } + + def WildcardTypeBoundsTree()(using src: SourceFile): TypeBoundsTree = TypeBoundsTree(EmptyTree, EmptyTree, EmptyTree) + object 
WildcardTypeBoundsTree: + def unapply(tree: untpd.Tree): Boolean = tree match + case TypeBoundsTree(EmptyTree, EmptyTree, _) => true + case _ => false + + + /** A block generated by the XML parser, only treated specially by + * `Positioned#checkPos` */ + class XMLBlock(stats: List[Tree], expr: Tree)(implicit @constructorOnly src: SourceFile) extends Block(stats, expr) + + /** An enum to control checking or filtering of patterns in GenFrom trees */ + enum GenCheckMode { + case Ignore // neither filter nor check since filtering was done before + case Check // check that pattern is irrefutable + case CheckAndFilter // both check and filter (transitional period starting with 3.2) + case FilterNow // filter out non-matching elements if we are not in 3.2 or later + case FilterAlways // filter out non-matching elements since pattern is prefixed by `case` + } + + // ----- Modifiers ----------------------------------------------------- + /** Mod is intended to record syntactic information about modifiers, it's + * NOT a replacement of FlagSet. + * + * For any query about semantic information, check `flags` instead. 
+ */ + sealed abstract class Mod(val flags: FlagSet)(implicit @constructorOnly src: SourceFile) + extends Positioned + + object Mod { + case class Private()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Private) + + case class Protected()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Protected) + + case class Var()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Mutable) + + case class Implicit()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Implicit) + + case class Given()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Given) + + case class Erased()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Erased) + + case class Final()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Final) + + case class Sealed()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Sealed) + + case class Opaque()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Opaque) + + case class Open()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Open) + + case class Override()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Override) + + case class Abstract()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Abstract) + + case class Lazy()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Lazy) + + case class Inline()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Inline) + + case class Transparent()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Transparent) + + case class Infix()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Infix) + + /** Used under pureFunctions to mark impure function types `A => B` in `FunctionWithMods` */ + case class Impure()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Impure) + } + + /** Modifiers and annotations for definitions + * + * @param flags The set flags + * @param privateWithin If a private or protected has is followed by a + * qualifier 
[q], the name q, "" as a typename otherwise. + * @param annotations The annotations preceding the modifiers + */ + case class Modifiers ( + flags: FlagSet = EmptyFlags, + privateWithin: TypeName = tpnme.EMPTY, + annotations: List[Tree] = Nil, + mods: List[Mod] = Nil) { + + def is(flag: Flag): Boolean = flags.is(flag) + def is(flag: Flag, butNot: FlagSet): Boolean = flags.is(flag, butNot = butNot) + def isOneOf(fs: FlagSet): Boolean = flags.isOneOf(fs) + def isOneOf(fs: FlagSet, butNot: FlagSet): Boolean = flags.isOneOf(fs, butNot = butNot) + def isAllOf(fc: FlagSet): Boolean = flags.isAllOf(fc) + + def | (fs: FlagSet): Modifiers = withFlags(flags | fs) + def & (fs: FlagSet): Modifiers = withFlags(flags & fs) + def &~(fs: FlagSet): Modifiers = withFlags(flags &~ fs) + + def toTypeFlags: Modifiers = withFlags(flags.toTypeFlags) + def toTermFlags: Modifiers = withFlags(flags.toTermFlags) + + def withFlags(flags: FlagSet): Modifiers = + if (this.flags == flags) this + else copy(flags = flags) + + def withoutFlags(flags: FlagSet): Modifiers = + if (this.isOneOf(flags)) + Modifiers(this.flags &~ flags, this.privateWithin, this.annotations, this.mods.filterNot(_.flags.isOneOf(flags))) + else this + + def withAddedMod(mod: Mod): Modifiers = + if (mods.exists(_ eq mod)) this + else withMods(mods :+ mod) + + private def compatible(flags1: FlagSet, flags2: FlagSet): Boolean = + flags1.isEmpty || flags2.isEmpty + || flags1.isTermFlags && flags2.isTermFlags + || flags1.isTypeFlags && flags2.isTypeFlags + + /** Add `flags` to thos modifier set, checking that there are no type/term conflicts. + * If there are conflicts, issue an error and return the modifiers consisting of + * the added flags only. The reason to do it this way is that the added flags usually + * describe the core of a construct whereas the existing set are the modifiers + * given in the source. 
+ */ + def withAddedFlags(flags: FlagSet, span: Span)(using Context): Modifiers = + if this.flags.isAllOf(flags) then this + else if compatible(this.flags, flags) then this | flags + else + val what = if flags.isTermFlags then "values" else "types" + report.error(em"${(flags & ModifierFlags).flagsString} $what cannot be ${this.flags.flagsString}", ctx.source.atSpan(span)) + Modifiers(flags) + + /** Modifiers with given list of Mods. It is checked that + * all modifiers are already accounted for in `flags` and `privateWithin`. + */ + def withMods(ms: List[Mod]): Modifiers = + if (mods eq ms) this + else { + if (ms.nonEmpty) + for (m <- ms) + assert(flags.isAllOf(m.flags) + || m.isInstanceOf[Mod.Private] && !privateWithin.isEmpty + || (m.isInstanceOf[Mod.Abstract] || m.isInstanceOf[Mod.Override]) && flags.is(AbsOverride), + s"unaccounted modifier: $m in $this with flags ${flags.flagsString} when adding $ms") + copy(mods = ms) + } + + def withAddedAnnotation(annot: Tree): Modifiers = + if (annotations.exists(_ eq annot)) this + else withAnnotations(annotations :+ annot) + + def withAnnotations(annots: List[Tree]): Modifiers = + if (annots eq annotations) this + else copy(annotations = annots) + + def withPrivateWithin(pw: TypeName): Modifiers = + if (pw.isEmpty) this + else copy(privateWithin = pw) + + def hasFlags: Boolean = flags != EmptyFlags + def hasAnnotations: Boolean = annotations.nonEmpty + def hasPrivateWithin: Boolean = privateWithin != tpnme.EMPTY + def hasMod(cls: Class[?]) = mods.exists(_.getClass == cls) + + private def isEnum = is(Enum, butNot = JavaDefined) + + def isEnumCase: Boolean = isEnum && is(Case) + def isEnumClass: Boolean = isEnum && !is(Case) + } + + @sharable val EmptyModifiers: Modifiers = Modifiers() + + // ----- TypeTrees that refer to other tree's symbols ------------------- + + /** A type tree that gets its type from some other tree's symbol. Enters the + * type tree in the References attachment of the `from` tree as a side effect. 
+ */ + abstract class DerivedTypeTree(implicit @constructorOnly src: SourceFile) extends TypeTree { + + private var myWatched: Tree = EmptyTree + + /** The watched tree; used only for printing */ + def watched: Tree = myWatched + + /** Install the derived type tree as a dependency on `original` */ + def watching(original: DefTree): this.type = { + myWatched = original + val existing = original.attachmentOrElse(References, Nil) + original.putAttachment(References, this :: existing) + this + } + + /** Install the derived type tree as a dependency on `sym` */ + def watching(sym: Symbol): this.type = withAttachment(OriginalSymbol, sym) + + /** A hook to ensure that all necessary symbols are completed so that + * OriginalSymbol attachments are propagated to this tree + */ + def ensureCompletions(using Context): Unit = () + + /** The method that computes the tree with the derived type */ + def derivedTree(originalSym: Symbol)(using Context): tpd.Tree + } + + /** Property key containing TypeTrees whose type is computed + * from the symbol in this type. These type trees have marker trees + * TypeRefOfSym or InfoOfSym as their originals. + */ + val References: Property.Key[List[DerivedTypeTree]] = Property.Key() + + /** Property key for TypeTrees marked with TypeRefOfSym or InfoOfSym + * which contains the symbol of the original tree from which this + * TypeTree is derived. 
+ */ + val OriginalSymbol: Property.Key[Symbol] = Property.Key() + + /** Property key for contextual Apply trees of the form `fn given arg` */ + val KindOfApply: Property.StickyKey[ApplyKind] = Property.StickyKey() + + // ------ Creation methods for untyped only ----------------- + + def Ident(name: Name)(implicit src: SourceFile): Ident = new Ident(name) + def SearchFailureIdent(name: Name, explanation: -> String)(implicit src: SourceFile): SearchFailureIdent = new SearchFailureIdent(name, explanation) + def Select(qualifier: Tree, name: Name)(implicit src: SourceFile): Select = new Select(qualifier, name) + def SelectWithSig(qualifier: Tree, name: Name, sig: Signature)(implicit src: SourceFile): Select = new SelectWithSig(qualifier, name, sig) + def This(qual: Ident)(implicit src: SourceFile): This = new This(qual) + def Super(qual: Tree, mix: Ident)(implicit src: SourceFile): Super = new Super(qual, mix) + def Apply(fun: Tree, args: List[Tree])(implicit src: SourceFile): Apply = new Apply(fun, args) + def TypeApply(fun: Tree, args: List[Tree])(implicit src: SourceFile): TypeApply = new TypeApply(fun, args) + def Literal(const: Constant)(implicit src: SourceFile): Literal = new Literal(const) + def New(tpt: Tree)(implicit src: SourceFile): New = new New(tpt) + def Typed(expr: Tree, tpt: Tree)(implicit src: SourceFile): Typed = new Typed(expr, tpt) + def NamedArg(name: Name, arg: Tree)(implicit src: SourceFile): NamedArg = new NamedArg(name, arg) + def Assign(lhs: Tree, rhs: Tree)(implicit src: SourceFile): Assign = new Assign(lhs, rhs) + def Block(stats: List[Tree], expr: Tree)(implicit src: SourceFile): Block = new Block(stats, expr) + def If(cond: Tree, thenp: Tree, elsep: Tree)(implicit src: SourceFile): If = new If(cond, thenp, elsep) + def InlineIf(cond: Tree, thenp: Tree, elsep: Tree)(implicit src: SourceFile): If = new InlineIf(cond, thenp, elsep) + def Closure(env: List[Tree], meth: Tree, tpt: Tree)(implicit src: SourceFile): Closure = new Closure(env, 
meth, tpt) + def Match(selector: Tree, cases: List[CaseDef])(implicit src: SourceFile): Match = new Match(selector, cases) + def InlineMatch(selector: Tree, cases: List[CaseDef])(implicit src: SourceFile): Match = new InlineMatch(selector, cases) + def CaseDef(pat: Tree, guard: Tree, body: Tree)(implicit src: SourceFile): CaseDef = new CaseDef(pat, guard, body) + def Labeled(bind: Bind, expr: Tree)(implicit src: SourceFile): Labeled = new Labeled(bind, expr) + def Return(expr: Tree, from: Tree)(implicit src: SourceFile): Return = new Return(expr, from) + def WhileDo(cond: Tree, body: Tree)(implicit src: SourceFile): WhileDo = new WhileDo(cond, body) + def Try(expr: Tree, cases: List[CaseDef], finalizer: Tree)(implicit src: SourceFile): Try = new Try(expr, cases, finalizer) + def SeqLiteral(elems: List[Tree], elemtpt: Tree)(implicit src: SourceFile): SeqLiteral = new SeqLiteral(elems, elemtpt) + def JavaSeqLiteral(elems: List[Tree], elemtpt: Tree)(implicit src: SourceFile): JavaSeqLiteral = new JavaSeqLiteral(elems, elemtpt) + def Inlined(call: tpd.Tree, bindings: List[MemberDef], expansion: Tree)(implicit src: SourceFile): Inlined = new Inlined(call, bindings, expansion) + def TypeTree()(implicit src: SourceFile): TypeTree = new TypeTree() + def InferredTypeTree()(implicit src: SourceFile): TypeTree = new InferredTypeTree() + def SingletonTypeTree(ref: Tree)(implicit src: SourceFile): SingletonTypeTree = new SingletonTypeTree(ref) + def RefinedTypeTree(tpt: Tree, refinements: List[Tree])(implicit src: SourceFile): RefinedTypeTree = new RefinedTypeTree(tpt, refinements) + def AppliedTypeTree(tpt: Tree, args: List[Tree])(implicit src: SourceFile): AppliedTypeTree = new AppliedTypeTree(tpt, args) + def LambdaTypeTree(tparams: List[TypeDef], body: Tree)(implicit src: SourceFile): LambdaTypeTree = new LambdaTypeTree(tparams, body) + def TermLambdaTypeTree(params: List[ValDef], body: Tree)(implicit src: SourceFile): TermLambdaTypeTree = new TermLambdaTypeTree(params, 
body) + def MatchTypeTree(bound: Tree, selector: Tree, cases: List[CaseDef])(implicit src: SourceFile): MatchTypeTree = new MatchTypeTree(bound, selector, cases) + def ByNameTypeTree(result: Tree)(implicit src: SourceFile): ByNameTypeTree = new ByNameTypeTree(result) + def TypeBoundsTree(lo: Tree, hi: Tree, alias: Tree = EmptyTree)(implicit src: SourceFile): TypeBoundsTree = new TypeBoundsTree(lo, hi, alias) + def Bind(name: Name, body: Tree)(implicit src: SourceFile): Bind = new Bind(name, body) + def Alternative(trees: List[Tree])(implicit src: SourceFile): Alternative = new Alternative(trees) + def UnApply(fun: Tree, implicits: List[Tree], patterns: List[Tree])(implicit src: SourceFile): UnApply = new UnApply(fun, implicits, patterns) + def ValDef(name: TermName, tpt: Tree, rhs: LazyTree)(implicit src: SourceFile): ValDef = new ValDef(name, tpt, rhs) + def DefDef(name: TermName, paramss: List[ParamClause], tpt: Tree, rhs: LazyTree)(implicit src: SourceFile): DefDef = new DefDef(name, paramss, tpt, rhs) + def TypeDef(name: TypeName, rhs: Tree)(implicit src: SourceFile): TypeDef = new TypeDef(name, rhs) + def Template(constr: DefDef, parents: List[Tree], derived: List[Tree], self: ValDef, body: LazyTreeList)(implicit src: SourceFile): Template = + if (derived.isEmpty) new Template(constr, parents, self, body) + else new DerivingTemplate(constr, parents ++ derived, self, body, derived.length) + def Import(expr: Tree, selectors: List[ImportSelector])(implicit src: SourceFile): Import = new Import(expr, selectors) + def Export(expr: Tree, selectors: List[ImportSelector])(implicit src: SourceFile): Export = new Export(expr, selectors) + def PackageDef(pid: RefTree, stats: List[Tree])(implicit src: SourceFile): PackageDef = new PackageDef(pid, stats) + def Annotated(arg: Tree, annot: Tree)(implicit src: SourceFile): Annotated = new Annotated(arg, annot) + def Hole(isTermHole: Boolean, idx: Int, args: List[Tree], content: Tree, tpt: Tree)(implicit src: SourceFile): Hole 
= new Hole(isTermHole, idx, args, content, tpt) + + // ------ Additional creation methods for untyped only ----------------- + + /** new T(args1)...(args_n) + * ==> + * new T.[Ts](args1)...(args_n) + * + * where `Ts` are the class type arguments of `T` or its class type alias. + * Note: we also keep any type arguments as parts of `T`. This is necessary to allow + * navigation into these arguments from the IDE, and to do the right thing in + * PrepareInlineable. + */ + def New(tpt: Tree, argss: List[List[Tree]])(using Context): Tree = + ensureApplied(argss.foldLeft(makeNew(tpt))(Apply(_, _))) + + /** A new expression with constrictor and possibly type arguments. See + * `New(tpt, argss)` for details. + */ + def makeNew(tpt: Tree)(using Context): Tree = { + val (tycon, targs) = tpt match { + case AppliedTypeTree(tycon, targs) => + (tycon, targs) + case TypedSplice(tpt1: tpd.Tree) => + val argTypes = tpt1.tpe.dealias.argTypesLo + def wrap(tpe: Type) = TypeTree(tpe).withSpan(tpt.span) + (tpt, argTypes.map(wrap)) + case _ => + (tpt, Nil) + } + val nu: Tree = Select(New(tycon), nme.CONSTRUCTOR) + if (targs.nonEmpty) TypeApply(nu, targs) else nu + } + + def Block(stat: Tree, expr: Tree)(implicit src: SourceFile): Block = + Block(stat :: Nil, expr) + + def Apply(fn: Tree, arg: Tree)(implicit src: SourceFile): Apply = + Apply(fn, arg :: Nil) + + def ensureApplied(tpt: Tree)(implicit src: SourceFile): Tree = tpt match { + case _: Apply => tpt + case _ => Apply(tpt, Nil) + } + + def AppliedTypeTree(tpt: Tree, arg: Tree)(implicit src: SourceFile): AppliedTypeTree = + AppliedTypeTree(tpt, arg :: Nil) + + def TypeTree(tpe: Type)(using Context): TypedSplice = + TypedSplice(TypeTree().withTypeUnchecked(tpe)) + + def InferredTypeTree(tpe: Type)(using Context): TypedSplice = + TypedSplice(new InferredTypeTree().withTypeUnchecked(tpe)) + + def unitLiteral(implicit src: SourceFile): Literal = Literal(Constant(())) + + def ref(tp: NamedType)(using Context): Tree = + 
TypedSplice(tpd.ref(tp)) + + def ref(sym: Symbol)(using Context): Tree = + TypedSplice(tpd.ref(sym)) + + def rawRef(tp: NamedType)(using Context): Tree = + if tp.typeParams.isEmpty then ref(tp) + else AppliedTypeTree(ref(tp), tp.typeParams.map(_ => WildcardTypeBoundsTree())) + + def rootDot(name: Name)(implicit src: SourceFile): Select = Select(Ident(nme.ROOTPKG), name) + def scalaDot(name: Name)(implicit src: SourceFile): Select = Select(rootDot(nme.scala), name) + def scalaAnnotationDot(name: Name)(using SourceFile): Select = Select(scalaDot(nme.annotation), name) + def scalaRuntimeDot(name: Name)(using SourceFile): Select = Select(scalaDot(nme.runtime), name) + def scalaUnit(implicit src: SourceFile): Select = scalaDot(tpnme.Unit) + def scalaAny(implicit src: SourceFile): Select = scalaDot(tpnme.Any) + def javaDotLangDot(name: Name)(implicit src: SourceFile): Select = Select(Select(Ident(nme.java), nme.lang), name) + + def captureRoot(using Context): Select = + Select(scalaDot(nme.caps), nme.CAPTURE_ROOT) + + def makeConstructor(tparams: List[TypeDef], vparamss: List[List[ValDef]], rhs: Tree = EmptyTree)(using Context): DefDef = + DefDef(nme.CONSTRUCTOR, joinParams(tparams, vparamss), TypeTree(), rhs) + + def emptyConstructor(using Context): DefDef = + makeConstructor(Nil, Nil) + + def makeSelfDef(name: TermName, tpt: Tree)(using Context): ValDef = + ValDef(name, tpt, EmptyTree).withFlags(PrivateLocal) + + def makeTupleOrParens(ts: List[Tree])(using Context): Tree = ts match { + case t :: Nil => Parens(t) + case _ => Tuple(ts) + } + + def makeTuple(ts: List[Tree])(using Context): Tree = ts match { + case t :: Nil => t + case _ => Tuple(ts) + } + + def makeAndType(left: Tree, right: Tree)(using Context): AppliedTypeTree = + AppliedTypeTree(ref(defn.andType.typeRef), left :: right :: Nil) + + def makeParameter(pname: TermName, tpe: Tree, mods: Modifiers, isBackquoted: Boolean = false)(using Context): ValDef = { + val vdef = ValDef(pname, tpe, EmptyTree) + if 
(isBackquoted) vdef.pushAttachment(Backquoted, ()) + vdef.withMods(mods | Param) + } + + def makeSyntheticParameter(n: Int = 1, tpt: Tree | Null = null, flags: FlagSet = SyntheticTermParam)(using Context): ValDef = + ValDef(nme.syntheticParamName(n), if (tpt == null) TypeTree() else tpt, EmptyTree) + .withFlags(flags) + + def lambdaAbstract(params: List[ValDef] | List[TypeDef], tpt: Tree)(using Context): Tree = + params match + case Nil => tpt + case (vd: ValDef) :: _ => TermLambdaTypeTree(params.asInstanceOf[List[ValDef]], tpt) + case _ => LambdaTypeTree(params.asInstanceOf[List[TypeDef]], tpt) + + def lambdaAbstractAll(paramss: List[List[ValDef] | List[TypeDef]], tpt: Tree)(using Context): Tree = + paramss.foldRight(tpt)(lambdaAbstract) + + /** A reference to given definition. If definition is a repeated + * parameter, the reference will be a repeated argument. + */ + def refOfDef(tree: MemberDef)(using Context): Tree = tree match { + case ValDef(_, PostfixOp(_, Ident(tpnme.raw.STAR)), _) => repeated(Ident(tree.name)) + case _ => Ident(tree.name) + } + + /** A repeated argument such as `arg: _*` */ + def repeated(arg: Tree)(using Context): Typed = Typed(arg, Ident(tpnme.WILDCARD_STAR)) + + +// --------- Copier/Transformer/Accumulator classes for untyped trees ----- + + def localCtx(tree: Tree)(using Context): Context = ctx + + override val cpy: UntypedTreeCopier = UntypedTreeCopier() + + class UntypedTreeCopier extends TreeCopier { + + def postProcess(tree: Tree, copied: Tree): copied.ThisTree[Untyped] = + copied.asInstanceOf[copied.ThisTree[Untyped]] + + def postProcess(tree: Tree, copied: MemberDef): copied.ThisTree[Untyped] = { + tree match { + case tree: MemberDef => copied.withMods(tree.rawMods) + case _ => copied + } + }.asInstanceOf[copied.ThisTree[Untyped]] + + def ModuleDef(tree: Tree)(name: TermName, impl: Template)(using Context): ModuleDef = tree match { + case tree: ModuleDef if (name eq tree.name) && (impl eq tree.impl) => tree + case _ => 
finalize(tree, untpd.ModuleDef(name, impl)(tree.source)) + } + def ParsedTry(tree: Tree)(expr: Tree, handler: Tree, finalizer: Tree)(using Context): TermTree = tree match { + case tree: ParsedTry if (expr eq tree.expr) && (handler eq tree.handler) && (finalizer eq tree.finalizer) => tree + case _ => finalize(tree, untpd.ParsedTry(expr, handler, finalizer)(tree.source)) + } + def SymbolLit(tree: Tree)(str: String)(using Context): TermTree = tree match { + case tree: SymbolLit if str == tree.str => tree + case _ => finalize(tree, untpd.SymbolLit(str)(tree.source)) + } + def InterpolatedString(tree: Tree)(id: TermName, segments: List[Tree])(using Context): TermTree = tree match { + case tree: InterpolatedString if (id eq tree.id) && (segments eq tree.segments) => tree + case _ => finalize(tree, untpd.InterpolatedString(id, segments)(tree.source)) + } + def Function(tree: Tree)(args: List[Tree], body: Tree)(using Context): Tree = tree match { + case tree: Function if (args eq tree.args) && (body eq tree.body) => tree + case _ => finalize(tree, untpd.Function(args, body)(tree.source)) + } + def PolyFunction(tree: Tree)(targs: List[Tree], body: Tree)(using Context): Tree = tree match { + case tree: PolyFunction if (targs eq tree.targs) && (body eq tree.body) => tree + case _ => finalize(tree, untpd.PolyFunction(targs, body)(tree.source)) + } + def InfixOp(tree: Tree)(left: Tree, op: Ident, right: Tree)(using Context): Tree = tree match { + case tree: InfixOp if (left eq tree.left) && (op eq tree.op) && (right eq tree.right) => tree + case _ => finalize(tree, untpd.InfixOp(left, op, right)(tree.source)) + } + def PostfixOp(tree: Tree)(od: Tree, op: Ident)(using Context): Tree = tree match { + case tree: PostfixOp if (od eq tree.od) && (op eq tree.op) => tree + case _ => finalize(tree, untpd.PostfixOp(od, op)(tree.source)) + } + def PrefixOp(tree: Tree)(op: Ident, od: Tree)(using Context): Tree = tree match { + case tree: PrefixOp if (op eq tree.op) && (od eq tree.od) => 
tree + case _ => finalize(tree, untpd.PrefixOp(op, od)(tree.source)) + } + def Parens(tree: Tree)(t: Tree)(using Context): ProxyTree = tree match { + case tree: Parens if t eq tree.t => tree + case _ => finalize(tree, untpd.Parens(t)(tree.source)) + } + def Tuple(tree: Tree)(trees: List[Tree])(using Context): Tree = tree match { + case tree: Tuple if trees eq tree.trees => tree + case _ => finalize(tree, untpd.Tuple(trees)(tree.source)) + } + def Throw(tree: Tree)(expr: Tree)(using Context): TermTree = tree match { + case tree: Throw if expr eq tree.expr => tree + case _ => finalize(tree, untpd.Throw(expr)(tree.source)) + } + def Quote(tree: Tree)(quoted: Tree)(using Context): Tree = tree match { + case tree: Quote if quoted eq tree.quoted => tree + case _ => finalize(tree, untpd.Quote(quoted)(tree.source)) + } + def Splice(tree: Tree)(expr: Tree)(using Context): Tree = tree match { + case tree: Splice if expr eq tree.expr => tree + case _ => finalize(tree, untpd.Splice(expr)(tree.source)) + } + def ForYield(tree: Tree)(enums: List[Tree], expr: Tree)(using Context): TermTree = tree match { + case tree: ForYield if (enums eq tree.enums) && (expr eq tree.expr) => tree + case _ => finalize(tree, untpd.ForYield(enums, expr)(tree.source)) + } + def ForDo(tree: Tree)(enums: List[Tree], body: Tree)(using Context): TermTree = tree match { + case tree: ForDo if (enums eq tree.enums) && (body eq tree.body) => tree + case _ => finalize(tree, untpd.ForDo(enums, body)(tree.source)) + } + def GenFrom(tree: Tree)(pat: Tree, expr: Tree, checkMode: GenCheckMode)(using Context): Tree = tree match { + case tree: GenFrom if (pat eq tree.pat) && (expr eq tree.expr) && (checkMode == tree.checkMode) => tree + case _ => finalize(tree, untpd.GenFrom(pat, expr, checkMode)(tree.source)) + } + def GenAlias(tree: Tree)(pat: Tree, expr: Tree)(using Context): Tree = tree match { + case tree: GenAlias if (pat eq tree.pat) && (expr eq tree.expr) => tree + case _ => finalize(tree, 
untpd.GenAlias(pat, expr)(tree.source)) + } + def ContextBounds(tree: Tree)(bounds: TypeBoundsTree, cxBounds: List[Tree])(using Context): TypTree = tree match { + case tree: ContextBounds if (bounds eq tree.bounds) && (cxBounds eq tree.cxBounds) => tree + case _ => finalize(tree, untpd.ContextBounds(bounds, cxBounds)(tree.source)) + } + def PatDef(tree: Tree)(mods: Modifiers, pats: List[Tree], tpt: Tree, rhs: Tree)(using Context): Tree = tree match { + case tree: PatDef if (mods eq tree.mods) && (pats eq tree.pats) && (tpt eq tree.tpt) && (rhs eq tree.rhs) => tree + case _ => finalize(tree, untpd.PatDef(mods, pats, tpt, rhs)(tree.source)) + } + def ExtMethods(tree: Tree)(paramss: List[ParamClause], methods: List[Tree])(using Context): Tree = tree match + case tree: ExtMethods if (paramss eq tree.paramss) && (methods == tree.methods) => tree + case _ => finalize(tree, untpd.ExtMethods(paramss, methods)(tree.source)) + def Into(tree: Tree)(tpt: Tree)(using Context): Tree = tree match + case tree: Into if tpt eq tree.tpt => tree + case _ => finalize(tree, untpd.Into(tpt)(tree.source)) + def ImportSelector(tree: Tree)(imported: Ident, renamed: Tree, bound: Tree)(using Context): Tree = tree match { + case tree: ImportSelector if (imported eq tree.imported) && (renamed eq tree.renamed) && (bound eq tree.bound) => tree + case _ => finalize(tree, untpd.ImportSelector(imported, renamed, bound)(tree.source)) + } + def Number(tree: Tree)(digits: String, kind: NumberKind)(using Context): Tree = tree match { + case tree: Number if (digits == tree.digits) && (kind == tree.kind) => tree + case _ => finalize(tree, untpd.Number(digits, kind)) + } + def CapturingTypeTree(tree: Tree)(refs: List[Tree], parent: Tree)(using Context): Tree = tree match + case tree: CapturingTypeTree if (refs eq tree.refs) && (parent eq tree.parent) => tree + case _ => finalize(tree, untpd.CapturingTypeTree(refs, parent)) + + def TypedSplice(tree: Tree)(splice: tpd.Tree)(using Context): ProxyTree = tree 
match { + case tree: TypedSplice if splice `eq` tree.splice => tree + case _ => finalize(tree, untpd.TypedSplice(splice)(using ctx)) + } + def MacroTree(tree: Tree)(expr: Tree)(using Context): Tree = tree match { + case tree: MacroTree if expr `eq` tree.expr => tree + case _ => finalize(tree, untpd.MacroTree(expr)(tree.source)) + } + } + + abstract class UntypedTreeMap(cpy: UntypedTreeCopier = untpd.cpy) extends TreeMap(cpy) { + override def transformMoreCases(tree: Tree)(using Context): Tree = tree match { + case ModuleDef(name, impl) => + cpy.ModuleDef(tree)(name, transformSub(impl)) + case tree: DerivingTemplate => + cpy.Template(tree)(transformSub(tree.constr), transform(tree.parents), + transform(tree.derived), transformSub(tree.self), transformStats(tree.body, tree.symbol)) + case ParsedTry(expr, handler, finalizer) => + cpy.ParsedTry(tree)(transform(expr), transform(handler), transform(finalizer)) + case SymbolLit(str) => + cpy.SymbolLit(tree)(str) + case InterpolatedString(id, segments) => + cpy.InterpolatedString(tree)(id, segments.mapConserve(transform)) + case Function(args, body) => + cpy.Function(tree)(transform(args), transform(body)) + case PolyFunction(targs, body) => + cpy.PolyFunction(tree)(transform(targs), transform(body)) + case InfixOp(left, op, right) => + cpy.InfixOp(tree)(transform(left), op, transform(right)) + case PostfixOp(od, op) => + cpy.PostfixOp(tree)(transform(od), op) + case PrefixOp(op, od) => + cpy.PrefixOp(tree)(op, transform(od)) + case Parens(t) => + cpy.Parens(tree)(transform(t)) + case Tuple(trees) => + cpy.Tuple(tree)(transform(trees)) + case Throw(expr) => + cpy.Throw(tree)(transform(expr)) + case Quote(t) => + cpy.Quote(tree)(transform(t)) + case Splice(expr) => + cpy.Splice(tree)(transform(expr)) + case ForYield(enums, expr) => + cpy.ForYield(tree)(transform(enums), transform(expr)) + case ForDo(enums, body) => + cpy.ForDo(tree)(transform(enums), transform(body)) + case GenFrom(pat, expr, checkMode) => + 
cpy.GenFrom(tree)(transform(pat), transform(expr), checkMode) + case GenAlias(pat, expr) => + cpy.GenAlias(tree)(transform(pat), transform(expr)) + case ContextBounds(bounds, cxBounds) => + cpy.ContextBounds(tree)(transformSub(bounds), transform(cxBounds)) + case PatDef(mods, pats, tpt, rhs) => + cpy.PatDef(tree)(mods, transform(pats), transform(tpt), transform(rhs)) + case ExtMethods(paramss, methods) => + cpy.ExtMethods(tree)(transformParamss(paramss), transformSub(methods)) + case Into(tpt) => + cpy.Into(tree)(transform(tpt)) + case ImportSelector(imported, renamed, bound) => + cpy.ImportSelector(tree)(transformSub(imported), transform(renamed), transform(bound)) + case Number(_, _) | TypedSplice(_) => + tree + case MacroTree(expr) => + cpy.MacroTree(tree)(transform(expr)) + case CapturingTypeTree(refs, parent) => + cpy.CapturingTypeTree(tree)(transform(refs), transform(parent)) + case _ => + super.transformMoreCases(tree) + } + } + + abstract class UntypedTreeAccumulator[X] extends TreeAccumulator[X] { + self: UntypedTreeAccumulator[X] @retains(caps.cap) => + override def foldMoreCases(x: X, tree: Tree)(using Context): X = tree match { + case ModuleDef(name, impl) => + this(x, impl) + case tree: DerivingTemplate => + this(this(this(this(this(x, tree.constr), tree.parents), tree.derived), tree.self), tree.body) + case ParsedTry(expr, handler, finalizer) => + this(this(this(x, expr), handler), finalizer) + case SymbolLit(str) => + x + case InterpolatedString(id, segments) => + this(x, segments) + case Function(args, body) => + this(this(x, args), body) + case PolyFunction(targs, body) => + this(this(x, targs), body) + case InfixOp(left, op, right) => + this(this(this(x, left), op), right) + case PostfixOp(od, op) => + this(this(x, od), op) + case PrefixOp(op, od) => + this(this(x, op), od) + case Parens(t) => + this(x, t) + case Tuple(trees) => + this(x, trees) + case Throw(expr) => + this(x, expr) + case Quote(t) => + this(x, t) + case Splice(expr) => + this(x, 
expr) + case ForYield(enums, expr) => + this(this(x, enums), expr) + case ForDo(enums, body) => + this(this(x, enums), body) + case GenFrom(pat, expr, _) => + this(this(x, pat), expr) + case GenAlias(pat, expr) => + this(this(x, pat), expr) + case ContextBounds(bounds, cxBounds) => + this(this(x, bounds), cxBounds) + case PatDef(mods, pats, tpt, rhs) => + this(this(this(x, pats), tpt), rhs) + case ExtMethods(paramss, methods) => + this(paramss.foldLeft(x)(apply), methods) + case Into(tpt) => + this(x, tpt) + case ImportSelector(imported, renamed, bound) => + this(this(this(x, imported), renamed), bound) + case Number(_, _) => + x + case TypedSplice(splice) => + this(x, splice) + case MacroTree(expr) => + this(x, expr) + case CapturingTypeTree(refs, parent) => + this(this(x, refs), parent) + case _ => + super.foldMoreCases(x, tree) + } + } + + abstract class UntypedTreeTraverser extends UntypedTreeAccumulator[Unit] { + def traverse(tree: Tree)(using Context): Unit + def apply(x: Unit, tree: Tree)(using Context): Unit = traverse(tree) + protected def traverseChildren(tree: Tree)(using Context): Unit = foldOver((), tree) + } + + /** Fold `f` over all tree nodes, in depth-first, prefix order */ + class UntypedDeepFolder[X](f: (X, Tree) => X) extends UntypedTreeAccumulator[X] { + def apply(x: X, tree: Tree)(using Context): X = foldOver(f(x, tree), tree) + } + + /** Is there a subtree of this tree that satisfies predicate `p`? 
*/ + extension (tree: Tree) def existsSubTree(p: Tree => Boolean)(using Context): Boolean = { + val acc = new UntypedTreeAccumulator[Boolean] { + def apply(x: Boolean, t: Tree)(using Context) = x || p(t) || foldOver(x, t) + } + acc(false, tree) + } + + protected def FunProto(args: List[Tree], resType: Type)(using Context) = + ProtoTypes.FunProto(args, resType)(ctx.typer, ApplyKind.Regular) +} diff --git a/tests/pos-with-compiler-cc/dotc/cc/BoxedTypeCache.scala b/tests/pos-with-compiler-cc/dotc/cc/BoxedTypeCache.scala new file mode 100644 index 000000000000..56b3f5ba5047 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/cc/BoxedTypeCache.scala @@ -0,0 +1,19 @@ +package dotty.tools +package dotc +package cc + +import core.* +import Types.*, Symbols.*, Contexts.* + +/** A one-element cache for the boxed version of an unboxed capturing type */ +class BoxedTypeCache: + private var boxed: Type = compiletime.uninitialized + private var unboxed: Type = NoType + + def apply(tp: AnnotatedType)(using Context): Type = + if tp ne unboxed then + unboxed = tp + val CapturingType(parent, refs) = tp: @unchecked + boxed = CapturingType(parent, refs, boxed = true) + boxed +end BoxedTypeCache \ No newline at end of file diff --git a/tests/pos-with-compiler-cc/dotc/cc/CaptureAnnotation.scala b/tests/pos-with-compiler-cc/dotc/cc/CaptureAnnotation.scala new file mode 100644 index 000000000000..67222f07efbb --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/cc/CaptureAnnotation.scala @@ -0,0 +1,77 @@ +package dotty.tools +package dotc +package cc + +import core.* +import Types.*, Symbols.*, Contexts.*, Annotations.* +import ast.Trees.* +import ast.{tpd, untpd} +import Decorators.* +import config.Printers.capt +import printing.Printer +import printing.Texts.Text +import annotation.retains + +/** An annotation representing a capture set and whether it is boxed. 
+ * It simulates a normal @retains annotation except that it is more efficient, + * supports variables as capture sets, and adds a `boxed` flag. + * These annotations are created during capture checking. Before that + * there are only regular @retains and @retainsByName annotations. + * @param refs the capture set + * @param boxed whether the type carrying the annotation is boxed + * @param cls the underlying class (either annotation.retains or annotation.retainsByName) + */ +case class CaptureAnnotation(refs: CaptureSet, boxed: Boolean)(cls: Symbol) extends Annotation: + import CaptureAnnotation.* + import tpd.* + + /** A cache for boxed version of a capturing type with this annotation */ + val boxedType = BoxedTypeCache() + + /** Reconstitute annotation tree from capture set */ + override def tree(using Context) = + val elems = refs.elems.toList.map { + case cr: TermRef => ref(cr) + case cr: TermParamRef => untpd.Ident(cr.paramName).withType(cr) + case cr: ThisType => This(cr.cls) + } + val arg = repeated(elems, TypeTree(defn.AnyType)) + New(symbol.typeRef, arg :: Nil) + + override def symbol(using Context) = cls + + override def derivedAnnotation(tree: Tree)(using Context): Annotation = this + + def derivedAnnotation(refs: CaptureSet, boxed: Boolean)(using Context): Annotation = + if (this.refs eq refs) && (this.boxed == boxed) then this + else CaptureAnnotation(refs, boxed)(cls) + + override def sameAnnotation(that: Annotation)(using Context): Boolean = that match + case CaptureAnnotation(refs, boxed) => + this.refs == refs && this.boxed == boxed && this.symbol == that.symbol + case _ => false + + override def mapWith(tm: TypeMap @retains(caps.cap))(using Context) = + val elems = refs.elems.toList + val elems1 = elems.mapConserve(tm) + if elems1 eq elems then this + else if elems1.forall(_.isInstanceOf[CaptureRef]) + then derivedAnnotation(CaptureSet(elems1.asInstanceOf[List[CaptureRef]]*), boxed) + else EmptyAnnotation + + override def refersToParamOf(tl: 
TermLambda)(using Context): Boolean = + refs.elems.exists { + case TermParamRef(tl1, _) => tl eq tl1 + case _ => false + } + + override def toText(printer: Printer): Text = refs.toText(printer) + + override def hash: Int = + (refs.hashCode << 1) | (if boxed then 1 else 0) + + override def eql(that: Annotation) = that match + case that: CaptureAnnotation => (this.refs eq that.refs) && (this.boxed == that.boxed) + case _ => false + +end CaptureAnnotation diff --git a/tests/pos-with-compiler-cc/dotc/cc/CaptureOps.scala b/tests/pos-with-compiler-cc/dotc/cc/CaptureOps.scala new file mode 100644 index 000000000000..0ede1825e611 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/cc/CaptureOps.scala @@ -0,0 +1,256 @@ +package dotty.tools +package dotc +package cc + +import core.* +import Types.*, Symbols.*, Contexts.*, Annotations.*, Flags.* +import ast.{tpd, untpd} +import Decorators.*, NameOps.* +import config.Printers.capt +import util.Property.Key +import tpd.* +import config.Feature + +private val Captures: Key[CaptureSet] = Key() +private val BoxedType: Key[BoxedTypeCache] = Key() + +/** The arguments of a @retains or @retainsByName annotation */ +private[cc] def retainedElems(tree: Tree)(using Context): List[Tree] = tree match + case Apply(_, Typed(SeqLiteral(elems, _), _) :: Nil) => elems + case _ => Nil + +/** An exception thrown if a @retains argument is not syntactically a CaptureRef */ +class IllegalCaptureRef(tpe: Type) extends Exception + +extension (tree: Tree) + + /** Map tree with CaptureRef type to its type, throw IllegalCaptureRef otherwise */ + def toCaptureRef(using Context): CaptureRef = tree.tpe match + case ref: CaptureRef => ref + case tpe => throw IllegalCaptureRef(tpe) + + /** Convert a @retains or @retainsByName annotation tree to the capture set it represents. + * For efficience, the result is cached as an Attachment on the tree. 
+ */ + def toCaptureSet(using Context): CaptureSet = + tree.getAttachment(Captures) match + case Some(refs) => refs + case None => + val refs = CaptureSet(retainedElems(tree).map(_.toCaptureRef)*) + .showing(i"toCaptureSet $tree --> $result", capt) + tree.putAttachment(Captures, refs) + refs + + /** Under pureFunctions, add a @retainsByName(*)` annotation to the argument of + * a by name parameter type, turning the latter into an impure by name parameter type. + */ + def adaptByNameArgUnderPureFuns(using Context): Tree = + if Feature.pureFunsEnabledSomewhere then + val rbn = defn.RetainsByNameAnnot + Annotated(tree, + New(rbn.typeRef).select(rbn.primaryConstructor).appliedTo( + Typed( + SeqLiteral(ref(defn.captureRoot) :: Nil, TypeTree(defn.AnyType)), + TypeTree(defn.RepeatedParamType.appliedTo(defn.AnyType)) + ) + ) + ) + else tree + +extension (tp: Type) + + /** @pre `tp` is a CapturingType */ + def derivedCapturingType(parent: Type, refs: CaptureSet)(using Context): Type = tp match + case tp @ CapturingType(p, r) => + if (parent eq p) && (refs eq r) then tp + else CapturingType(parent, refs, tp.isBoxed) + + /** If this is a unboxed capturing type with nonempty capture set, its boxed version. + * Or, if type is a TypeBounds of capturing types, the version where the bounds are boxed. + * The identity for all other types. 
+ */ + def boxed(using Context): Type = tp.dealias match + case tp @ CapturingType(parent, refs) if !tp.isBoxed && !refs.isAlwaysEmpty => + tp.annot match + case ann: CaptureAnnotation => + ann.boxedType(tp) + case ann => + ann.tree.getAttachment(BoxedType) match + case None => ann.tree.putAttachment(BoxedType, BoxedTypeCache()) + case _ => + ann.tree.attachment(BoxedType)(tp) + case tp: RealTypeBounds => + tp.derivedTypeBounds(tp.lo.boxed, tp.hi.boxed) + case _ => + tp + + /** If `sym` is a type parameter, the boxed version of `tp`, otherwise `tp` */ + def boxedIfTypeParam(sym: Symbol)(using Context) = + if sym.is(TypeParam) then tp.boxed else tp + + /** The boxed version of `tp`, unless `tycon` is a function symbol */ + def boxedUnlessFun(tycon: Type)(using Context) = + if ctx.phase != Phases.checkCapturesPhase || defn.isFunctionSymbol(tycon.typeSymbol) + then tp + else tp.boxed + + /** The capture set consisting of all top-level captures of `tp` that appear under a box. + * Unlike for `boxed` this also considers parents of capture types, unions and + * intersections, and type proxies other than abstract types. + */ + def boxedCaptureSet(using Context): CaptureSet = + def getBoxed(tp: Type): CaptureSet = tp match + case tp @ CapturingType(parent, refs) => + val pcs = getBoxed(parent) + if tp.isBoxed then refs ++ pcs else pcs + case tp: TypeRef if tp.symbol.isAbstractType => CaptureSet.empty + case tp: TypeProxy => getBoxed(tp.superType) + case tp: AndType => getBoxed(tp.tp1) ** getBoxed(tp.tp2) + case tp: OrType => getBoxed(tp.tp1) ++ getBoxed(tp.tp2) + case _ => CaptureSet.empty + getBoxed(tp) + + /** Is the boxedCaptureSet of this type nonempty? */ + def isBoxedCapturing(using Context) = !tp.boxedCaptureSet.isAlwaysEmpty + + /** If this type is a capturing type, the version with boxed statues as given by `boxed`. + * If it is a TermRef of a capturing type, and the box status flips, widen to a capturing + * type that captures the TermRef. 
+ */ + def forceBoxStatus(boxed: Boolean)(using Context): Type = tp.widenDealias match + case tp @ CapturingType(parent, refs) if tp.isBoxed != boxed => + val refs1 = tp match + case ref: CaptureRef if ref.isTracked => ref.singletonCaptureSet + case _ => refs + CapturingType(parent, refs1, boxed) + case _ => + tp + + /** Map capturing type to their parents. Capturing types accessible + * via dealising are also stripped. + */ + def stripCapturing(using Context): Type = tp.dealiasKeepAnnots match + case CapturingType(parent, _) => + parent.stripCapturing + case atd @ AnnotatedType(parent, annot) => + atd.derivedAnnotatedType(parent.stripCapturing, annot) + case _ => + tp + + /** Under pureFunctions, map regular function type to impure function type + */ + def adaptFunctionTypeUnderPureFuns(using Context): Type = tp match + case AppliedType(fn, args) + if Feature.pureFunsEnabledSomewhere && defn.isFunctionClass(fn.typeSymbol) => + val fname = fn.typeSymbol.name + defn.FunctionType( + fname.functionArity, + isContextual = fname.isContextFunction, + isErased = fname.isErasedFunction, + isImpure = true).appliedTo(args) + case _ => + tp + + /** Under pureFunctions, add a @retainsByName(*)` annotation to the argument of + * a by name parameter type, turning the latter into an impure by name parameter type. + */ + def adaptByNameArgUnderPureFuns(using Context): Type = + if Feature.pureFunsEnabledSomewhere then + AnnotatedType(tp, + CaptureAnnotation(CaptureSet.universal, boxed = false)(defn.RetainsByNameAnnot)) + else + tp + + def isCapturingType(using Context): Boolean = + tp match + case CapturingType(_, _) => true + case _ => false + + /** Is type known to be always pure by its class structure, + * so that adding a capture set to it would not make sense? 
+ */ + def isAlwaysPure(using Context): Boolean = tp.dealias match + case tp: (TypeRef | AppliedType) => + val sym = tp.typeSymbol + if sym.isClass then sym.isPureClass + else tp.superType.isAlwaysPure + case CapturingType(parent, refs) => + parent.isAlwaysPure || refs.isAlwaysEmpty + case tp: TypeProxy => + tp.superType.isAlwaysPure + case tp: AndType => + tp.tp1.isAlwaysPure || tp.tp2.isAlwaysPure + case tp: OrType => + tp.tp1.isAlwaysPure && tp.tp2.isAlwaysPure + case _ => + false + +extension (cls: ClassSymbol) + + def pureBaseClass(using Context): Option[Symbol] = + cls.baseClasses.find(bc => + defn.pureBaseClasses.contains(bc) + || { + val selfType = bc.givenSelfType + selfType.exists && selfType.captureSet.isAlwaysEmpty + }) + +extension (sym: Symbol) + + /** A class is pure if: + * - one its base types has an explicitly declared self type with an empty capture set + * - or it is a value class + * - or it is an exception + * - or it is one of Nothing, Null, or String + */ + def isPureClass(using Context): Boolean = sym match + case cls: ClassSymbol => + cls.pureBaseClass.isDefined || defn.pureSimpleClasses.contains(cls) + case _ => + false + + /** Does this symbol allow results carrying the universal capability? + * Currently this is true only for function type applies (since their + * results are unboxed) and `erasedValue` since this function is magic in + * that is allows to conjure global capabilies from nothing (aside: can we find a + * more controlled way to achieve this?). + * But it could be generalized to other functions that so that they can take capability + * classes as arguments. + */ + def allowsRootCapture(using Context): Boolean = + sym == defn.Compiletime_erasedValue + || defn.isFunctionClass(sym.maybeOwner) + + /** When applying `sym`, would the result type be unboxed? + * This is the case if the result type contains a top-level reference to an enclosing + * class or method type parameter and the method does not allow root capture. 
+ * If the type parameter is instantiated to a boxed type, that type would + * have to be unboxed in the method's result. + */ + def unboxesResult(using Context): Boolean = + def containsEnclTypeParam(tp: Type): Boolean = tp.strippedDealias match + case tp @ TypeRef(pre: ThisType, _) => tp.symbol.is(Param) + case tp: TypeParamRef => true + case tp: AndOrType => containsEnclTypeParam(tp.tp1) || containsEnclTypeParam(tp.tp2) + case tp: RefinedType => containsEnclTypeParam(tp.parent) || containsEnclTypeParam(tp.refinedInfo) + case _ => false + containsEnclTypeParam(sym.info.finalResultType) + && !sym.allowsRootCapture + && sym != defn.Caps_unsafeBox + && sym != defn.Caps_unsafeUnbox + +extension (tp: AnnotatedType) + /** Is this a boxed capturing type? */ + def isBoxed(using Context): Boolean = tp.annot match + case ann: CaptureAnnotation => ann.boxed + case _ => false + +extension (ts: List[Type]) + /** Equivalent to ts.mapconserve(_.boxedUnlessFun(tycon)) but more efficient where + * it is the identity. 
+ */ + def boxedUnlessFun(tycon: Type)(using Context) = + if ctx.phase != Phases.checkCapturesPhase || defn.isFunctionClass(tycon.typeSymbol) + then ts + else ts.mapconserve(_.boxed) + diff --git a/tests/pos-with-compiler-cc/dotc/cc/CaptureSet.scala b/tests/pos-with-compiler-cc/dotc/cc/CaptureSet.scala new file mode 100644 index 000000000000..c31bcb76c2c7 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/cc/CaptureSet.scala @@ -0,0 +1,902 @@ +package dotty.tools +package dotc +package cc + +import core.* +import Types.*, Symbols.*, Flags.*, Contexts.*, Decorators.* +import config.Printers.capt +import Annotations.Annotation +import annotation.threadUnsafe +import annotation.constructorOnly +import annotation.internal.sharable +import reporting.trace +import printing.{Showable, Printer} +import printing.Texts.* +import util.{SimpleIdentitySet, Property} +import util.common.alwaysTrue +import scala.collection.mutable +import config.Config.ccAllowUnsoundMaps +import language.experimental.pureFunctions +import annotation.retains + +/** A class for capture sets. Capture sets can be constants or variables. + * Capture sets support inclusion constraints <:< where <:< is subcapturing. + * + * They also allow + * - mapping with functions from elements to capture sets + * - filtering with predicates on elements + * - intersecting wo capture sets + * + * That is, constraints can be of the forms + * + * cs1 <:< cs2 + * cs1 = ∪ {f(x) | x ∈ cs2} where f is a function from capture references to capture sets. + * cs1 = ∪ {x | x ∈ cs2, p(x)} where p is a predicate on capture references + * cs1 = cs2 ∩ cs2 + * + * We call the resulting constraint system "monadic set constraints". + * To support capture propagation across maps, mappings are supported only + * if the mapped function is either a bijection or if it is idempotent + * on capture references (c.f. doc comment on `map` below). 
+ */ +sealed abstract class CaptureSet extends Showable, caps.Pure: + import CaptureSet.* + + /** The elements of this capture set. For capture variables, + * the elements known so far. + */ + def elems: Refs + + /** Is this capture set constant (i.e. not an unsolved capture variable)? + * Solved capture variables count as constant. + */ + def isConst: Boolean + + /** Is this capture set always empty? For unsolved capture veriables, returns + * always false. + */ + def isAlwaysEmpty: Boolean + + /** Is this capture set definitely non-empty? */ + final def isNotEmpty: Boolean = !elems.isEmpty + + /** Convert to Const. @pre: isConst */ + def asConst: Const = this match + case c: Const => c + case v: Var => + assert(v.isConst) + Const(v.elems) + + /** Cast to variable. @pre: !isConst */ + def asVar: Var = + assert(!isConst) + asInstanceOf[Var] + + /** Does this capture set contain the root reference `*` as element? */ + final def isUniversal(using Context) = + elems.exists { + case ref: TermRef => ref.symbol == defn.captureRoot + case _ => false + } + + /** Add new elements to this capture set if allowed. + * @pre `newElems` is not empty and does not overlap with `this.elems`. + * Constant capture sets never allow to add new elements. + * Variables allow it if and only if the new elements can be included + * in all their dependent sets. + * @param origin The set where the elements come from, or `empty` if not known. + * @return CompareResult.OK if elements were added, or a conflicting + * capture set that prevents addition otherwise. 
+ */ + protected def addNewElems(newElems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult + + /** If this is a variable, add `cs` as a dependent set */ + protected def addDependent(cs: CaptureSet)(using Context, VarState): CompareResult + + /** If `cs` is a variable, add this capture set as one of its dependent sets */ + protected def addAsDependentTo(cs: CaptureSet)(using Context): this.type = + cs.addDependent(this)(using ctx, UnrecordedState) + this + + /** Try to include all references of `elems` that are not yet accounted for by this + * capture set. Inclusion is via `addNewElems`. + * @param origin The set where the elements come from, or `empty` if not known. + * @return CompareResult.OK if all unaccounted elements could be added, + * capture set that prevents addition otherwise. + */ + protected final def tryInclude(elems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = + val unaccounted = elems.filter(!accountsFor(_)) + if unaccounted.isEmpty then CompareResult.OK + else addNewElems(unaccounted, origin) + + /** Equivalent to `tryInclude({elem}, origin)`, but more efficient */ + protected final def tryInclude(elem: CaptureRef, origin: CaptureSet)(using Context, VarState): CompareResult = + if accountsFor(elem) then CompareResult.OK + else addNewElems(elem.singletonCaptureSet.elems, origin) + + /* x subsumes y if x is the same as y, or x is a this reference and y refers to a field of x */ + extension (x: CaptureRef) private def subsumes(y: CaptureRef) = + (x eq y) + || y.match + case y: TermRef => y.prefix eq x + case _ => false + + /** {x} <:< this where <:< is subcapturing, but treating all variables + * as frozen. 
+ */ + def accountsFor(x: CaptureRef)(using Context): Boolean = + reporting.trace(i"$this accountsFor $x, ${x.captureSetOfInfo}?", show = true) { + elems.exists(_.subsumes(x)) + || !x.isRootCapability && x.captureSetOfInfo.subCaptures(this, frozen = true).isOK + } + + /** A more optimistic version of accountsFor, which does not take variable supersets + * of the `x` reference into account. A set might account for `x` if it accounts + * for `x` in a state where we assume all supersets of `x` have just the elements + * known at this point. On the other hand if x's capture set has no known elements, + * a set `cs` might account for `x` only if it subsumes `x` or it contains the + * root capability `*`. + */ + def mightAccountFor(x: CaptureRef)(using Context): Boolean = + reporting.trace(i"$this mightAccountFor $x, ${x.captureSetOfInfo}?", show = true) { + elems.exists(elem => elem.subsumes(x) || elem.isRootCapability) + || !x.isRootCapability + && { + val elems = x.captureSetOfInfo.elems + !elems.isEmpty && elems.forall(mightAccountFor) + } + } + + /** A more optimistic version of subCaptures used to choose one of two typing rules + * for selections and applications. `cs1 mightSubcapture cs2` if `cs2` might account for + * every element currently known to be in `cs1`. + */ + def mightSubcapture(that: CaptureSet)(using Context): Boolean = + elems.forall(that.mightAccountFor) + + /** The subcapturing test. + * @param frozen if true, no new variables or dependent sets are allowed to + * be added when making this test. An attempt to add either + * will result in failure. 
+ */ + final def subCaptures(that: CaptureSet, frozen: Boolean)(using Context): CompareResult = + subCaptures(that)(using ctx, if frozen then FrozenState else VarState()) + + /** The subcapturing test, using a given VarState */ + private def subCaptures(that: CaptureSet)(using Context, VarState): CompareResult = + def recur(elems: List[CaptureRef]): CompareResult = elems match + case elem :: elems1 => + var result = that.tryInclude(elem, this) + if !result.isOK && !elem.isRootCapability && summon[VarState] != FrozenState then + result = elem.captureSetOfInfo.subCaptures(that) + if result.isOK then + recur(elems1) + else + varState.rollBack() + result + case Nil => + addDependent(that) + recur(elems.toList) + .showing(i"subcaptures $this <:< $that = $result", capt)(using null) + + /** Two capture sets are considered =:= equal if they mutually subcapture each other + * in a frozen state. + */ + def =:= (that: CaptureSet)(using Context): Boolean = + this.subCaptures(that, frozen = true).isOK + && that.subCaptures(this, frozen = true).isOK + + /** The smallest capture set (via <:<) that is a superset of both + * `this` and `that` + */ + def ++ (that: CaptureSet)(using Context): CaptureSet = + if this.subCaptures(that, frozen = true).isOK then that + else if that.subCaptures(this, frozen = true).isOK then this + else if this.isConst && that.isConst then Const(this.elems ++ that.elems) + else Var(this.elems ++ that.elems).addAsDependentTo(this).addAsDependentTo(that) + + /** The smallest superset (via <:<) of this capture set that also contains `ref`. 
+ */ + def + (ref: CaptureRef)(using Context): CaptureSet = + this ++ ref.singletonCaptureSet + + /** The largest capture set (via <:<) that is a subset of both `this` and `that` + */ + def **(that: CaptureSet)(using Context): CaptureSet = + if this.subCaptures(that, frozen = true).isOK then this + else if that.subCaptures(this, frozen = true).isOK then that + else if this.isConst && that.isConst then Const(elemIntersection(this, that)) + else Intersected(this, that) + + /** The largest subset (via <:<) of this capture set that does not account for + * any of the elements in the constant capture set `that` + */ + def -- (that: CaptureSet.Const)(using Context): CaptureSet = + val elems1 = elems.filter(!that.accountsFor(_)) + if elems1.size == elems.size then this + else if this.isConst then Const(elems1) + else Diff(asVar, that) + + /** The largest subset (via <:<) of this capture set that does not account for `ref` */ + def - (ref: CaptureRef)(using Context): CaptureSet = + this -- ref.singletonCaptureSet + + /** The largest subset (via <:<) of this capture set that only contains elements + * for which `p` is true. + */ + def filter(p: (c: Context) ?-> (CaptureRef -> Boolean) @retains(c))(using Context): CaptureSet = + if this.isConst then + val elems1 = elems.filter(p) + if elems1 == elems then this + else Const(elems.filter(p)) + else Filtered(asVar, p) + + /** Capture set obtained by applying `tm` to all elements of the current capture set + * and joining the results. If the current capture set is a variable, the same + * transformation is applied to all future additions of new elements. + * + * Note: We have a problem how we handle the situation where we have a mapped set + * + * cs2 = tm(cs1) + * + * and then the propagation solver adds a new element `x` to `cs2`. What do we + * know in this case about `cs1`? We can answer this question in a sound way only + * if `tm` is a bijection on capture references or it is idempotent on capture references. 
+ * (see definition in IdempotentCapRefMap). + * If `tm` is a bijection we know that `tm^-1(x)` must be in `cs1`. If `tm` is idempotent + * one possible solution is that `x` is in `cs1`, which is what we assume in this case. + * That strategy is sound but not complete. + * + * If `tm` is some other map, we don't know how to handle this case. For now, + * we simply refuse to handle other maps. If they do need to be handled, + * `OtherMapped` provides some approximation to a solution, but it is neither + * sound nor complete. + */ + def map(tm: TypeMap)(using Context): CaptureSet = tm match + case tm: BiTypeMap => + val mappedElems = elems.map(tm.forward) + if isConst then + if mappedElems == elems then this + else Const(mappedElems) + else BiMapped(asVar, tm, mappedElems) + case tm: IdentityCaptRefMap => + this + case _ => + val mapped = mapRefs(elems, tm, tm.variance) + if isConst then + if mapped.isConst && mapped.elems == elems then this + else mapped + else Mapped(asVar, tm, tm.variance, mapped) + + /** A mapping resulting from substituting parameters of a BindingType to a list of types */ + def substParams(tl: BindingType, to: List[Type])(using Context) = + map(Substituters.SubstParamsMap(tl, to).detach) + + /** Invoke handler if this set has (or later aquires) the root capability `*` */ + def disallowRootCapability(handler: () -> Context ?-> Unit)(using Context): this.type = + if isUniversal then handler() + this + + /** An upper approximation of this capture set, i.e. a constant set that is + * subcaptured by this set. If the current set is a variable + * it is the intersection of all upper approximations of known supersets + * of the variable. + * The upper approximation is meaningful only if it is constant. If not, + * `upperApprox` can return an arbitrary capture set variable. + * `upperApprox` is used in `solve`. 
+ */ + protected def upperApprox(origin: CaptureSet)(using Context): CaptureSet + + /** Assuming set this set dependds on was just solved to be constant, propagate this info + * to this set. This might result in the set being solved to be constant + * itself. + */ + protected def propagateSolved()(using Context): Unit = () + + /** This capture set with a description that tells where it comes from */ + def withDescription(description: String): CaptureSet + + /** The provided description (using `withDescription`) for this capture set or else "" */ + def description: String + + /** A regular @retains or @retainsByName annotation with the elements of this set as arguments. */ + def toRegularAnnotation(cls: Symbol)(using Context): Annotation = + Annotation(CaptureAnnotation(this, boxed = false)(cls).tree) + + override def toText(printer: Printer): Text = + Str("{") ~ Text(elems.toList.map(printer.toTextCaptureRef), ", ") ~ Str("}") ~~ description + +object CaptureSet: + type Refs = SimpleIdentitySet[CaptureRef] + type Vars = SimpleIdentitySet[Var] + type Deps = SimpleIdentitySet[CaptureSet] + + @sharable private var varId = 0 + + /** If set to `true`, capture stack traces that tell us where sets are created */ + private final val debugSets = false + + private val emptySet = SimpleIdentitySet.empty + + /** The empty capture set `{}` */ + val empty: CaptureSet.Const = Const(emptySet) + + /** The universal capture set `{*}` */ + def universal(using Context): CaptureSet = + defn.captureRoot.termRef.singletonCaptureSet + + /** Used as a recursion brake */ + @sharable private[dotc] val Pending = Const(SimpleIdentitySet.empty) + + def apply(elems: CaptureRef*)(using Context): CaptureSet.Const = + if elems.isEmpty then empty + else Const(SimpleIdentitySet(elems.map(_.normalizedRef)*)) + + def apply(elems: Refs)(using Context): CaptureSet.Const = + if elems.isEmpty then empty else Const(elems) + + /** The subclass of constant capture sets with given elements `elems` */ + class 
Const private[CaptureSet] (val elems: Refs, val description: String = "") extends CaptureSet: + def isConst = true + def isAlwaysEmpty = elems.isEmpty + + def addNewElems(elems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = + CompareResult.fail(this) + + def addDependent(cs: CaptureSet)(using Context, VarState) = CompareResult.OK + + def upperApprox(origin: CaptureSet)(using Context): CaptureSet = this + + def withDescription(description: String): Const = Const(elems, description) + + override def toString = elems.toString + end Const + + /** The subclass of captureset variables with given initial elements */ + class Var(initialElems: Refs = emptySet) extends CaptureSet: + + /** A unique identification number for diagnostics */ + val id = + varId += 1 + varId + + /** A variable is solved if it is aproximated to a from-then-on constant set. */ + private var isSolved: Boolean = false + + /** The elements currently known to be in the set */ + var elems: Refs = initialElems + + /** The sets currently known to be dependent sets (i.e. new additions to this set + * are propagated to these dependent sets.) + */ + var deps: Deps = emptySet + + def isConst = isSolved + def isAlwaysEmpty = false + + /** A handler to be invoked if the root reference `*` is added to this set + * The handler is pure in the sense that it will only output diagnostics. + */ + var rootAddedHandler: () -> Context ?-> Unit = () => () + + var description: String = "" + + /** Record current elements in given VarState provided it does not yet + * contain an entry for this variable. + */ + private def recordElemsState()(using VarState): Boolean = + varState.getElems(this) match + case None => varState.putElems(this, elems) + case _ => true + + /** Record current dependent sets in given VarState provided it does not yet + * contain an entry for this variable. 
+ */ + private[CaptureSet] def recordDepsState()(using VarState): Boolean = + varState.getDeps(this) match + case None => varState.putDeps(this, deps) + case _ => true + + /** Reset elements to what was recorded in `state` */ + def resetElems()(using state: VarState): Unit = + elems = state.elems(this) + + /** Reset dependent sets to what was recorded in `state` */ + def resetDeps()(using state: VarState): Unit = + deps = state.deps(this) + + def addNewElems(newElems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = + if !isConst && recordElemsState() then + elems ++= newElems + if isUniversal then rootAddedHandler() + // assert(id != 2 || elems.size != 2, this) + (CompareResult.OK /: deps) { (r, dep) => + r.andAlso(dep.tryInclude(newElems, this)) + } + else // fail if variable is solved or given VarState is frozen + CompareResult.fail(this) + + def addDependent(cs: CaptureSet)(using Context, VarState): CompareResult = + if (cs eq this) || cs.isUniversal || isConst then + CompareResult.OK + else if recordDepsState() then + deps += cs + CompareResult.OK + else + CompareResult.fail(this) + + override def disallowRootCapability(handler: () -> Context ?-> Unit)(using Context): this.type = + rootAddedHandler = handler + super.disallowRootCapability(handler) + + private var computingApprox = false + + /** Roughly: the intersection of all constant known supersets of this set. + * The aim is to find an as-good-as-possible constant set that is a superset + * of this set. The universal set {*} is a sound fallback. 
+ */ + final def upperApprox(origin: CaptureSet)(using Context): CaptureSet = + if computingApprox then universal + else if isConst then this + else + computingApprox = true + try computeApprox(origin).ensuring(_.isConst) + finally computingApprox = false + + /** The intersection of all upper approximations of dependent sets */ + protected def computeApprox(origin: CaptureSet)(using Context): CaptureSet = + (universal /: deps) { (acc, sup) => acc ** sup.upperApprox(this) } + + /** Widen the variable's elements to its upper approximation and + * mark it as constant from now on. This is used for contra-variant type variables + * in the results of defs and vals. + */ + def solve()(using Context): Unit = + if !isConst then + val approx = upperApprox(empty) + //println(i"solving var $this $approx ${approx.isConst} deps = ${deps.toList}") + val newElems = approx.elems -- elems + if newElems.isEmpty || addNewElems(newElems, empty)(using ctx, VarState()).isOK then + markSolved() + + /** Mark set as solved and propagate this info to all dependent sets */ + def markSolved()(using Context): Unit = + isSolved = true + deps.foreach(_.propagateSolved()) + + def withDescription(description: String): this.type = + this.description = + if this.description.isEmpty then description + else s"${this.description} and $description" + this + + /** Used for diagnostics and debugging: A string that traces the creation + * history of a variable by following source links. Each variable on the + * path is characterized by the variable's id and the first letter of the + * variable's class name. The path ends in a plain variable with letter `V` that + * is not derived from some other variable. 
+ */ + protected def ids(using Context): String = + val trail = this.match + case dv: DerivedVar => dv.source.ids + case _ => "" + s"$id${getClass.getSimpleName.nn.take(1)}$trail" + + /** Adds variables to the ShownVars context property if that exists, which + * establishes a record of all variables printed in an error message. + * Prints variables wih ids under -Ycc-debug. + */ + override def toText(printer: Printer): Text = inContext(printer.printerContext) { + for vars <- ctx.property(ShownVars) do vars += this + super.toText(printer) ~ (Str(ids) provided !isConst && ctx.settings.YccDebug.value) + } + + override def toString = s"Var$id$elems" + end Var + + /** A variable that is derived from some other variable via a map or filter. */ + abstract class DerivedVar(initialElems: Refs)(using @constructorOnly ctx: Context) + extends Var(initialElems): + + // For debugging: A trace where a set was created. Note that logically it would make more + // sense to place this variable in Mapped, but that runs afoul of the initializatuon checker. + val stack = if debugSets && this.isInstanceOf[Mapped] then (new Throwable).getStackTrace().nn.take(20) else null + + /** The variable from which this variable is derived */ + def source: Var + + addAsDependentTo(source) + + override def propagateSolved()(using Context) = + if source.isConst && !isConst then markSolved() + end DerivedVar + + /** A variable that changes when `source` changes, where all additional new elements are mapped + * using ∪ { tm(x) | x <- source.elems }. + * @param source the original set that is mapped + * @param tm the type map, which is assumed to be idempotent on capture refs + * (except if ccUnsoundMaps is enabled) + * @param variance the assumed variance with which types with capturesets of size >= 2 are approximated + * (i.e. co: full capture set, contra: empty set, nonvariant is not allowed.) + * @param initial The initial mappings of source's elements at the point the Mapped set is created. 
+ */ + class Mapped private[CaptureSet] + (val source: Var, tm: TypeMap, variance: Int, initial: CaptureSet)(using @constructorOnly ctx: Context) + extends DerivedVar(initial.elems): + addAsDependentTo(initial) // initial mappings could change by propagation + + private def mapIsIdempotent = tm.isInstanceOf[IdempotentCaptRefMap] + + assert(ccAllowUnsoundMaps || mapIsIdempotent, tm.getClass) + + private def whereCreated(using Context): String = + if stack == null then "" + else i""" + |Stack trace of variable creation:" + |${stack.mkString("\n")}""" + + override def addNewElems(newElems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = + val added = + if origin eq source then // elements have to be mapped + mapRefs(newElems, tm, variance) + else + // elements are added by subcapturing propagation with this Mapped set + // as superset; no mapping is necessary or allowed. + Const(newElems) + super.addNewElems(added.elems, origin) + .andAlso { + if added.isConst then CompareResult.OK + else if added.asVar.recordDepsState() then { addAsDependentTo(added); CompareResult.OK } + else CompareResult.fail(this) + } + .andAlso { + if (origin ne source) && (origin ne initial) && mapIsIdempotent then + // `tm` is idempotent, propagate back elems from image set. + // This is sound, since we know that for `r in newElems: tm(r) = r`, hence + // `r` is _one_ possible solution in `source` that would make an `r` appear in this set. + // It's not necessarily the only possible solution, so the scheme is incomplete. + source.tryInclude(newElems, this) + else if !mapIsIdempotent && variance <= 0 && !origin.isConst && (origin ne initial) && (origin ne source) then + // The map is neither a BiTypeMap nor an idempotent type map. + // In that case there's no much we can do. + // The scheme then does not propagate added elements back to source and rejects adding + // elements from variable sources in contra- and non-variant positions. 
In essence, + // we approximate types resulting from such maps by returning a possible super type + // from the actual type. But this is neither sound nor complete. + report.warning(em"trying to add elems ${CaptureSet(newElems)} from unrecognized source $origin of mapped set $this$whereCreated") + CompareResult.fail(this) + else + CompareResult.OK + } + + override def computeApprox(origin: CaptureSet)(using Context): CaptureSet = + if source eq origin then + // it's a mapping of origin, so not a superset of `origin`, + // therefore don't contribute to the intersection. + universal + else + source.upperApprox(this).map(tm) + + override def propagateSolved()(using Context) = + if initial.isConst then super.propagateSolved() + + override def toString = s"Mapped$id($source, elems = $elems)" + end Mapped + + /** A mapping where the type map is required to be a bijection. + * Parameters as in Mapped. + */ + final class BiMapped private[CaptureSet] + (val source: Var, bimap: BiTypeMap, initialElems: Refs)(using @constructorOnly ctx: Context) + extends DerivedVar(initialElems): + + override def addNewElems(newElems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = + if origin eq source then + super.addNewElems(newElems.map(bimap.forward), origin) + else + super.addNewElems(newElems, origin) + .andAlso { + source.tryInclude(newElems.map(bimap.backward), this) + .showing(i"propagating new elems ${CaptureSet(newElems)} backward from $this to $source", capt)(using null) + } + + /** For a BiTypeMap, supertypes of the mapped type also constrain + * the source via the inverse type mapping and vice versa. That is, if + * B = f(A) and B <: C, then A <: f^-1(C), so C should flow into + * the upper approximation of A. + * Conversely if A <: C2, then we also know that B <: f(C2). + * These situations are modeled by the two branches of the conditional below. 
+ */ + override def computeApprox(origin: CaptureSet)(using Context): CaptureSet = + val supApprox = super.computeApprox(this) + if source eq origin then supApprox.map(bimap.inverseTypeMap.detach) + else source.upperApprox(this).map(bimap) ** supApprox + + override def toString = s"BiMapped$id($source, elems = $elems)" + end BiMapped + + /** A variable with elements given at any time as { x <- source.elems | p(x) } */ + class Filtered private[CaptureSet] + (val source: Var, p: (c: Context) ?-> (CaptureRef -> Boolean) @retains(c))(using @constructorOnly ctx: Context) + extends DerivedVar(source.elems.filter(p)): + + override def addNewElems(newElems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = + val filtered = newElems.filter(p) + if origin eq source then + super.addNewElems(filtered, origin) + else + // Filtered elements have to be back-propagated to source. + // Elements that don't satisfy `p` are not allowed. + super.addNewElems(newElems, origin) + .andAlso { + if filtered.size == newElems.size then source.tryInclude(newElems, this) + else CompareResult.fail(this) + } + + override def computeApprox(origin: CaptureSet)(using Context): CaptureSet = + if source eq origin then + // it's a filter of origin, so not a superset of `origin`, + // therefore don't contribute to the intersection. 
+ universal + else + source.upperApprox(this).filter(p) + + override def toString = s"${getClass.getSimpleName}$id($source, elems = $elems)" + end Filtered + + /** A variable with elements given at any time as { x <- source.elems | !other.accountsFor(x) } */ + class Diff(source: Var, other: Const)(using @constructorOnly ctx: Context) + extends Filtered(source, !other.accountsFor(_)) + + class Intersected(cs1: CaptureSet, cs2: CaptureSet)(using @constructorOnly ctx: Context) + extends Var(elemIntersection(cs1, cs2)): + addAsDependentTo(cs1) + addAsDependentTo(cs2) + deps += cs1 + deps += cs2 + + override def addNewElems(newElems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = + val added = + if origin eq cs1 then newElems.filter(cs2.accountsFor) + else if origin eq cs2 then newElems.filter(cs1.accountsFor) + else newElems + // If origin is not cs1 or cs2, then newElems will be propagated to + // cs1, cs2 since they are in deps. + super.addNewElems(added, origin) + + override def computeApprox(origin: CaptureSet)(using Context): CaptureSet = + if (origin eq cs1) || (origin eq cs2) then + // it's a combination of origin with some other set, so not a superset of `origin`, + // therefore don't contribute to the intersection. + universal + else + CaptureSet(elemIntersection(cs1.upperApprox(this), cs2.upperApprox(this))) + + override def propagateSolved()(using Context) = + if cs1.isConst && cs2.isConst && !isConst then markSolved() + end Intersected + + def elemIntersection(cs1: CaptureSet, cs2: CaptureSet)(using Context): Refs = + cs1.elems.filter(cs2.mightAccountFor) ++ cs2.elems.filter(cs1.mightAccountFor) + + /** Extrapolate tm(r) according to `variance`. Let r1 be the result of tm(r). 
+ * - If r1 is a tracked CaptureRef, return {r1} + * - If r1 has an empty capture set, return {} + * - Otherwise, + * - if the variance is covariant, return r1's capture set + * - if the variance is contravariant, return {} + * - Otherwise assertion failure + */ + def extrapolateCaptureRef(r: CaptureRef, tm: TypeMap, variance: Int)(using Context): CaptureSet = + val r1 = tm(r) + val upper = r1.captureSet + def isExact = + upper.isAlwaysEmpty || upper.isConst && upper.elems.size == 1 && upper.elems.contains(r1) + if variance > 0 || isExact then upper + else if variance < 0 then CaptureSet.empty + else assert(false, i"trying to add $upper from $r via ${tm.getClass} in a non-variant setting") + + /** Apply `f` to each element in `xs`, and join result sets with `++` */ + def mapRefs(xs: Refs, f: CaptureRef => CaptureSet)(using Context): CaptureSet = + ((empty: CaptureSet) /: xs)((cs, x) => cs ++ f(x)) + + /** Apply extrapolated `tm` to each element in `xs`, and join result sets with `++` */ + def mapRefs(xs: Refs, tm: TypeMap, variance: Int)(using Context): CaptureSet = + mapRefs(xs, extrapolateCaptureRef(_, tm, variance)) + + /** Return true iff + * - arg1 is a TypeBounds >: CL T <: CH T of two capturing types with equal parents. + * - arg2 is a capturing type CA U + * - CH <: CA <: CL + * In other words, we can unify CL, CH and CA. + */ + def subCapturesRange(arg1: TypeBounds, arg2: Type)(using Context): Boolean = arg1 match + case TypeBounds(CapturingType(lo, loRefs), CapturingType(hi, hiRefs)) if lo =:= hi => + given VarState = VarState() + val cs2 = arg2.captureSet + hiRefs.subCaptures(cs2).isOK && cs2.subCaptures(loRefs).isOK + case _ => + false + + /** A TypeMap with the property that every capture reference in the image + * of the map is mapped to itself. I.e. for all capture references r1, r2, + * if M(r1) == r2 then M(r2) == r2. 
+ */ + trait IdempotentCaptRefMap extends TypeMap + + /** A TypeMap that is the identity on capture references */ + trait IdentityCaptRefMap extends TypeMap + + type CompareResult = CompareResult.TYPE + + /** The result of subcapturing comparisons is an opaque type CompareResult.TYPE. + * This is either OK, indicating success, or + * another capture set, indicating failure. The failure capture set + * is the one that did not allow propagaton of elements into it. + */ + object CompareResult: + opaque type TYPE = CaptureSet + val OK: TYPE = Const(emptySet) + def fail(cs: CaptureSet): TYPE = cs + + extension (result: TYPE) + /** The result is OK */ + def isOK: Boolean = result eq OK + /** If not isOK, the blocking capture set */ + def blocking: CaptureSet = result + inline def andAlso(op: Context ?=> TYPE)(using Context): TYPE = if result.isOK then op else result + def show(using Context): String = if result.isOK then "OK" else i"$result" + end CompareResult + + /** A VarState serves as a snapshot mechanism that can undo + * additions of elements or super sets if an operation fails + */ + class VarState: + + /** A map from captureset variables to their elements at the time of the snapshot. */ + private val elemsMap: util.EqHashMap[Var, Refs] = new util.EqHashMap + + /** A map from captureset variables to their dependent sets at the time of the snapshot. */ + private val depsMap: util.EqHashMap[Var, Deps] = new util.EqHashMap + + /** The recorded elements of `v` (it's required that a recording was made) */ + def elems(v: Var): Refs = elemsMap(v) + + /** Optionally the recorded elements of `v`, None if nothing was recorded for `v` */ + def getElems(v: Var): Option[Refs] = elemsMap.get(v) + + /** Record elements, return whether this was allowed. + * By default, recording is allowed but the special state FrozenState + * overrides this. 
+ */ + def putElems(v: Var, elems: Refs): Boolean = { elemsMap(v) = elems; true } + + /** The recorded dependent sets of `v` (it's required that a recording was made) */ + def deps(v: Var): Deps = depsMap(v) + + /** Optionally the recorded dependent sets of `v`, None if nothing was recorded for `v` */ + def getDeps(v: Var): Option[Deps] = depsMap.get(v) + + /** Record dependent sets, return whether this was allowed. + * By default, recording is allowed but the special state FrozenState + * overrides this. + */ + def putDeps(v: Var, deps: Deps): Boolean = { depsMap(v) = deps; true } + + /** Roll back global state to what was recorded in this VarState */ + def rollBack(): Unit = + elemsMap.keysIterator.foreach(_.resetElems()(using this)) + depsMap.keysIterator.foreach(_.resetDeps()(using this)) + end VarState + + /** A special state that does not allow to record elements or dependent sets. + * In effect this means that no new elements or dependent sets can be added + * in this state (since the previous state cannot be recorded in a snapshot) + */ + @sharable + object FrozenState extends VarState: + override def putElems(v: Var, refs: Refs) = false + override def putDeps(v: Var, deps: Deps) = false + override def rollBack(): Unit = () + + @sharable + /** A special state that turns off recording of elements. Used only + * in `addSub` to prevent cycles in recordings. 
+ */ + private object UnrecordedState extends VarState: + override def putElems(v: Var, refs: Refs) = true + override def putDeps(v: Var, deps: Deps) = true + override def rollBack(): Unit = () + + /** The current VarState, as passed by the implicit context */ + def varState(using state: VarState): VarState = state + + /* Not needed: + def ofClass(cinfo: ClassInfo, argTypes: List[Type])(using Context): CaptureSet = + CaptureSet.empty + def captureSetOf(tp: Type): CaptureSet = tp match + case tp: TypeRef if tp.symbol.is(ParamAccessor) => + def mapArg(accs: List[Symbol], tps: List[Type]): CaptureSet = accs match + case acc :: accs1 if tps.nonEmpty => + if acc == tp.symbol then tps.head.captureSet + else mapArg(accs1, tps.tail) + case _ => + empty + mapArg(cinfo.cls.paramAccessors, argTypes) + case _ => + tp.captureSet + val css = + for + parent <- cinfo.parents if parent.classSymbol == defn.RetainingClass + arg <- parent.argInfos + yield captureSetOf(arg) + css.foldLeft(empty)(_ ++ _) + */ + + /** The capture set of the type underlying a CaptureRef */ + def ofInfo(ref: CaptureRef)(using Context): CaptureSet = ref match + case ref: TermRef if ref.isRootCapability => ref.singletonCaptureSet + case _ => ofType(ref.underlying) + + /** Capture set of a type */ + def ofType(tp: Type)(using Context): CaptureSet = + def recur(tp: Type): CaptureSet = tp.dealias match + case tp: TermRef => + tp.captureSet + case tp: TermParamRef => + tp.captureSet + case _: TypeRef => + if tp.classSymbol.hasAnnotation(defn.CapabilityAnnot) then universal else empty + case _: TypeParamRef => + empty + case CapturingType(parent, refs) => + recur(parent) ++ refs + case AppliedType(tycon, args) => + val cs = recur(tycon) + tycon.typeParams match + case tparams @ (LambdaParam(tl, _) :: _) => cs.substParams(tl, args) + case _ => cs + case tp: TypeProxy => + recur(tp.underlying) + case AndType(tp1, tp2) => + recur(tp1) ** recur(tp2) + case OrType(tp1, tp2) => + recur(tp1) ++ recur(tp2) + case _ => + 
empty + recur(tp) + .showing(i"capture set of $tp = $result", capt) + + private val ShownVars: Property.Key[mutable.Set[Var]] = Property.Key() + + /** Perform `op`. Under -Ycc-debug, collect and print info about all variables reachable + * via `(_.deps)*` from the variables that were shown in `op`. + */ + def withCaptureSetsExplained[T](op: Context ?=> T)(using ctx: Context): T = + if ctx.settings.YccDebug.value then + val shownVars = mutable.Set[Var]() + inContext(ctx.withProperty(ShownVars, Some(shownVars))) { + try op + finally + val reachable = mutable.Set[Var]() + val todo = mutable.Queue[Var]() ++= shownVars + def incl(cv: Var): Unit = + if !reachable.contains(cv) then todo += cv + while todo.nonEmpty do + val cv = todo.dequeue() + if !reachable.contains(cv) then + reachable += cv + cv.deps.foreach { + case cv: Var => incl(cv) + case _ => + } + cv match + case cv: DerivedVar => incl(cv.source) + case _ => + val allVars = reachable.toArray.sortBy(_.id) + println(i"Capture set dependencies:") + for cv <- allVars do + println(i" ${cv.show.padTo(20, ' ')} :: ${cv.deps.toList}%, %") + } + else op +end CaptureSet diff --git a/tests/pos-with-compiler-cc/dotc/cc/CapturingType.scala b/tests/pos-with-compiler-cc/dotc/cc/CapturingType.scala new file mode 100644 index 000000000000..e9862f1f20b8 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/cc/CapturingType.scala @@ -0,0 +1,72 @@ +package dotty.tools +package dotc +package cc + +import core.* +import Types.*, Symbols.*, Contexts.* + +/** A (possibly boxed) capturing type. This is internally represented as an annotated type with a @retains + * or @retainsByName annotation, but the extractor will succeed only at phase CheckCaptures. + * That way, we can ignore caturing information until phase CheckCaptures since it is + * wrapped in a plain annotation. + * + * The same trick does not work for the boxing information. 
Boxing is context dependent, so + * we have to add that information in the Setup step preceding CheckCaptures. Boxes are + * added for all type arguments of methods. For type arguments of applied types a different + * strategy is used where we box arguments of applied types that are not functions when + * accessing the argument. + * + * An alternative strategy would add boxes also to arguments of applied types during setup. + * But this would have to be done for all possibly accessibly types from the compiled units + * as well as their dependencies. It's difficult to do this in a DenotationTransformer without + * accidentally forcing symbol infos. That's why this alternative was not implemented. + * If we would go back on this it would make sense to also treat captuyring types different + * from annotations and to generate them all during Setup and in DenotationTransformers. + */ +object CapturingType: + + /** Smart constructor that drops empty capture sets and fuses compatible capturiong types. + * An outer type capturing type A can be fused with an inner capturing type B if their + * boxing status is the same or if A is boxed. + */ + def apply(parent: Type, refs: CaptureSet, boxed: Boolean = false)(using Context): Type = + if refs.isAlwaysEmpty then parent + else parent match + case parent @ CapturingType(parent1, refs1) if boxed || !parent.isBoxed => + apply(parent1, refs ++ refs1, boxed) + case _ => + AnnotatedType(parent, CaptureAnnotation(refs, boxed)(defn.RetainsAnnot)) + + /** An extractor that succeeds only during CheckCapturingPhase. Boxing statis is + * returned separately by CaptureOps.isBoxed. 
+ */ + def unapply(tp: AnnotatedType)(using Context): Option[(Type, CaptureSet)] = + if ctx.phase == Phases.checkCapturesPhase + && tp.annot.symbol == defn.RetainsAnnot + && !ctx.mode.is(Mode.IgnoreCaptures) + then + EventuallyCapturingType.unapply(tp) + else None + +end CapturingType + +/** An extractor for types that will be capturing types at phase CheckCaptures. Also + * included are types that indicate captures on enclosing call-by-name parameters + * before phase ElimByName. + */ +object EventuallyCapturingType: + + def unapply(tp: AnnotatedType)(using Context): Option[(Type, CaptureSet)] = + val sym = tp.annot.symbol + if sym == defn.RetainsAnnot || sym == defn.RetainsByNameAnnot then + tp.annot match + case ann: CaptureAnnotation => + Some((tp.parent, ann.refs)) + case ann => + try Some((tp.parent, ann.tree.toCaptureSet)) + catch case ex: IllegalCaptureRef => None + else None + +end EventuallyCapturingType + + diff --git a/tests/pos-with-compiler-cc/dotc/cc/CheckCaptures.scala b/tests/pos-with-compiler-cc/dotc/cc/CheckCaptures.scala new file mode 100644 index 000000000000..ce3f788202b6 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/cc/CheckCaptures.scala @@ -0,0 +1,1039 @@ +package dotty.tools +package dotc +package cc + +import core.* +import Phases.*, DenotTransformers.*, SymDenotations.* +import Contexts.*, Names.*, Flags.*, Symbols.*, Decorators.* +import Types.*, StdNames.*, Denotations.* +import config.Printers.{capt, recheckr} +import config.{Config, Feature} +import ast.{tpd, untpd, Trees} +import Trees.* +import typer.RefChecks.{checkAllOverrides, checkSelfAgainstParents} +import typer.Checking.{checkBounds, checkAppliedTypesIn} +import util.{SimpleIdentitySet, EqHashMap, SrcPos} +import transform.SymUtils.* +import transform.{Recheck, PreRecheck} +import Recheck.* +import scala.collection.mutable +import CaptureSet.{withCaptureSetsExplained, IdempotentCaptRefMap} +import StdNames.nme +import NameKinds.DefaultGetterName +import reporting.trace 
+import language.experimental.pureFunctions + +/** The capture checker */ +object CheckCaptures: + import ast.tpd.* + + class Pre extends PreRecheck, SymTransformer: + + override def isEnabled(using Context) = true + + /** Reset `private` flags of parameter accessors so that we can refine them + * in Setup if they have non-empty capture sets. Special handling of some + * symbols defined for case classes. + */ + def transformSym(sym: SymDenotation)(using Context): SymDenotation = + if sym.isAllOf(PrivateParamAccessor) && !sym.hasAnnotation(defn.ConstructorOnlyAnnot) then + sym.copySymDenotation(initFlags = sym.flags &~ Private | Recheck.ResetPrivate) + else if Synthetics.needsTransform(sym) then + Synthetics.transformToCC(sym) + else + sym + end Pre + + /** A class describing environments. + * @param owner the current owner + * @param nestedInOwner true if the environment is a temporary one nested in the owner's environment, + * and does not have a different actual owner symbol (this happens when doing box adaptation). + * @param captured the caputure set containing all references to tracked free variables outside of boxes + * @param isBoxed true if the environment is inside a box (in which case references are not counted) + * @param outer0 the next enclosing environment + */ + case class Env( + owner: Symbol, + nestedInOwner: Boolean, + captured: CaptureSet, + isBoxed: Boolean, + outer0: Env | Null + ): + def outer = outer0.nn + + def isOutermost = outer0 == null + + /** If an environment is open it tracks free references */ + def isOpen = !captured.isAlwaysEmpty && !isBoxed + end Env + + /** Similar normal substParams, but this is an approximating type map that + * maps parameters in contravariant capture sets to the empty set. + * TODO: check what happens with non-variant. 
+ */ + final class SubstParamsMap(from: BindingType, to: List[Type])(using DetachedContext) + extends ApproximatingTypeMap, IdempotentCaptRefMap: + def apply(tp: Type): Type = tp match + case tp: ParamRef => + if tp.binder == from then to(tp.paramNum) else tp + case tp: NamedType => + if tp.prefix `eq` NoPrefix then tp + else tp.derivedSelect(apply(tp.prefix)) + case _: ThisType => + tp + case _ => + mapOver(tp) + + /** Check that a @retains annotation only mentions references that can be tracked. + * This check is performed at Typer. + */ + def checkWellformed(ann: Tree)(using Context): Unit = + for elem <- retainedElems(ann) do + elem.tpe match + case ref: CaptureRef => + if !ref.canBeTracked then + report.error(em"$elem cannot be tracked since it is not a parameter or local value", elem.srcPos) + case tpe => + report.error(em"$elem: $tpe is not a legal element of a capture set", elem.srcPos) + + /** If `tp` is a capturing type, check that all references it mentions have non-empty + * capture sets. Also: warn about redundant capture annotations. + * This check is performed after capture sets are computed in phase cc. + */ + def checkWellformedPost(tp: Type, pos: SrcPos)(using Context): Unit = tp match + case CapturingType(parent, refs) => + for ref <- refs.elems do + if ref.captureSetOfInfo.elems.isEmpty then + report.error(em"$ref cannot be tracked since its capture set is empty", pos) + else if parent.captureSet.accountsFor(ref) then + report.warning(em"redundant capture: $parent already accounts for $ref", pos) + case _ => + + /** Warn if `ann`, which is a tree of a @retains annotation, defines some elements that + * are already accounted for by other elements of the same annotation. + * Note: We need to perform the check on the original annotation rather than its + * capture set since the conversion to a capture set already eliminates redundant elements. 
+ */ + def warnIfRedundantCaptureSet(ann: Tree)(using Context): Unit = + // The lists `elems(i) :: prev.reverse :: elems(0),...,elems(i-1),elems(i+1),elems(n)` + // where `n == elems.length-1`, i <- 0..n`. + // I.e. + // choices(Nil, elems) = [[elems(i), elems(0), ..., elems(i-1), elems(i+1), .... elems(n)] | i <- 0..n] + def choices(prev: List[Tree], elems: List[Tree]): List[List[Tree]] = elems match + case Nil => Nil + case elem :: elems => + List(elem :: (prev reverse_::: elems)) ++ choices(elem :: prev, elems) + for case first :: others <- choices(Nil, retainedElems(ann)) do + val firstRef = first.toCaptureRef + val remaining = CaptureSet(others.map(_.toCaptureRef)*) + if remaining.accountsFor(firstRef) then + report.warning(em"redundant capture: $remaining already accounts for $firstRef", ann.srcPos) + +class CheckCaptures extends Recheck, SymTransformer: + thisPhase => + + import ast.tpd.* + import CheckCaptures.* + + def phaseName: String = "cc" + override def isEnabled(using Context) = true + + def newRechecker()(using Context) = CaptureChecker(ctx.detach) + + override def run(using Context): Unit = + if Feature.ccEnabled then + checkOverrides.traverse(ctx.compilationUnit.tpdTree) + super.run + + override def transformSym(sym: SymDenotation)(using Context): SymDenotation = + if Synthetics.needsTransform(sym) then Synthetics.transformFromCC(sym) + else super.transformSym(sym) + + /** Check overrides again, taking capture sets into account. + * TODO: Can we avoid doing overrides checks twice? + * We need to do them here since only at this phase CaptureTypes are relevant + * But maybe we can then elide the check during the RefChecks phase under captureChecking? 
+ */ + def checkOverrides = new TreeTraverser: + def traverse(t: Tree)(using Context) = + t match + case t: Template => checkAllOverrides(ctx.owner.asClass) + case _ => + traverseChildren(t) + + class CaptureChecker(ictx: DetachedContext) extends Rechecker(ictx): + import ast.tpd.* + + override def keepType(tree: Tree) = + super.keepType(tree) + || tree.isInstanceOf[Try] // type of `try` needs tp be checked for * escapes + + /** Instantiate capture set variables appearing contra-variantly to their + * upper approximation. + */ + private def interpolator(startingVariance: Int = 1)(using Context) = new TypeTraverser: + variance = startingVariance + override def traverse(t: Type) = + t match + case CapturingType(parent, refs: CaptureSet.Var) => + if variance < 0 then + capt.println(i"solving $t") + refs.solve() + traverse(parent) + case t @ RefinedType(_, nme.apply, rinfo) if defn.isFunctionOrPolyType(t) => + traverse(rinfo) + case tp: TypeVar => + case tp: TypeRef => + traverse(tp.prefix) + case _ => + traverseChildren(t) + + /** If `tpt` is an inferred type, interpolate capture set variables appearing contra- + * variantly in it. 
+ */ + private def interpolateVarsIn(tpt: Tree)(using Context): Unit = + if tpt.isInstanceOf[InferredTypeTree] then + interpolator().traverse(tpt.knownType) + .showing(i"solved vars in ${tpt.knownType}", capt)(using null) + + /** Assert subcapturing `cs1 <: cs2` */ + def assertSub(cs1: CaptureSet, cs2: CaptureSet)(using Context) = + assert(cs1.subCaptures(cs2, frozen = false).isOK, i"$cs1 is not a subset of $cs2") + + /** Check subcapturing `{elem} <: cs`, report error on failure */ + def checkElem(elem: CaptureRef, cs: CaptureSet, pos: SrcPos)(using Context) = + val res = elem.singletonCaptureSet.subCaptures(cs, frozen = false) + if !res.isOK then + report.error(em"$elem cannot be referenced here; it is not included in the allowed capture set ${res.blocking}", pos) + + /** Check subcapturing `cs1 <: cs2`, report error on failure */ + def checkSubset(cs1: CaptureSet, cs2: CaptureSet, pos: SrcPos)(using Context) = + val res = cs1.subCaptures(cs2, frozen = false) + if !res.isOK then + def header = + if cs1.elems.size == 1 then i"reference ${cs1.elems.toList}%, % is not" + else i"references $cs1 are not all" + report.error(em"$header included in allowed capture set ${res.blocking}", pos) + + /** The current environment */ + private var curEnv: Env = Env(NoSymbol, nestedInOwner = false, CaptureSet.empty, isBoxed = false, null) + + private val myCapturedVars: util.EqHashMap[Symbol, CaptureSet] = EqHashMap() + + /** If `sym` is a class or method nested inside a term, a capture set variable representing + * the captured variables of the environment associated with `sym`. 
+ */ + def capturedVars(sym: Symbol)(using Context) = + myCapturedVars.getOrElseUpdate(sym, + if sym.ownersIterator.exists(_.isTerm) then CaptureSet.Var() + else CaptureSet.empty) + + /** For all nested environments up to `limit` perform `op` */ + def forallOuterEnvsUpTo(limit: Symbol)(op: Env => Unit)(using Context): Unit = + def recur(env: Env): Unit = + if env.isOpen && env.owner != limit then + op(env) + if !env.isOutermost then + var nextEnv = env.outer + if env.owner.isConstructor then + if nextEnv.owner != limit && !nextEnv.isOutermost then + recur(nextEnv.outer) + else recur(nextEnv) + recur(curEnv) + + /** Include `sym` in the capture sets of all enclosing environments nested in the + * the environment in which `sym` is defined. + */ + def markFree(sym: Symbol, pos: SrcPos)(using Context): Unit = + if sym.exists then + val ref = sym.termRef + if ref.isTracked then + forallOuterEnvsUpTo(sym.enclosure) { env => + capt.println(i"Mark $sym with cs ${ref.captureSet} free in ${env.owner}") + checkElem(ref, env.captured, pos) + } + + /** Make sure (projected) `cs` is a subset of the capture sets of all enclosing + * environments. 
At each stage, only include references from `cs` that are outside + * the environment's owner + */ + def markFree(cs: CaptureSet, pos: SrcPos)(using Context): Unit = + if !cs.isAlwaysEmpty then + forallOuterEnvsUpTo(ctx.owner.topLevelClass) { env => + val included = cs.filter { + case ref: TermRef => + (env.nestedInOwner || env.owner != ref.symbol.owner) + && env.owner.isContainedIn(ref.symbol.owner) + case ref: ThisType => + (env.nestedInOwner || env.owner != ref.cls) + && env.owner.isContainedIn(ref.cls) + case _ => false + } + capt.println(i"Include call capture $included in ${env.owner}") + checkSubset(included, env.captured, pos) + } + + /** Include references captured by the called method in the current environment stack */ + def includeCallCaptures(sym: Symbol, pos: SrcPos)(using Context): Unit = + if sym.exists && curEnv.isOpen then markFree(capturedVars(sym), pos) + + override def recheckIdent(tree: Ident)(using Context): Type = + if tree.symbol.is(Method) then includeCallCaptures(tree.symbol, tree.srcPos) + else markFree(tree.symbol, tree.srcPos) + super.recheckIdent(tree) + + /** A specialized implementation of the selection rule. + * + * E |- f: Cf f { m: Cr R } + * ------------------------ + * E |- f.m: C R + * + * The implementation picks as `C` one of `{f}` or `Cr`, depending on the + * outcome of a `mightSubcapture` test. It picks `{f}` if this might subcapture Cr + * and Cr otherwise. + */ + override def recheckSelection(tree: Select, qualType: Type, name: Name, pt: Type)(using Context) = { + def disambiguate(denot: Denotation): Denotation = denot match + case MultiDenotation(denot1, denot2) => + // This case can arise when we try to merge multiple types that have different + // capture sets on some part. For instance an asSeenFrom might produce + // a bi-mapped capture set arising from a substition. 
Applying the same substitution + // to the same type twice will nevertheless produce different capture setsw which can + // lead to a failure in disambiguation since neither alternative is better than the + // other in a frozen constraint. An example test case is disambiguate-select.scala. + // We address the problem by disambiguating while ignoring all capture sets as a fallback. + withMode(Mode.IgnoreCaptures) { + disambiguate(denot1).meet(disambiguate(denot2), qualType) + } + case _ => denot + + val selType = recheckSelection(tree, qualType, name, disambiguate) + val selCs = selType.widen.captureSet + if selCs.isAlwaysEmpty || selType.widen.isBoxedCapturing || qualType.isBoxedCapturing then + selType + else + val qualCs = qualType.captureSet + capt.println(i"intersect $qualType, ${selType.widen}, $qualCs, $selCs in $tree") + if qualCs.mightSubcapture(selCs) + && !selCs.mightSubcapture(qualCs) + && !pt.stripCapturing.isInstanceOf[SingletonType] + then + selType.widen.stripCapturing.capturing(qualCs) + .showing(i"alternate type for select $tree: $selType --> $result, $qualCs / $selCs", capt) + else + selType + }//.showing(i"recheck sel $tree, $qualType = $result") + + /** A specialized implementation of the apply rule. + * + * E |- f: Cf (Ra -> Cr Rr) + * E |- a: Ca Ra + * ------------------------ + * E |- f a: C Rr + * + * The implementation picks as `C` one of `{f, a}` or `Cr`, depending on the + * outcome of a `mightSubcapture` test. It picks `{f, a}` if this might subcapture Cr + * and Cr otherwise. 
+ */ + override def recheckApply(tree: Apply, pt: Type)(using Context): Type = + val meth = tree.fun.symbol + includeCallCaptures(meth, tree.srcPos) + def mapArgUsing(f: Type => Type) = + val arg :: Nil = tree.args: @unchecked + val argType0 = f(recheckStart(arg, pt)) + val argType = super.recheckFinish(argType0, arg, pt) + super.recheckFinish(argType, tree, pt) + + if meth == defn.Caps_unsafeBox then + mapArgUsing(_.forceBoxStatus(true)) + else if meth == defn.Caps_unsafeUnbox then + mapArgUsing(_.forceBoxStatus(false)) + else if meth == defn.Caps_unsafeBoxFunArg then + mapArgUsing { + case defn.FunctionOf(paramtpe :: Nil, restpe, isContectual, isErased) => + defn.FunctionOf(paramtpe.forceBoxStatus(true) :: Nil, restpe, isContectual, isErased) + } + else + super.recheckApply(tree, pt) match + case appType @ CapturingType(appType1, refs) => + tree.fun match + case Select(qual, _) + if !tree.fun.symbol.isConstructor + && !qual.tpe.isBoxedCapturing + && !tree.args.exists(_.tpe.isBoxedCapturing) + && qual.tpe.captureSet.mightSubcapture(refs) + && tree.args.forall(_.tpe.captureSet.mightSubcapture(refs)) + => + val callCaptures = tree.args.foldLeft(qual.tpe.captureSet)((cs, arg) => + cs ++ arg.tpe.captureSet) + appType.derivedCapturingType(appType1, callCaptures) + .showing(i"narrow $tree: $appType, refs = $refs, qual = ${qual.tpe.captureSet} --> $result", capt) + case _ => appType + case appType => appType + end recheckApply + + /** Handle an application of method `sym` with type `mt` to arguments of types `argTypes`. + * This means: + * - Instantiate result type with actual arguments + * - If call is to a constructor: + * - remember types of arguments corresponding to tracked + * parameters in refinements. + * - add capture set of instantiated class to capture set of result type. 
+ */ + override def instantiate(mt: MethodType, argTypes: List[Type], sym: Symbol)(using Context): Type = + val ownType = + if mt.isResultDependent then SubstParamsMap(mt, argTypes)(mt.resType) + else mt.resType + + if sym.isConstructor then + val cls = sym.owner.asClass + + /** First half of result pair: + * Refine the type of a constructor call `new C(t_1, ..., t_n)` + * to C{val x_1: T_1, ..., x_m: T_m} where x_1, ..., x_m are the tracked + * parameters of C and T_1, ..., T_m are the types of the corresponding arguments. + * + * Second half: union of all capture sets of arguments to tracked parameters. + */ + def addParamArgRefinements(core: Type, initCs: CaptureSet): (Type, CaptureSet) = + mt.paramNames.lazyZip(argTypes).foldLeft((core, initCs)) { (acc, refine) => + val (core, allCaptures) = acc + val (getterName, argType) = refine + val getter = cls.info.member(getterName).suchThat(_.is(ParamAccessor)).symbol + if getter.termRef.isTracked && !getter.is(Private) + then (RefinedType(core, getterName, argType), allCaptures ++ argType.captureSet) + else (core, allCaptures) + } + + def augmentConstructorType(core: Type, initCs: CaptureSet): Type = core match + case core: MethodType => + // more parameters to follow; augment result type + core.derivedLambdaType(resType = augmentConstructorType(core.resType, initCs)) + case CapturingType(parent, refs) => + // can happen for curried constructors if instantiate of a previous step + // added capture set to result. 
+ augmentConstructorType(parent, initCs ++ refs) + case _ => + val (refined, cs) = addParamArgRefinements(core, initCs) + refined.capturing(cs) + + augmentConstructorType(ownType, CaptureSet.empty) match + case augmented: MethodType => + augmented + case augmented => + // add capture sets of class and constructor to final result of constructor call + augmented.capturing(capturedVars(cls) ++ capturedVars(sym)) + .showing(i"constr type $mt with $argTypes%, % in $cls = $result", capt) + else ownType + end instantiate + + override def recheckClosure(tree: Closure, pt: Type)(using Context): Type = + val cs = capturedVars(tree.meth.symbol) + capt.println(i"typing closure $tree with cvs $cs") + super.recheckClosure(tree, pt).capturing(cs) + .showing(i"rechecked $tree / $pt = $result", capt) + + /** Additionally to normal processing, update types of closures if the expected type + * is a function with only pure parameters. In that case, make the anonymous function + * also have the same parameters as the prototype. + * TODO: Develop a clearer rationale for this. + * TODO: Can we generalize this to arbitrary parameters? + * Currently some tests fail if we do this. (e.g. neg.../stackAlloc.scala, others) + */ + override def recheckBlock(block: Block, pt: Type)(using Context): Type = + block match + case closureDef(mdef) => + pt.dealias match + case defn.FunctionOf(ptformals, _, _, _) + if ptformals.nonEmpty && ptformals.forall(_.captureSet.isAlwaysEmpty) => + // Redo setup of the anonymous function so that formal parameters don't + // get capture sets. This is important to avoid false widenings to `*` + // when taking the base type of the actual closures's dependent function + // type so that it conforms to the expected non-dependent function type. + // See withLogFile.scala for a test case. + val meth = mdef.symbol + // First, undo the previous setup which installed a completer for `meth`. 
+ atPhase(preRecheckPhase.prev)(meth.denot.copySymDenotation()) + .installAfter(preRecheckPhase) + + // Next, update all parameter symbols to match expected formals + meth.paramSymss.head.lazyZip(ptformals).foreach { (psym, pformal) => + psym.updateInfoBetween(preRecheckPhase, thisPhase, pformal.mapExprType) + } + // Next, update types of parameter ValDefs + mdef.paramss.head.lazyZip(ptformals).foreach { (param, pformal) => + val ValDef(_, tpt, _) = param: @unchecked + tpt.rememberTypeAlways(pformal) + } + // Next, install a new completer reflecting the new parameters for the anonymous method + val mt = meth.info.asInstanceOf[MethodType] + val completer = new LazyType: + def complete(denot: SymDenotation)(using Context) = + denot.info = mt.companion(ptformals, mdef.tpt.knownType) + .showing(i"simplify info of $meth to $result", capt) + recheckDef(mdef, meth) + meth.updateInfoBetween(preRecheckPhase, thisPhase, completer) + case _ => + case _ => + super.recheckBlock(block, pt) + + override def recheckValDef(tree: ValDef, sym: Symbol)(using Context): Unit = + try + if !sym.is(Module) then // Modules are checked by checking the module class + super.recheckValDef(tree, sym) + finally + if !sym.is(Param) then + // Parameters with inferred types belong to anonymous methods. We need to wait + // for more info from the context, so we cannot interpolate. Note that we cannot + // expect to have all necessary info available at the point where the anonymous + // function is compiled since we do not propagate expected types into blocks. 
+ interpolateVarsIn(tree.tpt) + + override def recheckDefDef(tree: DefDef, sym: Symbol)(using Context): Unit = + if !Synthetics.isExcluded(sym) then + val saved = curEnv + val localSet = capturedVars(sym) + if !localSet.isAlwaysEmpty then curEnv = Env(sym, nestedInOwner = false, localSet, isBoxed = false, curEnv) + try super.recheckDefDef(tree, sym) + finally + interpolateVarsIn(tree.tpt) + curEnv = saved + + /** Class-specific capture set relations: + * 1. The capture set of a class includes the capture sets of its parents. + * 2. The capture set of the self type of a class includes the capture set of the class. + * 3. The capture set of the self type of a class includes the capture set of every class parameter, + * unless the parameter is marked @constructorOnly. + */ + override def recheckClassDef(tree: TypeDef, impl: Template, cls: ClassSymbol)(using Context): Type = + val saved = curEnv + val localSet = capturedVars(cls) + for parent <- impl.parents do // (1) + checkSubset(capturedVars(parent.tpe.classSymbol), localSet, parent.srcPos) + if !localSet.isAlwaysEmpty then curEnv = Env(cls, nestedInOwner = false, localSet, isBoxed = false, curEnv) + try + val thisSet = cls.classInfo.selfType.captureSet.withDescription(i"of the self type of $cls") + checkSubset(localSet, thisSet, tree.srcPos) // (2) + for param <- cls.paramGetters do + if !param.hasAnnotation(defn.ConstructorOnlyAnnot) then + checkSubset(param.termRef.captureSet, thisSet, param.srcPos) // (3) + for pureBase <- cls.pureBaseClass do + checkSubset(thisSet, + CaptureSet.empty.withDescription(i"of pure base class $pureBase"), + tree.srcPos) + super.recheckClassDef(tree, impl, cls) + finally + curEnv = saved + + /** If type is of the form `T @requiresCapability(x)`, + * mark `x` as free in the current environment. This is used to require the + * correct `CanThrow` capability when encountering a `throw`. 
+ */ + override def recheckTyped(tree: Typed)(using Context): Type = + tree.tpt.tpe match + case AnnotatedType(_, annot) if annot.symbol == defn.RequiresCapabilityAnnot => + annot.tree match + case Apply(_, cap :: Nil) => + markFree(cap.symbol, tree.srcPos) + case _ => + case _ => + super.recheckTyped(tree) + + /* Currently not needed, since capture checking takes place after ElimByName. + * Keep around in case we need to get back to it + def recheckByNameArg(tree: Tree, pt: Type)(using Context): Type = + val closureDef(mdef) = tree: @unchecked + val arg = mdef.rhs + val localSet = CaptureSet.Var() + curEnv = Env(mdef.symbol, localSet, isBoxed = false, curEnv) + val result = + try + inContext(ctx.withOwner(mdef.symbol)) { + recheckStart(arg, pt).capturing(localSet) + } + finally curEnv = curEnv.outer + recheckFinish(result, arg, pt) + */ + + /** If expected type `pt` is boxed and the tree is a function or a reference, + * don't propagate free variables. + * Otherwise, if the result type is boxed, simulate an unboxing by + * adding all references in the boxed capture set to the current environment. + */ + override def recheck(tree: Tree, pt: Type = WildcardType)(using Context): Type = + if tree.isTerm && pt.isBoxedCapturing then + val saved = curEnv + + tree match + case _: RefTree | closureDef(_) => + curEnv = Env(curEnv.owner, nestedInOwner = false, CaptureSet.Var(), isBoxed = true, curEnv) + case _ => + + try super.recheck(tree, pt) + finally curEnv = saved + else + val res = super.recheck(tree, pt) + if tree.isTerm then markFree(res.boxedCaptureSet, tree.srcPos) + res + + /** If `tree` is a reference or an application where the result type refers + * to an enclosing class or method parameter of the reference, check that the result type + * does not capture the universal capability. This is justified since the + * result type would have to be implicitly unboxed. + * TODO: Can we find a cleaner way to achieve this? 
Logically, this should be part + * of simulated boxing and unboxing. + */ + override def recheckFinish(tpe: Type, tree: Tree, pt: Type)(using Context): Type = + val typeToCheck = tree match + case _: Ident | _: Select | _: Apply | _: TypeApply if tree.symbol.unboxesResult => + tpe + case _: Try => + tpe + case _ => + NoType + def checkNotUniversal(tp: Type): Unit = tp.widenDealias match + case wtp @ CapturingType(parent, refs) => + refs.disallowRootCapability { () => + val kind = if tree.isInstanceOf[ValDef] then "mutable variable" else "expression" + report.error( + em"""The $kind's type $wtp is not allowed to capture the root capability `*`. + |This usually means that a capability persists longer than its allowed lifetime.""", + tree.srcPos) + } + checkNotUniversal(parent) + case _ => + checkNotUniversal(typeToCheck) + super.recheckFinish(tpe, tree, pt) + + /** Massage `actual` and `expected` types using the methods below before checking conformance */ + override def checkConformsExpr(actual: Type, expected: Type, tree: Tree)(using Context): Unit = + val expected1 = alignDependentFunction(addOuterRefs(expected, actual), actual.stripCapturing) + val actual1 = adaptBoxed(actual, expected1, tree.srcPos) + //println(i"check conforms $actual1 <<< $expected1") + super.checkConformsExpr(actual1, expected1, tree) + + private def toDepFun(args: List[Type], resultType: Type, isContextual: Boolean, isErased: Boolean)(using Context): Type = + MethodType.companion(isContextual = isContextual, isErased = isErased)(args, resultType) + .toFunctionType(isJava = false, alwaysDependent = true) + + /** Turn `expected` into a dependent function when `actual` is dependent. 
*/ + private def alignDependentFunction(expected: Type, actual: Type)(using Context): Type = + def recur(expected: Type): Type = expected.dealias match + case expected @ CapturingType(eparent, refs) => + CapturingType(recur(eparent), refs, boxed = expected.isBoxed) + case expected @ defn.FunctionOf(args, resultType, isContextual, isErased) + if defn.isNonRefinedFunction(expected) && defn.isFunctionType(actual) && !defn.isNonRefinedFunction(actual) => + val expected1 = toDepFun(args, resultType, isContextual, isErased) + expected1 + case _ => + expected + recur(expected) + + /** For the expected type, implement the rule outlined in #14390: + * - when checking an expression `a: Ca Ta` against an expected type `Ce Te`, + * - where the capture set `Ce` contains Cls.this, + * - and where and all method definitions enclosing `a` inside class `Cls` + * have only pure parameters, + * - add to `Ce` all references to variables or this-references in `Ca` + * that are outside `Cls`. These are all accessed through `Cls.this`, + * so we can assume they are already accounted for by `Ce` and adding + * them explicitly to `Ce` changes nothing. 
+ */ + private def addOuterRefs(expected: Type, actual: Type)(using Context): Type = + def isPure(info: Type): Boolean = info match + case info: PolyType => isPure(info.resType) + case info: MethodType => info.paramInfos.forall(_.captureSet.isAlwaysEmpty) && isPure(info.resType) + case _ => true + def isPureContext(owner: Symbol, limit: Symbol): Boolean = + if owner == limit then true + else if !owner.exists then false + else isPure(owner.info) && isPureContext(owner.owner, limit) + def augment(erefs: CaptureSet, arefs: CaptureSet): CaptureSet = + (erefs /: erefs.elems) { (erefs, eref) => + eref match + case eref: ThisType if isPureContext(ctx.owner, eref.cls) => + erefs ++ arefs.filter { + case aref: TermRef => eref.cls.isProperlyContainedIn(aref.symbol.owner) + case aref: ThisType => eref.cls.isProperlyContainedIn(aref.cls) + case _ => false + } + case _ => + erefs + } + expected match + case CapturingType(ecore, erefs) => + val erefs1 = augment(erefs, actual.captureSet) + if erefs1 ne erefs then + capt.println(i"augmented $expected from ${actual.captureSet} --> $erefs1") + expected.derivedCapturingType(ecore, erefs1) + case _ => + expected + + /** Adapt `actual` type to `expected` type by inserting boxing and unboxing conversions */ + def adaptBoxed(actual: Type, expected: Type, pos: SrcPos)(using Context): Type = + + /** Adapt function type `actual`, which is `aargs -> ares` (possibly with dependencies) + * to `expected` type. + * It returns the adapted type along with the additionally captured variable + * during adaptation. 
+ * @param reconstruct how to rebuild the adapted function type + */ + def adaptFun(actual: Type, aargs: List[Type], ares: Type, expected: Type, + covariant: Boolean, boxed: Boolean, + reconstruct: (List[Type], Type) => Type): (Type, CaptureSet) = + val saved = curEnv + curEnv = Env(curEnv.owner, nestedInOwner = true, CaptureSet.Var(), isBoxed = false, if boxed then null else curEnv) + + try + val (eargs, eres) = expected.dealias.stripCapturing match + case defn.FunctionOf(eargs, eres, _, _) => (eargs, eres) + case expected: MethodType => (expected.paramInfos, expected.resType) + case expected @ RefinedType(_, _, rinfo: MethodType) if defn.isFunctionType(expected) => (rinfo.paramInfos, rinfo.resType) + case _ => (aargs.map(_ => WildcardType), WildcardType) + val aargs1 = aargs.zipWithConserve(eargs) { (aarg, earg) => adapt(aarg, earg, !covariant) } + val ares1 = adapt(ares, eres, covariant) + + val resTp = + if (ares1 eq ares) && (aargs1 eq aargs) then actual + else reconstruct(aargs1, ares1) + + (resTp, curEnv.captured) + finally + curEnv = saved + + /** Adapt type function type `actual` to the expected type. 
+ * @see [[adaptFun]] + */ + def adaptTypeFun( + actual: Type, ares: Type, expected: Type, + covariant: Boolean, boxed: Boolean, + reconstruct: Type => Type): (Type, CaptureSet) = + val saved = curEnv + curEnv = Env(curEnv.owner, nestedInOwner = true, CaptureSet.Var(), isBoxed = false, if boxed then null else curEnv) + + try + val eres = expected.dealias.stripCapturing match + case RefinedType(_, _, rinfo: PolyType) => rinfo.resType + case expected: PolyType => expected.resType + case _ => WildcardType + + val ares1 = adapt(ares, eres, covariant) + + val resTp = + if ares1 eq ares then actual + else reconstruct(ares1) + + (resTp, curEnv.captured) + finally + curEnv = saved + end adaptTypeFun + + def adaptInfo(actual: Type, expected: Type, covariant: Boolean): String = + val arrow = if covariant then "~~>" else "<~~" + i"adapting $actual $arrow $expected" + + /** Destruct a capturing type `tp` to a tuple (cs, tp0, boxed), + * where `tp0` is not a capturing type. + * + * If `tp` is a nested capturing type, the return tuple always represents + * the innermost capturing type. The outer capture annotations can be + * reconstructed with the returned function. + */ + def destructCapturingType(tp: Type, reconstruct: Type -> Context ?-> Type = (x: Type) => x) // !cc! 
need monomorphic default argument + : (Type, CaptureSet, Boolean, Type -> Context ?-> Type) = + tp.dealias match + case tp @ CapturingType(parent, cs) => + if parent.dealias.isCapturingType then + destructCapturingType(parent, res => reconstruct(tp.derivedCapturingType(res, cs))) + else + (parent, cs, tp.isBoxed, reconstruct) + case actual => + (actual, CaptureSet(), false, reconstruct) + + def adapt(actual: Type, expected: Type, covariant: Boolean): Type = trace(adaptInfo(actual, expected, covariant), recheckr, show = true) { + if expected.isInstanceOf[WildcardType] then actual + else + val (parent, cs, actualIsBoxed, recon: (Type -> Context ?-> Type)) = destructCapturingType(actual) + + val needsAdaptation = actualIsBoxed != expected.isBoxedCapturing + val insertBox = needsAdaptation && covariant != actualIsBoxed + + val (parent1, cs1) = parent match { + case actual @ AppliedType(tycon, args) if defn.isNonRefinedFunction(actual) => + val (parent1, leaked) = adaptFun(parent, args.init, args.last, expected, covariant, insertBox, + (aargs1, ares1) => actual.derivedAppliedType(tycon, aargs1 :+ ares1)) + (parent1, leaked ++ cs) + case actual @ RefinedType(_, _, rinfo: MethodType) if defn.isFunctionType(actual) => + // TODO Find a way to combine handling of generic and dependent function types (here and elsewhere) + val (parent1, leaked) = adaptFun(parent, rinfo.paramInfos, rinfo.resType, expected, covariant, insertBox, + (aargs1, ares1) => + rinfo.derivedLambdaType(paramInfos = aargs1, resType = ares1) + .toFunctionType(isJava = false, alwaysDependent = true)) + (parent1, leaked ++ cs) + case actual: MethodType => + val (parent1, leaked) = adaptFun(parent, actual.paramInfos, actual.resType, expected, covariant, insertBox, + (aargs1, ares1) => + actual.derivedLambdaType(paramInfos = aargs1, resType = ares1)) + (parent1, leaked ++ cs) + case actual @ RefinedType(p, nme, rinfo: PolyType) if defn.isFunctionOrPolyType(actual) => + val (parent1, leaked) = 
adaptTypeFun(parent, rinfo.resType, expected, covariant, insertBox, + ares1 => + val rinfo1 = rinfo.derivedLambdaType(rinfo.paramNames, rinfo.paramInfos, ares1) + val actual1 = actual.derivedRefinedType(p, nme, rinfo1) + actual1 + ) + (parent1, leaked ++ cs) + case _ => + (parent, cs) + } + + if needsAdaptation then + val criticalSet = // the set which is not allowed to have `*` + if covariant then cs1 // can't box with `*` + else expected.captureSet // can't unbox with `*` + if criticalSet.isUniversal && expected.isValueType then + // We can't box/unbox the universal capability. Leave `actual` as it is + // so we get an error in checkConforms. This tends to give better error + // messages than disallowing the root capability in `criticalSet`. + if ctx.settings.YccDebug.value then + println(i"cannot box/unbox $actual vs $expected") + actual + else + // Disallow future addition of `*` to `criticalSet`. + criticalSet.disallowRootCapability { () => + report.error( + em"""$actual cannot be box-converted to $expected + |since one of their capture sets contains the root capability `*`""", + pos) + } + if !insertBox then // unboxing + markFree(criticalSet, pos) + recon(CapturingType(parent1, cs1, !actualIsBoxed)) + else + recon(CapturingType(parent1, cs1, actualIsBoxed)) + } + + var actualw = actual.widenDealias + actual match + case ref: CaptureRef if ref.isTracked => + actualw match + case CapturingType(p, refs) => + actualw = actualw.derivedCapturingType(p, ref.singletonCaptureSet) + // given `a: C T`, improve `C T` to `{a} T` + case _ => + case _ => + val adapted = adapt(actualw, expected, covariant = true) + if adapted ne actualw then + capt.println(i"adapt boxed $actual vs $expected ===> $adapted") + adapted + else actual + end adaptBoxed + + override def checkUnit(unit: CompilationUnit)(using Context): Unit = + Setup(preRecheckPhase, thisPhase, recheckDef) + .traverse(ctx.compilationUnit.tpdTree) + 
//println(i"SETUP:\n${Recheck.addRecheckedTypes.transform(ctx.compilationUnit.tpdTree)}") + withCaptureSetsExplained { + super.checkUnit(unit) + checkSelfTypes(unit.tpdTree) + postCheck(unit.tpdTree) + if ctx.settings.YccDebug.value then + show(unit.tpdTree) // this does not print tree, but makes its variables visible for dependency printing + } + + /** Check that self types of subclasses conform to self types of super classes. + * (See comment below how this is achieved). The check assumes that classes + * without an explicit self type have the universal capture set `{*}` on the + * self type. If a class without explicit self type is not `effectivelyFinal` + * it is checked that the inferred self type is universal, in order to assure + * that joint and separate compilation give the same result. + */ + def checkSelfTypes(unit: tpd.Tree)(using Context): Unit = + val parentTrees = mutable.HashMap[Symbol, List[Tree]]() + unit.foreachSubTree { + case cdef @ TypeDef(_, impl: Template) => parentTrees(cdef.symbol) = impl.parents + case _ => + } + // Perform self type checking. The problem here is that `checkParents` compares a + // self type of a subclass with the result of an asSeenFrom of the self type of the + // superclass. That's no good. We need to constrain the original superclass self type + // capture set, not the set mapped by asSeenFrom. + // + // Instead, we proceed from parent classes to child classes. For every class + // we first check its parents, and then interpolate the self type to an + // upper approximation that satisfies all constraints on its capture set. + // That means all capture sets of parent self types are constants, so mapping + // them with asSeenFrom is OK. 
+ while parentTrees.nonEmpty do + val roots = parentTrees.keysIterator.filter { + cls => !parentTrees(cls).exists(ptree => parentTrees.contains(ptree.tpe.classSymbol)) + } + assert(roots.nonEmpty) + for case root: ClassSymbol <- roots do + checkSelfAgainstParents(root, root.baseClasses) + val selfType = root.asClass.classInfo.selfType + interpolator(startingVariance = -1).traverse(selfType) + if !root.isEffectivelySealed then + def matchesExplicitRefsInBaseClass(refs: CaptureSet, cls: ClassSymbol): Boolean = + cls.baseClasses.tail.exists { psym => + val selfType = psym.asClass.givenSelfType + selfType.exists && selfType.captureSet.elems == refs.elems + } + selfType match + case CapturingType(_, refs: CaptureSet.Var) + if !refs.isUniversal && !matchesExplicitRefsInBaseClass(refs, root) => + // Forbid inferred self types unless they are already implied by an explicit + // self type in a parent. + report.error( + em"""$root needs an explicitly declared self type since its + |inferred self type $selfType + |is not visible in other compilation units that define subclasses.""", + root.srcPos) + case _ => + parentTrees -= root + capt.println(i"checked $root with $selfType") + end checkSelfTypes + + /** Heal ill-formed capture sets in the type parameter. + * + * We can push parameter refs into a capture set in type parameters + * that this type parameter can't see. + * For example, when capture checking the following expression: + * + * def usingLogFile[T](op: (f: {*} File) => T): T = ... + * + * usingLogFile[box ?1 () -> Unit] { (f: {*} File) => () => { f.write(0) } } + * + * We may propagate `f` into ?1, making ?1 ill-formed. + * This also causes soundness issues, since `f` in ?1 should be widened to `*`, + * giving rise to an error that `*` cannot be included in a boxed capture set. + * + * To solve this, we still allow ?1 to capture parameter refs like `f`, but + * compensate this by pushing the widened capture set of `f` into ?1. 
+ * This solves the soundness issue caused by the ill-formness of ?1. + */ + private def healTypeParam(tree: Tree)(using Context): Unit = + val checker = new TypeTraverser: + private def isAllowed(ref: CaptureRef): Boolean = ref match + case ref: TermParamRef => allowed.contains(ref) + case _ => true + + // Widen the given term parameter refs x₁ : C₁ S₁ , ⋯ , xₙ : Cₙ Sₙ to their capture sets C₁ , ⋯ , Cₙ. + // + // If in these capture sets there are any capture references that are term parameter references we should avoid, + // we will widen them recursively. + private def widenParamRefs(refs: List[TermParamRef]): List[CaptureSet] = + @scala.annotation.tailrec + def recur(todos: List[TermParamRef], acc: List[CaptureSet]): List[CaptureSet] = + todos match + case Nil => acc + case ref :: rem => + val cs = ref.captureSetOfInfo + val nextAcc = cs.filter(isAllowed(_)) :: acc + val nextRem: List[TermParamRef] = (cs.elems.toList.filter(!isAllowed(_)) ++ rem).asInstanceOf + recur(nextRem, nextAcc) + recur(refs, Nil) + + private def healCaptureSet(cs: CaptureSet): Unit = + val toInclude = widenParamRefs(cs.elems.toList.filter(!isAllowed(_)).asInstanceOf) + toInclude.foreach(checkSubset(_, cs, tree.srcPos)) + + private var allowed: SimpleIdentitySet[TermParamRef] = SimpleIdentitySet.empty + + def traverse(tp: Type) = + tp match + case CapturingType(parent, refs) => + healCaptureSet(refs) + traverse(parent) + case tp @ RefinedType(parent, rname, rinfo: MethodType) if defn.isFunctionType(tp) => + traverse(rinfo) + case tp: TermLambda => + val saved = allowed + try + tp.paramRefs.foreach(allowed += _) + traverseChildren(tp) + finally allowed = saved + case _ => + traverseChildren(tp) + + if tree.isInstanceOf[InferredTypeTree] then + checker.traverse(tree.knownType) + end healTypeParam + + /** Perform the following kinds of checks + * - Check all explicitly written capturing types for well-formedness using `checkWellFormedPost`. 
+ * - Check that externally visible `val`s or `def`s have empty capture sets. If not, + * suggest an explicit type. This is so that separate compilation (where external + * symbols have empty capture sets) gives the same results as joint compilation. + * - Check that arguments of TypeApplys and AppliedTypes conform to their bounds. + * - Heal ill-formed capture sets of type parameters. See `healTypeParam`. + */ + def postCheck(unit: tpd.Tree)(using Context): Unit = + unit.foreachSubTree { + case _: InferredTypeTree => + case tree: TypeTree if !tree.span.isZeroExtent => + tree.knownType.foreachPart { tp => + checkWellformedPost(tp, tree.srcPos) + tp match + case AnnotatedType(_, annot) if annot.symbol == defn.RetainsAnnot => + warnIfRedundantCaptureSet(annot.tree) + case _ => + } + case t: ValOrDefDef + if t.tpt.isInstanceOf[InferredTypeTree] && !Synthetics.isExcluded(t.symbol) => + val sym = t.symbol + val isLocal = + sym.owner.ownersIterator.exists(_.isTerm) + || sym.accessBoundary(defn.RootClass).isContainedIn(sym.topLevelClass) + def canUseInferred = // If canUseInferred is false, all capturing types in the type of `sym` need to be given explicitly + sym.is(Private) // private symbols can always have inferred types + || sym.name.is(DefaultGetterName) // default getters are exempted since otherwise it would be + // too annoying. This is a hole since a defualt getter's result type + // might leak into a type variable. 
+ || // non-local symbols cannot have inferred types since external capture types are not inferred + isLocal // local symbols still need explicit types if + && !sym.owner.is(Trait) // they are defined in a trait, since we do OverridingPairs checking before capture inference + def isNotPureThis(ref: CaptureRef) = ref match { + case ref: ThisType => !ref.cls.isPureClass + case _ => true + } + if !canUseInferred then + val inferred = t.tpt.knownType + def checkPure(tp: Type) = tp match + case CapturingType(_, refs) + if !refs.elems.filter(isNotPureThis).isEmpty => + val resultStr = if t.isInstanceOf[DefDef] then " result" else "" + report.error( + em"""Non-local $sym cannot have an inferred$resultStr type + |$inferred + |with non-empty capture set $refs. + |The type needs to be declared explicitly.""".withoutDisambiguation(), + t.srcPos) + case _ => + inferred.foreachPart(checkPure, StopAt.Static) + case t @ TypeApply(fun, args) => + fun.knownType.widen match + case tl: PolyType => + val normArgs = args.lazyZip(tl.paramInfos).map { (arg, bounds) => + arg.withType(arg.knownType.forceBoxStatus( + bounds.hi.isBoxedCapturing | bounds.lo.isBoxedCapturing)) + } + checkBounds(normArgs, tl) + case _ => + + args.foreach(healTypeParam(_)) + case _ => + } + if !ctx.reporter.errorsReported then + // We dont report errors here if previous errors were reported, because other + // errors often result in bad applied types, but flagging these bad types gives + // often worse error messages than the original errors. 
+ val checkApplied = new TreeTraverser: + def traverse(t: Tree)(using Context) = t match + case tree: InferredTypeTree => + case tree: New => + case tree: TypeTree => checkAppliedTypesIn(tree.withKnownType) + case _ => traverseChildren(t) + checkApplied.traverse(unit) + end CaptureChecker +end CheckCaptures diff --git a/tests/pos-with-compiler-cc/dotc/cc/Setup.scala b/tests/pos-with-compiler-cc/dotc/cc/Setup.scala new file mode 100644 index 000000000000..a91831022984 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/cc/Setup.scala @@ -0,0 +1,482 @@ +package dotty.tools +package dotc +package cc + +import core._ +import Phases.*, DenotTransformers.*, SymDenotations.* +import Contexts.*, Names.*, Flags.*, Symbols.*, Decorators.* +import Types.*, StdNames.* +import config.Printers.capt +import ast.tpd +import transform.Recheck.* +import CaptureSet.IdentityCaptRefMap +import Synthetics.isExcluded + +/** A tree traverser that prepares a compilation unit to be capture checked. + * It does the following: + * - For every inferred type, drop any retains annotations, + * add capture sets to all its parts, add refinements to class types and function types. + * (c.f. mapInferred) + * - For explicit capturing types, expand throws aliases to the underlying (pure) function, + * and add some implied capture sets to curried functions (c.f. expandThrowsAlias, expandAbbreviations). + * - Add capture sets to self types of classes and objects, unless the self type was written explicitly. + * - Box the types of mutable variables and type arguments to methods (type arguments of types + * are boxed on access). + * - Link the external types of val and def symbols with the inferred types based on their parameter symbols. 
+ */ +class Setup( + preRecheckPhase: DenotTransformer, + thisPhase: DenotTransformer, + recheckDef: (tpd.ValOrDefDef, Symbol) => Context ?=> Unit) +extends tpd.TreeTraverser: + import tpd.* + + /** Create dependent function with underlying function class `tycon` and given + * arguments `argTypes` and result `resType`. + */ + private def depFun(tycon: Type, argTypes: List[Type], resType: Type)(using Context): Type = + MethodType.companion( + isContextual = defn.isContextFunctionClass(tycon.classSymbol), + isErased = defn.isErasedFunctionClass(tycon.classSymbol) + )(argTypes, resType) + .toFunctionType(isJava = false, alwaysDependent = true) + + /** If `tp` is an unboxed capturing type or a function returning an unboxed capturing type, + * convert it to be boxed. + */ + private def box(tp: Type)(using Context): Type = + def recur(tp: Type): Type = tp.dealias match + case tp @ CapturingType(parent, refs) if !tp.isBoxed => + tp.boxed + case tp1 @ AppliedType(tycon, args) if defn.isNonRefinedFunction(tp1) => + val res = args.last + val boxedRes = recur(res) + if boxedRes eq res then tp + else tp1.derivedAppliedType(tycon, args.init :+ boxedRes) + case tp1 @ RefinedType(_, _, rinfo) if defn.isFunctionType(tp1) => + val boxedRinfo = recur(rinfo) + if boxedRinfo eq rinfo then tp + else boxedRinfo.toFunctionType(isJava = false, alwaysDependent = true) + case tp1: MethodOrPoly => + val res = tp1.resType + val boxedRes = recur(res) + if boxedRes eq res then tp + else tp1.derivedLambdaType(resType = boxedRes) + case _ => tp + tp match + case tp: MethodOrPoly => tp // don't box results of methods outside refinements + case _ => recur(tp) + + /** Perform the following transformation steps everywhere in a type: + * 1. Drop retains annotations + * 2. Turn plain function types into dependent function types, so that + * we can refer to their parameters in capture sets. Currently this is + * only done at the toplevel, i.e. 
for function types that are not + * themselves argument types of other function types. Without this restriction + * pos.../lists.scala and pos/...curried-shorthands.scala fail. + * Need to figure out why. + * 3. Refine other class types C by adding capture set variables to their parameter getters + * (see addCaptureRefinements) + * 4. Add capture set variables to all types that can be tracked + * + * Polytype bounds are only cleaned using step 1, but not otherwise transformed. + */ + private def mapInferred(using DetachedContext) = new TypeMap: + + /** Drop @retains annotations everywhere */ + object cleanup extends TypeMap: + def apply(t: Type) = t match + case AnnotatedType(parent, annot) if annot.symbol == defn.RetainsAnnot => + apply(parent) + case _ => + mapOver(t) + + /** Refine a possibly applied class type C where the class has tracked parameters + * x_1: T_1, ..., x_n: T_n to C { val x_1: CV_1 T_1, ..., val x_n: CV_n T_n } + * where CV_1, ..., CV_n are fresh capture sets. + */ + def addCaptureRefinements(tp: Type): Type = tp match + case _: TypeRef | _: AppliedType if tp.typeParams.isEmpty => + tp.typeSymbol match + case cls: ClassSymbol + if !defn.isFunctionClass(cls) && !cls.is(JavaDefined) => + // We assume that Java classes can refer to capturing Scala types only indirectly, + // using type parameters. Hence, no need to refine them. 
+ cls.paramGetters.foldLeft(tp) { (core, getter) => + if getter.termRef.isTracked then + val getterType = tp.memberInfo(getter).strippedDealias + RefinedType(core, getter.name, CapturingType(getterType, CaptureSet.Var())) + .showing(i"add capture refinement $tp --> $result", capt) + else + core + } + case _ => tp + case _ => tp + + private def superTypeIsImpure(tp: Type): Boolean = { + tp.dealias match + case CapturingType(_, refs) => + !refs.isAlwaysEmpty + case tp: (TypeRef | AppliedType) => + val sym = tp.typeSymbol + if sym.isClass then + sym == defn.AnyClass + // we assume Any is a shorthand of {*} Any, so if Any is an upper + // bound, the type is taken to be impure. + else superTypeIsImpure(tp.superType) + case tp: (RefinedOrRecType | MatchType) => + superTypeIsImpure(tp.underlying) + case tp: AndType => + superTypeIsImpure(tp.tp1) || needsVariable(tp.tp2) + case tp: OrType => + superTypeIsImpure(tp.tp1) && superTypeIsImpure(tp.tp2) + case _ => + false + }.showing(i"super type is impure $tp = $result", capt) + + /** Should a capture set variable be added on type `tp`? 
*/ + def needsVariable(tp: Type): Boolean = { + tp.typeParams.isEmpty && tp.match + case tp: (TypeRef | AppliedType) => + val tp1 = tp.dealias + if tp1 ne tp then needsVariable(tp1) + else + val sym = tp1.typeSymbol + if sym.isClass then + !sym.isPureClass && sym != defn.AnyClass + else superTypeIsImpure(tp1) + case tp: (RefinedOrRecType | MatchType) => + needsVariable(tp.underlying) + case tp: AndType => + needsVariable(tp.tp1) && needsVariable(tp.tp2) + case tp: OrType => + needsVariable(tp.tp1) || needsVariable(tp.tp2) + case CapturingType(parent, refs) => + needsVariable(parent) + && refs.isConst // if refs is a variable, no need to add another + && !refs.isUniversal // if refs is {*}, an added variable would not change anything + case _ => + false + }.showing(i"can have inferred capture $tp = $result", capt) + + /** Add a capture set variable to `tp` if necessary, or maybe pull out + * an embedded capture set variable from a part of `tp`. + */ + def addVar(tp: Type) = tp match + case tp @ RefinedType(parent @ CapturingType(parent1, refs), rname, rinfo) => + CapturingType(tp.derivedRefinedType(parent1, rname, rinfo), refs, parent.isBoxed) + case tp: RecType => + tp.parent match + case parent @ CapturingType(parent1, refs) => + CapturingType(tp.derivedRecType(parent1), refs, parent.isBoxed) + case _ => + tp // can return `tp` here since unlike RefinedTypes, RecTypes are never created + // by `mapInferred`. Hence if the underlying type admits capture variables + // a variable was already added, and the first case above would apply. 
+ case AndType(tp1 @ CapturingType(parent1, refs1), tp2 @ CapturingType(parent2, refs2)) => + assert(refs1.asVar.elems.isEmpty) + assert(refs2.asVar.elems.isEmpty) + assert(tp1.isBoxed == tp2.isBoxed) + CapturingType(AndType(parent1, parent2), refs1 ** refs2, tp1.isBoxed) + case tp @ OrType(tp1 @ CapturingType(parent1, refs1), tp2 @ CapturingType(parent2, refs2)) => + assert(refs1.asVar.elems.isEmpty) + assert(refs2.asVar.elems.isEmpty) + assert(tp1.isBoxed == tp2.isBoxed) + CapturingType(OrType(parent1, parent2, tp.isSoft), refs1 ++ refs2, tp1.isBoxed) + case tp @ OrType(tp1 @ CapturingType(parent1, refs1), tp2) => + CapturingType(OrType(parent1, tp2, tp.isSoft), refs1, tp1.isBoxed) + case tp @ OrType(tp1, tp2 @ CapturingType(parent2, refs2)) => + CapturingType(OrType(tp1, parent2, tp.isSoft), refs2, tp2.isBoxed) + case _ if needsVariable(tp) => + val cs = tp.dealias match + case CapturingType(_, refs) => CaptureSet.Var(refs.elems) + case _ => CaptureSet.Var() + CapturingType(tp, cs) + case _ => + tp + + private var isTopLevel = true + + private def mapNested(ts: List[Type]): List[Type] = + val saved = isTopLevel + isTopLevel = false + try ts.mapConserve(this) finally isTopLevel = saved + + def apply(t: Type) = + val tp = expandThrowsAlias(t) + val tp1 = tp match + case AnnotatedType(parent, annot) if annot.symbol == defn.RetainsAnnot => + // Drop explicit retains annotations + apply(parent) + case tp @ AppliedType(tycon, args) => + val tycon1 = this(tycon) + if defn.isNonRefinedFunction(tp) then + // Convert toplevel generic function types to dependent functions + val args0 = args.init + var res0 = args.last + val args1 = mapNested(args0) + val res1 = this(res0) + if isTopLevel then + depFun(tycon1, args1, res1) + .showing(i"add function refinement $tp --> $result", capt) + else if (tycon1 eq tycon) && (args1 eq args0) && (res1 eq res0) then + tp + else + tp.derivedAppliedType(tycon1, args1 :+ res1) + else + tp.derivedAppliedType(tycon1, args.mapConserve(arg => 
this(arg))) + case tp @ RefinedType(core, rname, rinfo) if defn.isFunctionType(tp) => + val rinfo1 = apply(rinfo) + if rinfo1 ne rinfo then rinfo1.toFunctionType(isJava = false, alwaysDependent = true) + else tp + case tp: MethodType => + tp.derivedLambdaType( + paramInfos = mapNested(tp.paramInfos), + resType = this(tp.resType)) + case tp: TypeLambda => + // Don't recurse into parameter bounds, just cleanup any stray retains annotations + tp.derivedLambdaType( + paramInfos = tp.paramInfos.mapConserve(cleanup(_).bounds), + resType = this(tp.resType)) + case _ => + mapOver(tp) + addVar(addCaptureRefinements(tp1)) + end apply + end mapInferred + + private def transformInferredType(tp: Type, boxed: Boolean)(using Context): Type = + val tp1 = mapInferred(tp) + if boxed then box(tp1) else tp1 + + /** Expand some aliases of function types to the underlying functions. + * Right now, these are only $throws aliases, but this could be generalized. + */ + private def expandThrowsAlias(tp: Type)(using Context) = tp match + case AppliedType(tycon, res :: exc :: Nil) if tycon.typeSymbol == defn.throwsAlias => + // hard-coded expansion since $throws aliases in stdlib are defined with `?=>` rather than `?->` + defn.FunctionOf(defn.CanThrowClass.typeRef.appliedTo(exc) :: Nil, res, isContextual = true, isErased = true) + case _ => tp + + private def expandThrowsAliases(using DetachedContext) = new TypeMap: + def apply(t: Type) = t match + case _: AppliedType => + val t1 = expandThrowsAlias(t) + if t1 ne t then apply(t1) else mapOver(t) + case _: LazyRef => + t + case t @ AnnotatedType(t1, ann) => + // Don't map capture sets, since that would implicitly normalize sets that + // are not well-formed. + t.derivedAnnotatedType(apply(t1), ann) + case _ => + mapOver(t) + + /** Fill in capture sets of curried function types from left to right, using + * a combination of the following two rules: + * + * 1. Expand `{c} (x: A) -> (y: B) -> C` + * to `{c} (x: A) -> {c} (y: B) -> C` + * 2. 
Expand `(x: A) -> (y: B) -> C` where `x` is tracked + * to `(x: A) -> {x} (y: B) -> C` + * + * TODO: Should we also propagate capture sets to the left? + */ + private def expandAbbreviations(using DetachedContext) = new TypeMap: + + /** Propagate `outerCs` as well as all tracked parameters as capture set to the result type + * of the dependent function type `tp`. + */ + def propagateDepFunctionResult(tp: Type, outerCs: CaptureSet): Type = tp match + case RefinedType(parent, nme.apply, rinfo: MethodType) => + val localCs = CaptureSet(rinfo.paramRefs.filter(_.isTracked)*) + val rinfo1 = rinfo.derivedLambdaType( + resType = propagateEnclosing(rinfo.resType, CaptureSet.empty, outerCs ++ localCs)) + if rinfo1 ne rinfo then rinfo1.toFunctionType(isJava = false, alwaysDependent = true) + else tp + + /** If `tp` is a function type: + * - add `outerCs` as its capture set, + * - propagate `currentCs`, `outerCs`, and all tracked parameters of `tp` to the right. + */ + def propagateEnclosing(tp: Type, currentCs: CaptureSet, outerCs: CaptureSet): Type = tp match + case tp @ AppliedType(tycon, args) if defn.isFunctionClass(tycon.typeSymbol) => + val tycon1 = this(tycon) + val args1 = args.init.mapConserve(this) + val tp1 = + if args1.exists(!_.captureSet.isAlwaysEmpty) then + val propagated = propagateDepFunctionResult( + depFun(tycon, args1, args.last), currentCs ++ outerCs) + propagated match + case RefinedType(_, _, mt: MethodType) => + if mt.isCaptureDependent then propagated + else + // No need to introduce dependent type, switch back to generic function type + tp.derivedAppliedType(tycon1, args1 :+ mt.resType) + else + val resType1 = propagateEnclosing( + args.last, CaptureSet.empty, currentCs ++ outerCs) + tp.derivedAppliedType(tycon1, args1 :+ resType1) + tp1.capturing(outerCs) + case tp @ RefinedType(parent, nme.apply, rinfo: MethodType) if defn.isFunctionType(tp) => + propagateDepFunctionResult(mapOver(tp), currentCs ++ outerCs) + .capturing(outerCs) + case _ => + 
mapOver(tp) + + def apply(tp: Type): Type = tp match + case CapturingType(parent, cs) => + tp.derivedCapturingType(propagateEnclosing(parent, cs, CaptureSet.empty), cs) + case _ => + propagateEnclosing(tp, CaptureSet.empty, CaptureSet.empty) + end expandAbbreviations + + private def transformExplicitType(tp: Type, boxed: Boolean)(using Context): Type = + val tp1 = expandThrowsAliases(if boxed then box(tp) else tp) + if tp1 ne tp then capt.println(i"expanded: $tp --> $tp1") + if ctx.settings.YccNoAbbrev.value then tp1 + else expandAbbreviations(tp1) + + /** Transform type of type tree, and remember the transformed type as the type the tree */ + private def transformTT(tree: TypeTree, boxed: Boolean, exact: Boolean)(using Context): Unit = + if !tree.hasRememberedType then + tree.rememberType( + if tree.isInstanceOf[InferredTypeTree] && !exact + then transformInferredType(tree.tpe, boxed) + else transformExplicitType(tree.tpe, boxed)) + + /** Substitute parameter symbols in `from` to paramRefs in corresponding + * method or poly types `to`. We use a single BiTypeMap to do everything. 
+ * @param from a list of lists of type or term parameter symbols of a curried method + * @param to a list of method or poly types corresponding one-to-one to the parameter lists + */ + private class SubstParams(from: List[List[Symbol]], to: List[LambdaType])(using DetachedContext) + extends DeepTypeMap, BiTypeMap: + + def apply(t: Type): Type = t match + case t: NamedType => + val sym = t.symbol + def outer(froms: List[List[Symbol]], tos: List[LambdaType]): Type = + def inner(from: List[Symbol], to: List[ParamRef]): Type = + if from.isEmpty then outer(froms.tail, tos.tail) + else if sym eq from.head then to.head + else inner(from.tail, to.tail) + if tos.isEmpty then t + else inner(froms.head, tos.head.paramRefs) + outer(from, to) + case _ => + mapOver(t) + + def inverse(t: Type): Type = t match + case t: ParamRef => + def recur(from: List[LambdaType], to: List[List[Symbol]]): Type = + if from.isEmpty then t + else if t.binder eq from.head then to.head(t.paramNum).namedType + else recur(from.tail, to.tail) + recur(to, from) + case _ => + mapOver(t) + end SubstParams + + /** Update info of `sym` for CheckCaptures phase only */ + private def updateInfo(sym: Symbol, info: Type)(using Context) = + sym.updateInfoBetween(preRecheckPhase, thisPhase, info) + + def traverse(tree: Tree)(using Context): Unit = + tree match + case tree: DefDef => + if isExcluded(tree.symbol) then + return + tree.tpt match + case tpt: TypeTree if tree.symbol.allOverriddenSymbols.hasNext => + tree.paramss.foreach(traverse) + transformTT(tpt, boxed = false, exact = true) + traverse(tree.rhs) + //println(i"TYPE of ${tree.symbol.showLocated} = ${tpt.knownType}") + case _ => + traverseChildren(tree) + case tree @ ValDef(_, tpt: TypeTree, _) => + transformTT(tpt, + boxed = tree.symbol.is(Mutable), // types of mutable variables are boxed + exact = tree.symbol.allOverriddenSymbols.hasNext // types of symbols that override a parent don't get a capture set + ) + traverse(tree.rhs) + case tree @ 
TypeApply(fn, args) => + traverse(fn) + for case arg: TypeTree <- args do + transformTT(arg, boxed = true, exact = false) // type arguments in type applications are boxed + case _ => + traverseChildren(tree) + tree match + case tree: TypeTree => + transformTT(tree, boxed = false, exact = false) // other types are not boxed + case tree: ValOrDefDef => + val sym = tree.symbol + + // replace an existing symbol info with inferred types where capture sets of + // TypeParamRefs and TermParamRefs put in correspondence by BiTypeMaps with the + // capture sets of the types of the method's parameter symbols and result type. + def integrateRT( + info: Type, // symbol info to replace + psymss: List[List[Symbol]], // the local (type and term) parameter symbols corresponding to `info` + prevPsymss: List[List[Symbol]], // the local parameter symbols seen previously in reverse order + prevLambdas: List[LambdaType] // the outer method and polytypes generated previously in reverse order + ): Type = + info match + case mt: MethodOrPoly => + val psyms = psymss.head + mt.companion(mt.paramNames)( + mt1 => + if !psyms.exists(_.isUpdatedAfter(preRecheckPhase)) && !mt.isParamDependent && prevLambdas.isEmpty then + mt.paramInfos + else + val subst = SubstParams(psyms :: prevPsymss, mt1 :: prevLambdas) + psyms.map(psym => subst(psym.info).asInstanceOf[mt.PInfo]), + mt1 => + integrateRT(mt.resType, psymss.tail, psyms :: prevPsymss, mt1 :: prevLambdas) + ) + case info: ExprType => + info.derivedExprType(resType = + integrateRT(info.resType, psymss, prevPsymss, prevLambdas)) + case _ => + val restp = tree.tpt.knownType + if prevLambdas.isEmpty then restp + else SubstParams(prevPsymss, prevLambdas)(restp) + + if tree.tpt.hasRememberedType && !sym.isConstructor then + val newInfo = integrateRT(sym.info, sym.paramSymss, Nil, Nil) + .showing(i"update info $sym: ${sym.info} --> $result", capt) + if newInfo ne sym.info then + val completer = new LazyType: + def complete(denot: SymDenotation)(using 
Context) = + denot.info = newInfo + recheckDef(tree, sym) + updateInfo(sym, completer) + case tree: Bind => + val sym = tree.symbol + updateInfo(sym, transformInferredType(sym.info, boxed = false)) + case tree: TypeDef => + tree.symbol match + case cls: ClassSymbol => + val cinfo @ ClassInfo(prefix, _, ps, decls, selfInfo) = cls.classInfo + if (selfInfo eq NoType) || cls.is(ModuleClass) && !cls.isStatic then + // add capture set to self type of nested classes if no self type is given explicitly + val localRefs = CaptureSet.Var() + val newInfo = ClassInfo(prefix, cls, ps, decls, + CapturingType(cinfo.selfType, localRefs) + .showing(i"inferred self type for $cls: $result", capt)) + updateInfo(cls, newInfo) + cls.thisType.asInstanceOf[ThisType].invalidateCaches() + if cls.is(ModuleClass) then + // if it's a module, the capture set of the module reference is the capture set of the self type + val modul = cls.sourceModule + updateInfo(modul, CapturingType(modul.info, localRefs)) + modul.termRef.invalidateCaches() + case _ => + val info = atPhase(preRecheckPhase)(tree.symbol.info) + val newInfo = transformExplicitType(info, boxed = false) + if newInfo ne info then + updateInfo(tree.symbol, newInfo) + capt.println(i"update info of ${tree.symbol} from $info to $newInfo") + case _ => + end traverse +end Setup diff --git a/tests/pos-with-compiler-cc/dotc/cc/Synthetics.scala b/tests/pos-with-compiler-cc/dotc/cc/Synthetics.scala new file mode 100644 index 000000000000..dacbd27e0f35 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/cc/Synthetics.scala @@ -0,0 +1,189 @@ +package dotty.tools +package dotc +package cc + +import core.* +import Symbols.*, SymDenotations.*, Contexts.*, Flags.*, Types.*, Decorators.* +import StdNames.nme +import Names.Name +import NameKinds.DefaultGetterName +import Phases.checkCapturesPhase +import config.Printers.capt + +/** Classification and transformation methods for synthetic + * case class methods that need to be treated specially. 
+ * In particular, compute capturing types for some of these methods which + * have inferred (result-)types that need to be established under separate + * compilation. + */ +object Synthetics: + private def isSyntheticCopyMethod(sym: SymDenotation)(using Context) = + sym.name == nme.copy && sym.is(Synthetic) && sym.owner.isClass && sym.owner.is(Case) + + private def isSyntheticCompanionMethod(sym: SymDenotation, names: Name*)(using Context): Boolean = + names.contains(sym.name) && sym.is(Synthetic) && sym.owner.is(Module) && sym.owner.companionClass.is(Case) + + private def isSyntheticCopyDefaultGetterMethod(sym: SymDenotation)(using Context) = sym.name match + case DefaultGetterName(nme.copy, _) => sym.is(Synthetic) && sym.owner.isClass && sym.owner.is(Case) + case _ => false + + /** Is `sym` a synthetic apply, copy, or copy default getter method? + * The types of these symbols are transformed in a special way without + * looking at the definitions's RHS + */ + def needsTransform(symd: SymDenotation)(using Context): Boolean = + isSyntheticCopyMethod(symd) + || isSyntheticCompanionMethod(symd, nme.apply, nme.unapply) + || isSyntheticCopyDefaultGetterMethod(symd) + || (symd.symbol eq defn.Object_eq) + || (symd.symbol eq defn.Object_ne) + + /** Method is excluded from regular capture checking. + * Excluded are synthetic class members + * - that override a synthesized case class symbol, or + * - the fromProduct method, or + * - members transformed specially as indicated by `needsTransform`. + */ + def isExcluded(sym: Symbol)(using Context): Boolean = + sym.is(Synthetic) + && sym.owner.isClass + && ( defn.caseClassSynthesized.exists( + ccsym => sym.overriddenSymbol(ccsym.owner.asClass) == ccsym) + || isSyntheticCompanionMethod(sym, nme.fromProduct) + || needsTransform(sym)) + + /** Add capture dependencies to the type of the `apply` or `copy` method of a case class. 
+ * An apply method in a case class like this: + * case class CC(a: {d} A, b: B, {*} c: C) + * would get type + * def apply(a': {d} A, b: B, {*} c': C): {a', c'} CC { val a = {a'} A, val c = {c'} C } + * where `'` is used to indicate the difference between parameter symbol and refinement name. + * Analogous for the copy method. + */ + private def addCaptureDeps(info: Type)(using Context): Type = info match + case info: MethodType => + val trackedParams = info.paramRefs.filter(atPhase(checkCapturesPhase)(_.isTracked)) + def augmentResult(tp: Type): Type = tp match + case tp: MethodOrPoly => + tp.derivedLambdaType(resType = augmentResult(tp.resType)) + case _ => + val refined = trackedParams.foldLeft(tp) { (parent, pref) => + RefinedType(parent, pref.paramName, + CapturingType( + atPhase(ctx.phase.next)(pref.underlying.stripCapturing), + CaptureSet(pref))) + } + CapturingType(refined, CaptureSet(trackedParams*)) + if trackedParams.isEmpty then info + else augmentResult(info).showing(i"augment apply/copy type $info to $result", capt) + case info: PolyType => + info.derivedLambdaType(resType = addCaptureDeps(info.resType)) + case _ => + info + + /** Drop capture dependencies from the type of `apply` or `copy` method of a case class */ + private def dropCaptureDeps(tp: Type)(using Context): Type = tp match + case tp: MethodOrPoly => + tp.derivedLambdaType(resType = dropCaptureDeps(tp.resType)) + case CapturingType(parent, _) => + dropCaptureDeps(parent) + case RefinedType(parent, _, _) => + dropCaptureDeps(parent) + case _ => + tp + + /** Add capture information to the type of the default getter of a case class copy method */ + private def addDefaultGetterCapture(info: Type, owner: Symbol, idx: Int)(using Context): Type = info match + case info: MethodOrPoly => + info.derivedLambdaType(resType = addDefaultGetterCapture(info.resType, owner, idx)) + case info: ExprType => + info.derivedExprType(addDefaultGetterCapture(info.resType, owner, idx)) + case 
EventuallyCapturingType(parent, _) => + addDefaultGetterCapture(parent, owner, idx) + case info @ AnnotatedType(parent, annot) => + info.derivedAnnotatedType(addDefaultGetterCapture(parent, owner, idx), annot) + case _ if idx < owner.asClass.paramGetters.length => + val param = owner.asClass.paramGetters(idx) + val pinfo = param.info + atPhase(ctx.phase.next) { + if pinfo.captureSet.isAlwaysEmpty then info + else CapturingType(pinfo.stripCapturing, CaptureSet(param.termRef)) + } + case _ => + info + + /** Drop capture information from the type of the default getter of a case class copy method */ + private def dropDefaultGetterCapture(info: Type)(using Context): Type = info match + case info: MethodOrPoly => + info.derivedLambdaType(resType = dropDefaultGetterCapture(info.resType)) + case CapturingType(parent, _) => + parent + case info @ AnnotatedType(parent, annot) => + info.derivedAnnotatedType(dropDefaultGetterCapture(parent), annot) + case _ => + info + + /** Augment an unapply of type `(x: C): D` to `(x: {*} C): {x} D` */ + private def addUnapplyCaptures(info: Type)(using Context): Type = info match + case info: MethodType => + val paramInfo :: Nil = info.paramInfos: @unchecked + val newParamInfo = + CapturingType(paramInfo, CaptureSet.universal) + val trackedParam = info.paramRefs.head + def newResult(tp: Type): Type = tp match + case tp: MethodOrPoly => + tp.derivedLambdaType(resType = newResult(tp.resType)) + case _ => + CapturingType(tp, CaptureSet(trackedParam)) + info.derivedLambdaType(paramInfos = newParamInfo :: Nil, resType = newResult(info.resType)) + .showing(i"augment unapply type $info to $result", capt) + case info: PolyType => + info.derivedLambdaType(resType = addUnapplyCaptures(info.resType)) + + /** Drop added capture information from the type of an `unapply` */ + private def dropUnapplyCaptures(info: Type)(using Context): Type = info match + case info: MethodType => + info.paramInfos match + case CapturingType(oldParamInfo, _) :: Nil => + 
def oldResult(tp: Type): Type = tp match + case tp: MethodOrPoly => + tp.derivedLambdaType(resType = oldResult(tp.resType)) + case CapturingType(tp, _) => + tp + info.derivedLambdaType(paramInfos = oldParamInfo :: Nil, resType = oldResult(info.resType)) + case _ => + info + case info: PolyType => + info.derivedLambdaType(resType = dropUnapplyCaptures(info.resType)) + + /** If `sym` refers to a synthetic apply, unapply, copy, or copy default getter method + * of a case class, transform it to account for capture information. + * The method is run in phase CheckCaptures.Pre + * @pre needsTransform(sym) + */ + def transformToCC(sym: SymDenotation)(using Context): SymDenotation = sym.name match + case DefaultGetterName(nme.copy, n) => + sym.copySymDenotation(info = addDefaultGetterCapture(sym.info, sym.owner, n)) + case nme.unapply => + sym.copySymDenotation(info = addUnapplyCaptures(sym.info)) + case nme.apply | nme.copy => + sym.copySymDenotation(info = addCaptureDeps(sym.info)) + case n if n == nme.eq || n == nme.ne => + sym.copySymDenotation(info = + MethodType(defn.ObjectType.capturing(CaptureSet.universal) :: Nil, defn.BooleanType)) + + /** If `sym` refers to a synthetic apply, unapply, copy, or copy default getter method + * of a case class, transform it back to what it was before the CC phase. 
+ * @pre needsTransform(sym) + */ + def transformFromCC(sym: SymDenotation)(using Context): SymDenotation = sym.name match + case DefaultGetterName(nme.copy, n) => + sym.copySymDenotation(info = dropDefaultGetterCapture(sym.info)) + case nme.unapply => + sym.copySymDenotation(info = dropUnapplyCaptures(sym.info)) + case nme.apply | nme.copy => + sym.copySymDenotation(info = dropCaptureDeps(sym.info)) + case n if n == nme.eq || n == nme.ne => + sym.copySymDenotation(info = defn.methOfAnyRef(defn.BooleanType)) + +end Synthetics \ No newline at end of file diff --git a/tests/pos-with-compiler-cc/dotc/classpath/AggregateClassPath.scala b/tests/pos-with-compiler-cc/dotc/classpath/AggregateClassPath.scala new file mode 100644 index 000000000000..51b261583feb --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/classpath/AggregateClassPath.scala @@ -0,0 +1,162 @@ +/* + * Copyright (c) 2014 Contributor. All rights reserved. + */ +package dotty.tools +package dotc.classpath + +import scala.language.unsafeNulls + +import java.net.URL +import scala.collection.mutable.ArrayBuffer +import scala.collection.immutable.ArraySeq +import dotc.util + +import dotty.tools.io.{ AbstractFile, ClassPath, ClassRepresentation, EfficientClassPath } + +/** + * A classpath unifying multiple class- and sourcepath entries. + * The Classpath can obtain entries for classes and sources independently + * so it tries to do operations quite optimally - iterating only these collections + * which are needed in the given moment and only as far as it's necessary. 
+ * + * @param aggregates classpath instances containing entries which this class processes + */ +case class AggregateClassPath(aggregates: Seq[ClassPath]) extends ClassPath { + override def findClassFile(className: String): Option[AbstractFile] = { + val (pkg, _) = PackageNameUtils.separatePkgAndClassNames(className) + aggregatesForPackage(PackageName(pkg)).iterator.map(_.findClassFile(className)).collectFirst { + case Some(x) => x + } + } + private val packageIndex: collection.mutable.Map[String, Seq[ClassPath]] = collection.mutable.Map() + private def aggregatesForPackage(pkg: PackageName): Seq[ClassPath] = packageIndex.synchronized { + packageIndex.getOrElseUpdate(pkg.dottedString, aggregates.filter(_.hasPackage(pkg))) + } + + override def findClass(className: String): Option[ClassRepresentation] = { + val (pkg, _) = PackageNameUtils.separatePkgAndClassNames(className) + + def findEntry(isSource: Boolean): Option[ClassRepresentation] = + aggregatesForPackage(PackageName(pkg)).iterator.map(_.findClass(className)).collectFirst { + case Some(s: SourceFileEntry) if isSource => s + case Some(s: ClassFileEntry) if !isSource => s + } + + val classEntry = findEntry(isSource = false) + val sourceEntry = findEntry(isSource = true) + + (classEntry, sourceEntry) match { + case (Some(c: ClassFileEntry), Some(s: SourceFileEntry)) => Some(ClassAndSourceFilesEntry(c.file, s.file)) + case (c @ Some(_), _) => c + case (_, s) => s + } + } + + override def asURLs: Seq[URL] = aggregates.flatMap(_.asURLs) + + override def asClassPathStrings: Seq[String] = aggregates.map(_.asClassPathString).distinct + + override def asSourcePathString: String = ClassPath.join(aggregates map (_.asSourcePathString): _*) + + override private[dotty] def packages(inPackage: PackageName): Seq[PackageEntry] = { + val aggregatedPackages = aggregates.flatMap(_.packages(inPackage)).distinct + aggregatedPackages + } + + override private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = + 
getDistinctEntries(_.classes(inPackage)) + + override private[dotty] def sources(inPackage: PackageName): Seq[SourceFileEntry] = + getDistinctEntries(_.sources(inPackage)) + + override private[dotty] def hasPackage(pkg: PackageName): Boolean = aggregates.exists(_.hasPackage(pkg)) + override private[dotty] def list(inPackage: PackageName): ClassPathEntries = { + val packages: java.util.HashSet[PackageEntry] = new java.util.HashSet[PackageEntry]() + val classesAndSourcesBuffer = collection.mutable.ArrayBuffer[ClassRepresentation]() + val onPackage: PackageEntry => Unit = packages.add(_) + val onClassesAndSources: ClassRepresentation => Unit = classesAndSourcesBuffer += _ + + aggregates.foreach { cp => + try { + cp match { + case ecp: EfficientClassPath => + ecp.list(inPackage, onPackage, onClassesAndSources) + case _ => + val entries = cp.list(inPackage) + entries._1.foreach(entry => packages.add(entry)) + classesAndSourcesBuffer ++= entries._2 + } + } catch { + case ex: java.io.IOException => + val e = FatalError(ex.getMessage) + e.initCause(ex) + throw e + } + } + + val distinctPackages: Seq[PackageEntry] = { + val arr = packages.toArray(new Array[PackageEntry](packages.size())) + ArraySeq.unsafeWrapArray(arr) + } + val distinctClassesAndSources = mergeClassesAndSources(classesAndSourcesBuffer) + ClassPathEntries(distinctPackages, distinctClassesAndSources) + } + + /** + * Returns only one entry for each name. If there's both a source and a class entry, it + * creates an entry containing both of them. If there would be more than one class or source + * entries for the same class it always would use the first entry of each type found on a classpath. 
+ */ + private def mergeClassesAndSources(entries: scala.collection.Seq[ClassRepresentation]): Seq[ClassRepresentation] = { + // based on the implementation from MergedClassPath + var count = 0 + val indices = util.HashMap[String, Int]() + val mergedEntries = new ArrayBuffer[ClassRepresentation](entries.size) + for { + entry <- entries + } { + val name = entry.name + if (indices.contains(name)) { + val index = indices(name) + val existing = mergedEntries(index) + + if (existing.binary.isEmpty && entry.binary.isDefined) + mergedEntries(index) = ClassAndSourceFilesEntry(entry.binary.get, existing.source.get) + if (existing.source.isEmpty && entry.source.isDefined) + mergedEntries(index) = ClassAndSourceFilesEntry(existing.binary.get, entry.source.get) + } + else { + indices(name) = count + mergedEntries += entry + count += 1 + } + } + if (mergedEntries.isEmpty) Nil else mergedEntries.toIndexedSeq + } + + private def getDistinctEntries[EntryType <: ClassRepresentation](getEntries: ClassPath => Seq[EntryType]): Seq[EntryType] = { + val seenNames = util.HashSet[String]() + val entriesBuffer = new ArrayBuffer[EntryType](1024) + for { + cp <- aggregates + entry <- getEntries(cp) if !seenNames.contains(entry.name) + } + { + entriesBuffer += entry + seenNames += entry.name + } + entriesBuffer.toIndexedSeq + } +} + +object AggregateClassPath { + def createAggregate(parts: ClassPath*): ClassPath = { + val elems = new ArrayBuffer[ClassPath]() + parts foreach { + case AggregateClassPath(ps) => elems ++= ps + case p => elems += p + } + if (elems.size == 1) elems.head + else AggregateClassPath(elems.toIndexedSeq) + } +} diff --git a/tests/pos-with-compiler-cc/dotc/classpath/ClassPath.scala b/tests/pos-with-compiler-cc/dotc/classpath/ClassPath.scala new file mode 100644 index 000000000000..176b6acf9c6c --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/classpath/ClassPath.scala @@ -0,0 +1,85 @@ +/* + * Copyright (c) 2014 Contributor. All rights reserved. 
+ */ +package dotty.tools.dotc.classpath + +import dotty.tools.io.AbstractFile +import dotty.tools.io.ClassRepresentation + +case class ClassPathEntries(packages: scala.collection.Seq[PackageEntry], classesAndSources: scala.collection.Seq[ClassRepresentation]) { + def toTuple: (scala.collection.Seq[PackageEntry], scala.collection.Seq[ClassRepresentation]) = (packages, classesAndSources) +} + +object ClassPathEntries { + val empty = ClassPathEntries(Seq.empty, Seq.empty) +} + +trait ClassFileEntry extends ClassRepresentation { + def file: AbstractFile +} + +trait SourceFileEntry extends ClassRepresentation { + def file: AbstractFile +} + +case class PackageName(dottedString: String) { + val dirPathTrailingSlashJar: String = FileUtils.dirPathInJar(dottedString) + "/" + + val dirPathTrailingSlash: String = + if (java.io.File.separatorChar == '/') + dirPathTrailingSlashJar + else + FileUtils.dirPath(dottedString) + java.io.File.separator + + def isRoot: Boolean = dottedString.isEmpty + + def entryName(entry: String): String = { + if (isRoot) entry else { + val builder = new java.lang.StringBuilder(dottedString.length + 1 + entry.length) + builder.append(dottedString) + builder.append('.') + builder.append(entry) + builder.toString + } + } +} + +trait PackageEntry { + def name: String +} + +private[dotty] case class ClassFileEntryImpl(file: AbstractFile) extends ClassFileEntry { + final def fileName: String = file.name + def name: String = FileUtils.stripClassExtension(file.name) // class name + + def binary: Option[AbstractFile] = Some(file) + def source: Option[AbstractFile] = None +} + +private[dotty] case class SourceFileEntryImpl(file: AbstractFile) extends SourceFileEntry { + final def fileName: String = file.name + def name: String = FileUtils.stripSourceExtension(file.name) + + def binary: Option[AbstractFile] = None + def source: Option[AbstractFile] = Some(file) +} + +private[dotty] case class ClassAndSourceFilesEntry(classFile: AbstractFile, srcFile: 
AbstractFile) extends ClassRepresentation { + final def fileName: String = classFile.name + def name: String = FileUtils.stripClassExtension(classFile.name) + + def binary: Option[AbstractFile] = Some(classFile) + def source: Option[AbstractFile] = Some(srcFile) +} + +private[dotty] case class PackageEntryImpl(name: String) extends PackageEntry + +private[dotty] trait NoSourcePaths { + def asSourcePathString: String = "" + private[dotty] def sources(inPackage: PackageName): Seq[SourceFileEntry] = Seq.empty +} + +private[dotty] trait NoClassPaths { + def findClassFile(className: String): Option[AbstractFile] = None + private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = Seq.empty +} diff --git a/tests/pos-with-compiler-cc/dotc/classpath/ClassPathFactory.scala b/tests/pos-with-compiler-cc/dotc/classpath/ClassPathFactory.scala new file mode 100644 index 000000000000..ac8b69381938 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/classpath/ClassPathFactory.scala @@ -0,0 +1,84 @@ +/* + * Copyright (c) 2014 Contributor. All rights reserved. + */ +package dotty.tools.dotc.classpath + +import dotty.tools.io.{AbstractFile, VirtualDirectory} +import FileUtils._ +import dotty.tools.io.ClassPath +import dotty.tools.dotc.core.Contexts._ + +/** + * Provides factory methods for classpath. When creating classpath instances for a given path, + * it uses proper type of classpath depending on a types of particular files containing sources or classes. + */ +class ClassPathFactory { + /** + * Create a new classpath based on the abstract file. + */ + def newClassPath(file: AbstractFile)(using Context): ClassPath = ClassPathFactory.newClassPath(file) + + /** + * Creators for sub classpaths which preserve this context. 
+ */ + def sourcesInPath(path: String)(using Context): List[ClassPath] = + for { + file <- expandPath(path, expandStar = false) + dir <- Option(AbstractFile getDirectory file) + } + yield createSourcePath(dir) + + + def expandPath(path: String, expandStar: Boolean = true): List[String] = dotty.tools.io.ClassPath.expandPath(path, expandStar) + + def expandDir(extdir: String): List[String] = dotty.tools.io.ClassPath.expandDir(extdir) + + def contentsOfDirsInPath(path: String)(using Context): List[ClassPath] = + for { + dir <- expandPath(path, expandStar = false) + name <- expandDir(dir) + entry <- Option(AbstractFile.getDirectory(name)) + } + yield newClassPath(entry) + + def classesInExpandedPath(path: String)(using Context): IndexedSeq[ClassPath] = + classesInPathImpl(path, expand = true).toIndexedSeq + + def classesInPath(path: String)(using Context): List[ClassPath] = classesInPathImpl(path, expand = false) + + def classesInManifest(useManifestClassPath: Boolean)(using Context): List[ClassPath] = + if (useManifestClassPath) dotty.tools.io.ClassPath.manifests.map(url => newClassPath(AbstractFile getResources url)) + else Nil + + // Internal + protected def classesInPathImpl(path: String, expand: Boolean)(using Context): List[ClassPath] = + for { + file <- expandPath(path, expand) + dir <- { + def asImage = if (file.endsWith(".jimage")) Some(AbstractFile.getFile(file)) else None + Option(AbstractFile.getDirectory(file)).orElse(asImage) + } + } + yield newClassPath(dir) + + private def createSourcePath(file: AbstractFile)(using Context): ClassPath = + if (file.isJarOrZip) + ZipAndJarSourcePathFactory.create(file) + else if (file.isDirectory) + new DirectorySourcePath(file.file) + else + sys.error(s"Unsupported sourcepath element: $file") +} + +object ClassPathFactory { + def newClassPath(file: AbstractFile)(using Context): ClassPath = file match { + case vd: VirtualDirectory => VirtualDirectoryClassPath(vd) + case _ => + if (file.isJarOrZip) + 
ZipAndJarClassPathFactory.create(file) + else if (file.isDirectory) + new DirectoryClassPath(file.file) + else + sys.error(s"Unsupported classpath element: $file") + } +} diff --git a/tests/pos-with-compiler-cc/dotc/classpath/DirectoryClassPath.scala b/tests/pos-with-compiler-cc/dotc/classpath/DirectoryClassPath.scala new file mode 100644 index 000000000000..a5678970411b --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/classpath/DirectoryClassPath.scala @@ -0,0 +1,313 @@ +/* + * Copyright (c) 2014 Contributor. All rights reserved. + */ +package dotty.tools.dotc.classpath + +import scala.language.unsafeNulls + +import java.io.{File => JFile} +import java.net.URL +import java.nio.file.{FileSystems, Files} + +import dotty.tools.dotc.classpath.PackageNameUtils.{packageContains, separatePkgAndClassNames} +import dotty.tools.io.{AbstractFile, PlainFile, ClassPath, ClassRepresentation, EfficientClassPath, JDK9Reflectors} +import FileUtils._ +import PlainFile.toPlainFile + +import scala.jdk.CollectionConverters._ +import scala.collection.immutable.ArraySeq +import scala.util.control.NonFatal +import language.experimental.pureFunctions + +/** + * A trait allowing to look for classpath entries in directories. It provides common logic for + * classes handling class and source files. + * It makes use of the fact that in the case of nested directories it's easy to find a file + * when we have a name of a package. + * It abstracts over the file representation to work with both JFile and AbstractFile. + */ +trait DirectoryLookup[FileEntryType <: ClassRepresentation] extends EfficientClassPath { + type F + + val dir: F + + protected def emptyFiles: Array[F] // avoids reifying ClassTag[F] + protected def getSubDir(dirName: String): Option[F] + protected def listChildren(dir: F, filter: Option[F -> Boolean] = (None: Option[F -> Boolean])): Array[F] // !cc! 
need explicit typing of default argument + protected def getName(f: F): String + protected def toAbstractFile(f: F): AbstractFile + protected def isPackage(f: F): Boolean + + protected def createFileEntry(file: AbstractFile): FileEntryType + protected def isMatchingFile(f: F): Boolean + + private def getDirectory(forPackage: PackageName): Option[F] = + if (forPackage.isRoot) + Some(dir) + else + getSubDir(forPackage.dirPathTrailingSlash) + + override private[dotty] def hasPackage(pkg: PackageName): Boolean = getDirectory(pkg).isDefined + + private[dotty] def packages(inPackage: PackageName): Seq[PackageEntry] = { + val dirForPackage = getDirectory(inPackage) + val nestedDirs: Array[F] = dirForPackage match { + case None => emptyFiles + case Some(directory) => listChildren(directory, Some(isPackage)) + } + ArraySeq.unsafeWrapArray(nestedDirs).map(f => PackageEntryImpl(inPackage.entryName(getName(f)))) + } + + protected def files(inPackage: PackageName): Seq[FileEntryType] = { + val dirForPackage = getDirectory(inPackage) + val files: Array[F] = dirForPackage match { + case None => emptyFiles + case Some(directory) => listChildren(directory, Some(isMatchingFile)) + } + files.iterator.map(f => createFileEntry(toAbstractFile(f))).toSeq + } + + override def list(inPackage: PackageName, onPackageEntry: PackageEntry => Unit, onClassesAndSources: ClassRepresentation => Unit): Unit = { + val dirForPackage = getDirectory(inPackage) + dirForPackage match { + case None => + case Some(directory) => + for (file <- listChildren(directory)) { + if (isPackage(file)) + onPackageEntry(PackageEntryImpl(inPackage.entryName(getName(file)))) + else if (isMatchingFile(file)) + onClassesAndSources(createFileEntry(toAbstractFile(file))) + } + } + } +} + +trait JFileDirectoryLookup[FileEntryType <: ClassRepresentation] extends DirectoryLookup[FileEntryType] { + type F = JFile + + protected def emptyFiles: Array[JFile] = Array.empty + protected def getSubDir(packageDirName: String): 
Option[JFile] = { + val packageDir = new JFile(dir, packageDirName) + if (packageDir.exists && packageDir.isDirectory) Some(packageDir) + else None + } + protected def listChildren(dir: JFile, filter: Option[JFile -> Boolean]): Array[JFile] = { + val listing = filter match { + case Some(f) => dir.listFiles(mkFileFilter(f)) + case None => dir.listFiles() + } + + if (listing != null) { + // Sort by file name for stable order of directory .class entries in package scope. + // This gives stable results ordering of base type sequences for unrelated classes + // with the same base type depth. + // + // Notably, this will stably infer`Product with Serializable` + // as the type of `case class C(); case class D(); List(C(), D()).head`, rather than the opposite order. + // On Mac, the HFS performs this sorting transparently, but on Linux the order is unspecified. + // + // Note this behaviour can be enabled in javac with `javac -XDsortfiles`, but that's only + // intended to improve determinism of the compiler for compiler hackers. 
+ java.util.Arrays.sort(listing, + new java.util.Comparator[JFile] { + def compare(o1: JFile, o2: JFile) = o1.getName.compareTo(o2.getName) + }) + listing + } + else Array() + } + protected def getName(f: JFile): String = f.getName + protected def toAbstractFile(f: JFile): AbstractFile = f.toPath.toPlainFile + protected def isPackage(f: JFile): Boolean = f.isPackage + + assert(dir != null, "Directory file in DirectoryFileLookup cannot be null") + + def asURLs: Seq[URL] = Seq(dir.toURI.toURL) + def asClassPathStrings: Seq[String] = Seq(dir.getPath) +} + +object JrtClassPath { + import java.nio.file._, java.net.URI + def apply(release: Option[String]): Option[ClassPath] = { + import scala.util.Properties._ + if (!isJavaAtLeast("9")) None + else { + // Longer term we'd like an official API for this in the JDK + // Discussion: http://mail.openjdk.java.net/pipermail/compiler-dev/2018-March/thread.html#11738 + + val currentMajorVersion: Int = JDK9Reflectors.runtimeVersionMajor(JDK9Reflectors.runtimeVersion()).intValue() + release match { + case Some(v) if v.toInt < currentMajorVersion => + try { + val ctSym = Paths.get(javaHome).resolve("lib").resolve("ct.sym") + if (Files.notExists(ctSym)) None + else Some(new CtSymClassPath(ctSym, v.toInt)) + } catch { + case NonFatal(_) => None + } + case _ => + try { + val fs = FileSystems.getFileSystem(URI.create("jrt:/")) + Some(new JrtClassPath(fs)) + } catch { + case _: ProviderNotFoundException | _: FileSystemNotFoundException => None + } + } + } + } +} + +/** + * Implementation `ClassPath` based on the JDK 9 encapsulated runtime modules (JEP-220) + * + * https://bugs.openjdk.java.net/browse/JDK-8066492 is the most up to date reference + * for the structure of the jrt:// filesystem. + * + * The implementation assumes that no classes exist in the empty package. 
+ */ +final class JrtClassPath(fs: java.nio.file.FileSystem) extends ClassPath with NoSourcePaths { + import java.nio.file.Path, java.nio.file._ + type F = Path + private val dir: Path = fs.getPath("/packages") + + // e.g. "java.lang" -> Seq("/modules/java.base") + private val packageToModuleBases: Map[String, Seq[Path]] = { + val ps = Files.newDirectoryStream(dir).iterator().asScala + def lookup(pack: Path): Seq[Path] = + Files.list(pack).iterator().asScala.map(l => if (Files.isSymbolicLink(l)) Files.readSymbolicLink(l) else l).toList + ps.map(p => (p.toString.stripPrefix("/packages/"), lookup(p))).toMap + } + + /** Empty string represents root package */ + override private[dotty] def hasPackage(pkg: PackageName): Boolean = packageToModuleBases.contains(pkg.dottedString) + + override private[dotty] def packages(inPackage: PackageName): Seq[PackageEntry] = + packageToModuleBases.keysIterator.filter(pack => packageContains(inPackage.dottedString, pack)).map(PackageEntryImpl(_)).toVector + + private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = + if (inPackage.isRoot) Nil + else + packageToModuleBases.getOrElse(inPackage.dottedString, Nil).flatMap(x => + Files.list(x.resolve(inPackage.dirPathTrailingSlash)).iterator().asScala.filter(_.getFileName.toString.endsWith(".class"))).map(x => + ClassFileEntryImpl(x.toPlainFile)).toVector + + override private[dotty] def list(inPackage: PackageName): ClassPathEntries = + if (inPackage.isRoot) ClassPathEntries(packages(inPackage), Nil) + else ClassPathEntries(packages(inPackage), classes(inPackage)) + + def asURLs: Seq[URL] = Seq(new URL("jrt:/")) + // We don't yet have a scheme to represent the JDK modules in our `-classpath`. + // java models them as entries in the new "module path", we'll probably need to follow this. 
+ def asClassPathStrings: Seq[String] = Nil + + def findClassFile(className: String): Option[AbstractFile] = + if (!className.contains(".")) None + else { + val (inPackage, _) = separatePkgAndClassNames(className) + packageToModuleBases.getOrElse(inPackage, Nil).iterator.flatMap{ x => + val file = x.resolve(FileUtils.dirPath(className) + ".class") + if (Files.exists(file)) file.toPlainFile :: Nil else Nil + }.take(1).toList.headOption + } +} + +/** + * Implementation `ClassPath` based on the \$JAVA_HOME/lib/ct.sym backing http://openjdk.java.net/jeps/247 + */ +final class CtSymClassPath(ctSym: java.nio.file.Path, release: Int) extends ClassPath with NoSourcePaths { + import java.nio.file.Path, java.nio.file._ + + private val fileSystem: FileSystem = FileSystems.newFileSystem(ctSym, null: ClassLoader) + private val root: Path = fileSystem.getRootDirectories.iterator.next + private val roots = Files.newDirectoryStream(root).iterator.asScala.toList + + // http://mail.openjdk.java.net/pipermail/compiler-dev/2018-March/011737.html + private def codeFor(major: Int): String = if (major < 10) major.toString else ('A' + (major - 10)).toChar.toString + + private val releaseCode: String = codeFor(release) + private def fileNameMatchesRelease(fileName: String) = !fileName.contains("-") && fileName.contains(releaseCode) // exclude `9-modules` + private val rootsForRelease: List[Path] = roots.filter(root => fileNameMatchesRelease(root.getFileName.toString)) + + // e.g. 
"java.lang" -> Seq(/876/java/lang, /87/java/lang, /8/java/lang)) + private val packageIndex: scala.collection.Map[String, scala.collection.Seq[Path]] = { + val index = collection.mutable.AnyRefMap[String, collection.mutable.ListBuffer[Path]]() + val isJava12OrHigher = scala.util.Properties.isJavaAtLeast("12") + rootsForRelease.foreach(root => Files.walk(root).iterator().asScala.filter(Files.isDirectory(_)).foreach { p => + val moduleNamePathElementCount = if (isJava12OrHigher) 1 else 0 + if (p.getNameCount > root.getNameCount + moduleNamePathElementCount) { + val packageDotted = p.subpath(moduleNamePathElementCount + root.getNameCount, p.getNameCount).toString.replace('/', '.') + index.getOrElseUpdate(packageDotted, new collection.mutable.ListBuffer) += p + } + }) + index + } + + /** Empty string represents root package */ + override private[dotty] def hasPackage(pkg: PackageName) = packageIndex.contains(pkg.dottedString) + override private[dotty] def packages(inPackage: PackageName): Seq[PackageEntry] = { + packageIndex.keysIterator.filter(pack => packageContains(inPackage.dottedString, pack)).map(PackageEntryImpl(_)).toVector + } + private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = { + if (inPackage.isRoot) Nil + else { + val sigFiles = packageIndex.getOrElse(inPackage.dottedString, Nil).iterator.flatMap(p => + Files.list(p).iterator.asScala.filter(_.getFileName.toString.endsWith(".sig"))) + sigFiles.map(f => ClassFileEntryImpl(f.toPlainFile)).toVector + } + } + + override private[dotty] def list(inPackage: PackageName): ClassPathEntries = + if (inPackage.isRoot) ClassPathEntries(packages(inPackage), Nil) + else ClassPathEntries(packages(inPackage), classes(inPackage)) + + def asURLs: Seq[URL] = Nil + def asClassPathStrings: Seq[String] = Nil + def findClassFile(className: String): Option[AbstractFile] = { + if (!className.contains(".")) None + else { + val (inPackage, classSimpleName) = separatePkgAndClassNames(className) + 
packageIndex.getOrElse(inPackage, Nil).iterator.flatMap { p => + val path = p.resolve(classSimpleName + ".sig") + if (Files.exists(path)) path.toPlainFile :: Nil else Nil + }.take(1).toList.headOption + } + } +} + +case class DirectoryClassPath(dir: JFile) extends JFileDirectoryLookup[ClassFileEntryImpl] with NoSourcePaths { + override def findClass(className: String): Option[ClassRepresentation] = findClassFile(className) map ClassFileEntryImpl.apply + + def findClassFile(className: String): Option[AbstractFile] = { + val relativePath = FileUtils.dirPath(className) + val classFile = new JFile(dir, relativePath + ".class") + if (classFile.exists) { + Some(classFile.toPath.toPlainFile) + } + else None + } + + protected def createFileEntry(file: AbstractFile): ClassFileEntryImpl = ClassFileEntryImpl(file) + protected def isMatchingFile(f: JFile): Boolean = f.isClass + + private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = files(inPackage) +} + +case class DirectorySourcePath(dir: JFile) extends JFileDirectoryLookup[SourceFileEntryImpl] with NoClassPaths { + def asSourcePathString: String = asClassPathString + + protected def createFileEntry(file: AbstractFile): SourceFileEntryImpl = SourceFileEntryImpl(file) + protected def isMatchingFile(f: JFile): Boolean = endsScalaOrJava(f.getName) + + override def findClass(className: String): Option[ClassRepresentation] = findSourceFile(className) map SourceFileEntryImpl.apply + + private def findSourceFile(className: String): Option[AbstractFile] = { + val relativePath = FileUtils.dirPath(className) + val sourceFile = LazyList("scala", "java") + .map(ext => new JFile(dir, relativePath + "." 
+ ext)) + .collectFirst { case file if file.exists() => file } + + sourceFile.map(_.toPath.toPlainFile) + } + + private[dotty] def sources(inPackage: PackageName): Seq[SourceFileEntry] = files(inPackage) +} diff --git a/tests/pos-with-compiler-cc/dotc/classpath/FileUtils.scala b/tests/pos-with-compiler-cc/dotc/classpath/FileUtils.scala new file mode 100644 index 000000000000..0f5ac16b40bf --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/classpath/FileUtils.scala @@ -0,0 +1,85 @@ +/* + * Copyright (c) 2014 Contributor. All rights reserved. + */ +package dotty.tools +package dotc.classpath + +import scala.language.unsafeNulls + +import java.io.{File => JFile, FileFilter} +import java.net.URL +import dotty.tools.io.AbstractFile +import language.experimental.pureFunctions + +/** + * Common methods related to Java files and abstract files used in the context of classpath + */ +object FileUtils { + extension (file: AbstractFile) { + def isPackage: Boolean = file.isDirectory && mayBeValidPackage(file.name) + + def isClass: Boolean = !file.isDirectory && file.hasExtension("class") && !file.name.endsWith("$class.class") + // FIXME: drop last condition when we stop being compatible with Scala 2.11 + + def isScalaOrJavaSource: Boolean = !file.isDirectory && (file.hasExtension("scala") || file.hasExtension("java")) + + // TODO do we need to check also other files using ZipMagicNumber like in scala.tools.nsc.io.Jar.isJarOrZip? 
+ def isJarOrZip: Boolean = file.hasExtension("jar") || file.hasExtension("zip") + + /** + * Safe method returning a sequence containing one URL representing this file, when underlying file exists, + * and returning given default value in other case + */ + def toURLs(default: => Seq[URL] = Seq.empty): Seq[URL] = if (file.file == null) default else Seq(file.toURL) + } + + extension (file: JFile) { + def isPackage: Boolean = file.isDirectory && mayBeValidPackage(file.getName) + + def isClass: Boolean = file.isFile && file.getName.endsWith(".class") && !file.getName.endsWith("$class.class") + // FIXME: drop last condition when we stop being compatible with Scala 2.11 + } + + private val SUFFIX_CLASS = ".class" + private val SUFFIX_SCALA = ".scala" + private val SUFFIX_JAVA = ".java" + private val SUFFIX_SIG = ".sig" + + def stripSourceExtension(fileName: String): String = + if (endsScala(fileName)) stripClassExtension(fileName) + else if (endsJava(fileName)) stripJavaExtension(fileName) + else throw new FatalError("Unexpected source file ending: " + fileName) + + def dirPath(forPackage: String): String = forPackage.replace('.', JFile.separatorChar) + + def dirPathInJar(forPackage: String): String = forPackage.replace('.', '/') + + inline private def ends (filename:String, suffix:String) = filename.endsWith(suffix) && filename.length > suffix.length + + def endsClass(fileName: String): Boolean = + ends (fileName, SUFFIX_CLASS) || fileName.endsWith(SUFFIX_SIG) + + def endsScalaOrJava(fileName: String): Boolean = + endsScala(fileName) || endsJava(fileName) + + def endsJava(fileName: String): Boolean = + ends (fileName, SUFFIX_JAVA) + + def endsScala(fileName: String): Boolean = + ends (fileName, SUFFIX_SCALA) + + def stripClassExtension(fileName: String): String = + fileName.substring(0, fileName.lastIndexOf('.')) + + def stripJavaExtension(fileName: String): String = + fileName.substring(0, fileName.length - 5) // equivalent of fileName.length - SUFFIX_JAVA.length + + 
// probably it should match a pattern like [a-z_]{1}[a-z0-9_]* but it cannot be changed + // because then some tests in partest don't pass + def mayBeValidPackage(dirName: String): Boolean = + (dirName != "META-INF") && (dirName != "") && (dirName.charAt(0) != '.') + + def mkFileFilter(f: JFile -> Boolean): FileFilter = new FileFilter { + def accept(pathname: JFile): Boolean = f(pathname) + } +} diff --git a/tests/pos-with-compiler-cc/dotc/classpath/PackageNameUtils.scala b/tests/pos-with-compiler-cc/dotc/classpath/PackageNameUtils.scala new file mode 100644 index 000000000000..ea7412f15d8a --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/classpath/PackageNameUtils.scala @@ -0,0 +1,37 @@ +/* + * Copyright (c) 2014 Contributor. All rights reserved. + */ +package dotty.tools.dotc.classpath + +import dotty.tools.io.ClassPath.RootPackage + +/** + * Common methods related to package names represented as String + */ +object PackageNameUtils { + + /** + * @param fullClassName full class name with package + * @return (package, simple class name) + */ + inline def separatePkgAndClassNames(fullClassName: String): (String, String) = { + val lastDotIndex = fullClassName.lastIndexOf('.') + if (lastDotIndex == -1) + (RootPackage, fullClassName) + else + (fullClassName.substring(0, lastDotIndex).nn, fullClassName.substring(lastDotIndex + 1).nn) + } + + def packagePrefix(inPackage: String): String = if (inPackage == RootPackage) "" else inPackage + "." 
+ + /** + * `true` if `packageDottedName` is a package directly nested in `inPackage`, for example: + * - `packageContains("scala", "scala.collection")` + * - `packageContains("", "scala")` + */ + def packageContains(inPackage: String, packageDottedName: String) = { + if (packageDottedName.contains(".")) + packageDottedName.startsWith(inPackage) && packageDottedName.lastIndexOf('.') == inPackage.length + else inPackage == "" + } +} diff --git a/tests/pos-with-compiler-cc/dotc/classpath/VirtualDirectoryClassPath.scala b/tests/pos-with-compiler-cc/dotc/classpath/VirtualDirectoryClassPath.scala new file mode 100644 index 000000000000..ac80d543b539 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/classpath/VirtualDirectoryClassPath.scala @@ -0,0 +1,55 @@ +package dotty.tools.dotc.classpath + +import scala.language.unsafeNulls + +import dotty.tools.io.ClassRepresentation +import dotty.tools.io.{AbstractFile, VirtualDirectory} +import FileUtils._ +import java.net.URL + +import dotty.tools.io.ClassPath +import language.experimental.pureFunctions + +case class VirtualDirectoryClassPath(dir: VirtualDirectory) extends ClassPath with DirectoryLookup[ClassFileEntryImpl] with NoSourcePaths { + type F = AbstractFile + + // From AbstractFileClassLoader + private final def lookupPath(base: AbstractFile)(pathParts: Seq[String], directory: Boolean): AbstractFile = { + var file: AbstractFile = base + val dirParts = pathParts.init.iterator + while (dirParts.hasNext) { + val dirPart = dirParts.next + file = file.lookupName(dirPart, directory = true) + if (file == null) + return null + } + file.lookupName(pathParts.last, directory = directory) + } + + protected def emptyFiles: Array[AbstractFile] = Array.empty + protected def getSubDir(packageDirName: String): Option[AbstractFile] = + Option(lookupPath(dir)(packageDirName.split(java.io.File.separator).toIndexedSeq, directory = true)) + protected def listChildren(dir: AbstractFile, filter: Option[AbstractFile -> Boolean]): Array[F] = 
filter match { + case Some(f) => dir.iterator.filter(f).toArray + case _ => dir.toArray + } + def getName(f: AbstractFile): String = f.name + def toAbstractFile(f: AbstractFile): AbstractFile = f + def isPackage(f: AbstractFile): Boolean = f.isPackage + + // mimic the behavior of the old nsc.util.DirectoryClassPath + def asURLs: Seq[URL] = Seq(new URL(dir.name)) + def asClassPathStrings: Seq[String] = Seq(dir.path) + + override def findClass(className: String): Option[ClassRepresentation] = findClassFile(className) map ClassFileEntryImpl.apply + + def findClassFile(className: String): Option[AbstractFile] = { + val relativePath = FileUtils.dirPath(className) + ".class" + Option(lookupPath(dir)(relativePath.split(java.io.File.separator).toIndexedSeq, directory = false)) + } + + private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = files(inPackage) + + protected def createFileEntry(file: AbstractFile): ClassFileEntryImpl = ClassFileEntryImpl(file) + protected def isMatchingFile(f: AbstractFile): Boolean = f.isClass +} diff --git a/tests/pos-with-compiler-cc/dotc/classpath/ZipAndJarFileLookupFactory.scala b/tests/pos-with-compiler-cc/dotc/classpath/ZipAndJarFileLookupFactory.scala new file mode 100644 index 000000000000..865f95551a0b --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/classpath/ZipAndJarFileLookupFactory.scala @@ -0,0 +1,205 @@ +/* + * Copyright (c) 2014 Contributor. All rights reserved. + */ +package dotty.tools.dotc +package classpath + +import scala.language.unsafeNulls + +import java.io.File +import java.net.URL +import java.nio.file.Files +import java.nio.file.attribute.{BasicFileAttributes, FileTime} + +import scala.annotation.tailrec +import dotty.tools.io.{AbstractFile, ClassPath, ClassRepresentation, FileZipArchive, ManifestResources} +import dotty.tools.dotc.core.Contexts._ +import FileUtils._ + +/** + * A trait providing an optional cache for classpath entries obtained from zip and jar files. + * It allows us to e.g. 
reduce significantly memory used by PresentationCompilers in Scala IDE + * when there are a lot of projects having a lot of common dependencies. + */ +sealed trait ZipAndJarFileLookupFactory { + private val cache = new FileBasedCache[ClassPath] + + def create(zipFile: AbstractFile)(using Context): ClassPath = + val release = Option(ctx.settings.javaOutputVersion.value).filter(_.nonEmpty) + if (ctx.settings.YdisableFlatCpCaching.value || zipFile.file == null) createForZipFile(zipFile, release) + else createUsingCache(zipFile, release) + + protected def createForZipFile(zipFile: AbstractFile, release: Option[String]): ClassPath + + private def createUsingCache(zipFile: AbstractFile, release: Option[String]): ClassPath = + cache.getOrCreate(zipFile.file.toPath, () => createForZipFile(zipFile, release)) +} + +/** + * Manages creation of classpath for class files placed in zip and jar files. + * It should be the only way of creating them as it provides caching. + */ +object ZipAndJarClassPathFactory extends ZipAndJarFileLookupFactory { + private case class ZipArchiveClassPath(zipFile: File, override val release: Option[String]) + extends ZipArchiveFileLookup[ClassFileEntryImpl] + with NoSourcePaths { + + override def findClassFile(className: String): Option[AbstractFile] = { + val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className) + file(PackageName(pkg), simpleClassName + ".class").map(_.file) + } + + // This method is performance sensitive as it is used by SBT's ExtractDependencies phase. 
+ override def findClass(className: String): Option[ClassRepresentation] = { + val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className) + file(PackageName(pkg), simpleClassName + ".class") + } + + override private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = files(inPackage) + + override protected def createFileEntry(file: FileZipArchive#Entry): ClassFileEntryImpl = ClassFileEntryImpl(file) + override protected def isRequiredFileType(file: AbstractFile): Boolean = file.isClass + } + + /** + * This type of classpath is closely related to the support for JSR-223. + * Its usage can be observed e.g. when running: + * jrunscript -classpath scala-compiler.jar;scala-reflect.jar;scala-library.jar -l scala + * with a particularly prepared scala-library.jar. It should have all classes listed in the manifest like e.g. this entry: + * Name: scala/Function2$mcFJD$sp.class + */ + private case class ManifestResourcesClassPath(file: ManifestResources) extends ClassPath with NoSourcePaths { + override def findClassFile(className: String): Option[AbstractFile] = { + val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className) + classes(PackageName(pkg)).find(_.name == simpleClassName).map(_.file) + } + + override def asClassPathStrings: Seq[String] = Seq(file.path) + + override def asURLs: Seq[URL] = file.toURLs() + + import ManifestResourcesClassPath.PackageFileInfo + import ManifestResourcesClassPath.PackageInfo + + /** + * A cache mapping package name to abstract file for package directory and subpackages of given package. + * + * ManifestResources can iterate through the collections of entries from e.g. remote jar file. + * We can't just specify the path to the concrete directory etc. so we can't just 'jump' into + * given package, when it's needed. On the other hand we can iterate over entries to get + * AbstractFiles, iterate over entries of these files etc. 
+ * + * Instead of traversing a tree of AbstractFiles once and caching all entries or traversing each time, + * when we need subpackages of a given package or its classes, we traverse once and cache only packages. + * Classes for given package can be then easily loaded when they are needed. + */ + private lazy val cachedPackages: util.HashMap[String, PackageFileInfo] = { + val packages = util.HashMap[String, PackageFileInfo]() + + def getSubpackages(dir: AbstractFile): List[AbstractFile] = + (for (file <- dir if file.isPackage) yield file).toList + + @tailrec + def traverse(packagePrefix: String, + filesForPrefix: List[AbstractFile], + subpackagesQueue: collection.mutable.Queue[PackageInfo]): Unit = filesForPrefix match { + case pkgFile :: remainingFiles => + val subpackages = getSubpackages(pkgFile) + val fullPkgName = packagePrefix + pkgFile.name + packages(fullPkgName) = PackageFileInfo(pkgFile, subpackages) + val newPackagePrefix = fullPkgName + "." + subpackagesQueue.enqueue(PackageInfo(newPackagePrefix, subpackages)) + traverse(packagePrefix, remainingFiles, subpackagesQueue) + case Nil if subpackagesQueue.nonEmpty => + val PackageInfo(packagePrefix, filesForPrefix) = subpackagesQueue.dequeue() + traverse(packagePrefix, filesForPrefix, subpackagesQueue) + case _ => + } + + val subpackages = getSubpackages(file) + packages(ClassPath.RootPackage) = PackageFileInfo(file, subpackages) + traverse(ClassPath.RootPackage, subpackages, collection.mutable.Queue()) + packages + } + + override private[dotty] def packages(inPackage: PackageName): Seq[PackageEntry] = cachedPackages.get(inPackage.dottedString) match { + case None => Seq.empty + case Some(PackageFileInfo(_, subpackages)) => + subpackages.map(packageFile => PackageEntryImpl(inPackage.entryName(packageFile.name))) + } + + override private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = cachedPackages.get(inPackage.dottedString) match { + case None => Seq.empty + case Some(PackageFileInfo(pkg, 
_)) => + (for (file <- pkg if file.isClass) yield ClassFileEntryImpl(file)).toSeq + } + + override private[dotty] def hasPackage(pkg: PackageName) = cachedPackages.contains(pkg.dottedString) + override private[dotty] def list(inPackage: PackageName): ClassPathEntries = ClassPathEntries(packages(inPackage), classes(inPackage)) + } + + private object ManifestResourcesClassPath { + case class PackageFileInfo(packageFile: AbstractFile, subpackages: Seq[AbstractFile]) + case class PackageInfo(packageName: String, subpackages: List[AbstractFile]) + } + + override protected def createForZipFile(zipFile: AbstractFile, release: Option[String]): ClassPath = + if (zipFile.file == null) createWithoutUnderlyingFile(zipFile) + else ZipArchiveClassPath(zipFile.file, release) + + private def createWithoutUnderlyingFile(zipFile: AbstractFile) = zipFile match { + case manifestRes: ManifestResources => + ManifestResourcesClassPath(manifestRes) + case _ => + val errorMsg = s"Abstract files which don't have an underlying file and are not ManifestResources are not supported. There was $zipFile" + throw new IllegalArgumentException(errorMsg) + } +} + +/** + * Manages creation of classpath for source files placed in zip and jar files. + * It should be the only way of creating them as it provides caching. 
+ */ +object ZipAndJarSourcePathFactory extends ZipAndJarFileLookupFactory { + private case class ZipArchiveSourcePath(zipFile: File) + extends ZipArchiveFileLookup[SourceFileEntryImpl] + with NoClassPaths { + + def release: Option[String] = None + + override def asSourcePathString: String = asClassPathString + + override private[dotty] def sources(inPackage: PackageName): Seq[SourceFileEntry] = files(inPackage) + + override protected def createFileEntry(file: FileZipArchive#Entry): SourceFileEntryImpl = SourceFileEntryImpl(file) + override protected def isRequiredFileType(file: AbstractFile): Boolean = file.isScalaOrJavaSource + } + + override protected def createForZipFile(zipFile: AbstractFile, release: Option[String]): ClassPath = ZipArchiveSourcePath(zipFile.file) +} + +final class FileBasedCache[T] { + private case class Stamp(lastModified: FileTime, fileKey: Object) + private val cache = collection.mutable.Map.empty[java.nio.file.Path, (Stamp, T)] + + def getOrCreate(path: java.nio.file.Path, create: () => T): T = cache.synchronized { + val attrs = Files.readAttributes(path, classOf[BasicFileAttributes]) + val lastModified = attrs.lastModifiedTime() + // only null on some platforms, but that's okay, we just use the last modified timestamp as our stamp + val fileKey = attrs.fileKey() + val stamp = Stamp(lastModified, fileKey) + cache.get(path) match { + case Some((cachedStamp, cached)) if cachedStamp == stamp => cached + case _ => + val value = create() + cache.put(path, (stamp, value)) + value + } + } + + def clear(): Unit = cache.synchronized { + // TODO support closing + // cache.valuesIterator.foreach(_.close()) + cache.clear() + } +} diff --git a/tests/pos-with-compiler-cc/dotc/classpath/ZipArchiveFileLookup.scala b/tests/pos-with-compiler-cc/dotc/classpath/ZipArchiveFileLookup.scala new file mode 100644 index 000000000000..e241feee8244 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/classpath/ZipArchiveFileLookup.scala @@ -0,0 +1,72 @@ +/* + * 
Copyright (c) 2014 Contributor. All rights reserved. + */ +package dotty.tools.dotc.classpath + +import scala.language.unsafeNulls + +import java.io.File +import java.net.URL + +import dotty.tools.io.{ AbstractFile, FileZipArchive } +import FileUtils._ +import dotty.tools.io.{EfficientClassPath, ClassRepresentation} + +/** + * A trait allowing to look for classpath entries of given type in zip and jar files. + * It provides common logic for classes handling class and source files. + * It's aware of things like e.g. META-INF directory which is correctly skipped. + */ +trait ZipArchiveFileLookup[FileEntryType <: ClassRepresentation] extends EfficientClassPath { + val zipFile: File + def release: Option[String] + + assert(zipFile != null, "Zip file in ZipArchiveFileLookup cannot be null") + + override def asURLs: Seq[URL] = Seq(zipFile.toURI.toURL) + override def asClassPathStrings: Seq[String] = Seq(zipFile.getPath) + + private val archive = new FileZipArchive(zipFile.toPath, release) + + override private[dotty] def packages(inPackage: PackageName): Seq[PackageEntry] = { + for { + dirEntry <- findDirEntry(inPackage).toSeq + entry <- dirEntry.iterator if entry.isPackage + } + yield PackageEntryImpl(inPackage.entryName(entry.name)) + } + + protected def files(inPackage: PackageName): Seq[FileEntryType] = + for { + dirEntry <- findDirEntry(inPackage).toSeq + entry <- dirEntry.iterator if isRequiredFileType(entry) + } + yield createFileEntry(entry) + + protected def file(inPackage: PackageName, name: String): Option[FileEntryType] = + for { + dirEntry <- findDirEntry(inPackage) + entry <- Option(dirEntry.lookupName(name, directory = false)) + if isRequiredFileType(entry) + } + yield createFileEntry(entry) + + override def hasPackage(pkg: PackageName) = findDirEntry(pkg).isDefined + def list(inPackage: PackageName, onPackageEntry: PackageEntry => Unit, onClassesAndSources: ClassRepresentation => Unit): Unit = + findDirEntry(inPackage) match { + case Some(dirEntry) => + 
for (entry <- dirEntry.iterator) { + if (entry.isPackage) + onPackageEntry(PackageEntryImpl(inPackage.entryName(entry.name))) + else if (isRequiredFileType(entry)) + onClassesAndSources(createFileEntry(entry)) + } + case None => + } + + private def findDirEntry(pkg: PackageName): Option[archive.DirEntry] = + archive.allDirs.get(pkg.dirPathTrailingSlashJar) + + protected def createFileEntry(file: FileZipArchive#Entry): FileEntryType + protected def isRequiredFileType(file: AbstractFile): Boolean +} diff --git a/tests/pos-with-compiler-cc/dotc/config/CliCommand.scala b/tests/pos-with-compiler-cc/dotc/config/CliCommand.scala new file mode 100644 index 000000000000..68c900e405da --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/config/CliCommand.scala @@ -0,0 +1,198 @@ +package dotty.tools.dotc +package config + +import scala.language.unsafeNulls + +import Settings._ +import core.Contexts._ +import printing.Highlighting + +import scala.util.chaining.given +import scala.PartialFunction.cond + +trait CliCommand: + + type ConcreteSettings <: CommonScalaSettings with Settings.SettingGroup + + def versionMsg: String + + def ifErrorsMsg: String + + /** The name of the command */ + def cmdName: String + + def isHelpFlag(using settings: ConcreteSettings)(using SettingsState): Boolean + + def helpMsg(using settings: ConcreteSettings)(using SettingsState, Context): String + + private def explainAdvanced = """ + |-- Notes on option parsing -- + |Boolean settings are always false unless set. + |Where multiple values are accepted, they should be comma-separated. + | example: -Xplugin:plugin1,plugin2 + | means one or a comma-separated list of: + | - (partial) phase names with an optional "+" suffix to include the next phase + | - the string "all" + | example: -Xprint:all prints all phases. + | example: -Xprint:typer,mixin prints the typer and mixin phases. + | example: -Ylog:erasure+ logs the erasure phase and the phase after the erasure phase. 
+ | This is useful because during the tree transform of phase X, we often + | already are in phase X + 1. + """ + + /** Distill arguments into summary detailing settings, errors and files to main */ + def distill(args: Array[String], sg: Settings.SettingGroup)(ss: SettingsState = sg.defaultState)(using Context): ArgsSummary = + + // expand out @filename to the contents of that filename + def expandedArguments = args.toList flatMap { + case x if x startsWith "@" => CommandLineParser.expandArg(x) + case x => List(x) + } + + sg.processArguments(expandedArguments, processAll = true, settingsState = ss) + end distill + + /** Creates a help message for a subset of options based on cond */ + protected def availableOptionsMsg(p: Setting[?] => Boolean)(using settings: ConcreteSettings)(using SettingsState): String = + // result is (Option Name, descrption\ndefault: value\nchoices: x, y, z + def help(s: Setting[?]): (String, String) = + // For now, skip the default values that do not make sense for the end user, such as 'false' for the version command. + def defaultValue = s.default match + case _: Int | _: String => s.default.toString + case _ => "" + val info = List(shortHelp(s), if defaultValue.nonEmpty then s"Default $defaultValue" else "", if s.legalChoices.nonEmpty then s"Choices ${s.legalChoices}" else "") + (s.name, info.filter(_.nonEmpty).mkString("\n")) + end help + + val ss = settings.allSettings.filter(p).toList.sortBy(_.name) + val formatter = Columnator("", "", maxField = 30) + val fresh = ContextBase().initialCtx.fresh.setSettings(summon[SettingsState]) + formatter(List(ss.map(help) :+ ("@", "A text file containing compiler arguments (options and source files).")))(using fresh) + end availableOptionsMsg + + protected def shortUsage: String = s"Usage: $cmdName " + + protected def createUsageMsg(label: String, shouldExplain: Boolean, cond: Setting[?] 
=> Boolean)(using settings: ConcreteSettings)(using SettingsState): String = + val prefix = List( + Some(shortUsage), + Some(explainAdvanced).filter(_ => shouldExplain), + Some(label + " options include:") + ).flatten.mkString("\n") + + prefix + "\n" + availableOptionsMsg(cond) + + protected def isStandard(s: Setting[?])(using settings: ConcreteSettings)(using SettingsState): Boolean = + !isVerbose(s) && !isWarning(s) && !isAdvanced(s) && !isPrivate(s) || s.name == "-Werror" || s.name == "-Wconf" + protected def isVerbose(s: Setting[?])(using settings: ConcreteSettings)(using SettingsState): Boolean = + s.name.startsWith("-V") && s.name != "-V" + protected def isWarning(s: Setting[?])(using settings: ConcreteSettings)(using SettingsState): Boolean = + s.name.startsWith("-W") && s.name != "-W" || s.name == "-Xlint" + protected def isAdvanced(s: Setting[?])(using settings: ConcreteSettings)(using SettingsState): Boolean = + s.name.startsWith("-X") && s.name != "-X" + protected def isPrivate(s: Setting[?])(using settings: ConcreteSettings)(using SettingsState): Boolean = + s.name.startsWith("-Y") && s.name != "-Y" + protected def shortHelp(s: Setting[?])(using settings: ConcreteSettings)(using SettingsState): String = + s.description.linesIterator.next() + protected def isHelping(s: Setting[?])(using settings: ConcreteSettings)(using SettingsState): Boolean = + cond(s.value) { + case ss: List[?] 
if s.isMultivalue => ss.contains("help") + case s: String => "help" == s + } + + /** Messages explaining usage and options */ + protected def usageMessage(using settings: ConcreteSettings)(using SettingsState) = + createUsageMsg("where possible standard", shouldExplain = false, isStandard) + protected def vusageMessage(using settings: ConcreteSettings)(using SettingsState) = + createUsageMsg("Possible verbose", shouldExplain = true, isVerbose) + protected def wusageMessage(using settings: ConcreteSettings)(using SettingsState) = + createUsageMsg("Possible warning", shouldExplain = true, isWarning) + protected def xusageMessage(using settings: ConcreteSettings)(using SettingsState) = + createUsageMsg("Possible advanced", shouldExplain = true, isAdvanced) + protected def yusageMessage(using settings: ConcreteSettings)(using SettingsState) = + createUsageMsg("Possible private", shouldExplain = true, isPrivate) + + /** Used for the formatted output of -Xshow-phases */ + protected def phasesMessage(using Context): String = + val phases = new Compiler().phases + val formatter = Columnator("phase name", "description", maxField = 25) + formatter(phases.map(mega => mega.map(p => (p.phaseName, p.description)))) + + /** Provide usage feedback on argument summary, assuming that all settings + * are already applied in context. + * @return Either Some list of files passed as arguments or None if further processing should be interrupted. 
+ */ + def checkUsage(summary: ArgsSummary, sourcesRequired: Boolean)(using settings: ConcreteSettings)(using SettingsState, Context): Option[List[String]] = + // Print all warnings encountered during arguments parsing + summary.warnings.foreach(report.warning(_)) + + if summary.errors.nonEmpty then + summary.errors foreach (report.error(_)) + report.echo(ifErrorsMsg) + None + else if settings.version.value then + report.echo(versionMsg) + None + else if isHelpFlag then + report.echo(helpMsg) + None + else if (sourcesRequired && summary.arguments.isEmpty) + report.echo(usageMessage) + None + else + Some(summary.arguments) + + extension [T](setting: Setting[T]) + protected def value(using ss: SettingsState): T = setting.valueIn(ss) + + extension (s: String) + def padLeft(width: Int): String = String.format(s"%${width}s", s) + + // Formatting for -help and -Vphases in two columns, handling long field1 and wrapping long field2 + class Columnator(heading1: String, heading2: String, maxField: Int, separation: Int = 2): + def apply(texts: List[List[(String, String)]])(using Context): String = StringBuilder().tap(columnate(_, texts)).toString + + private def columnate(sb: StringBuilder, texts: List[List[(String, String)]])(using Context): Unit = + import Highlighting.* + val colors = Seq(Green(_), Yellow(_), Magenta(_), Cyan(_), Red(_)) + val nocolor = texts.length == 1 + def color(index: Int): String => Highlight = if nocolor then NoColor(_) else colors(index % colors.length) + val maxCol = ctx.settings.pageWidth.value + val field1 = maxField.min(texts.flatten.map(_._1.length).filter(_ < maxField).max) // widest field under maxField + val field2 = if field1 + separation + maxField < maxCol then maxCol - field1 - separation else 0 // skinny window -> terminal wrap + val separator = " " * separation + val EOL = "\n" + def formatField1(text: String): String = if text.length <= field1 then text.padLeft(field1) else text + EOL + "".padLeft(field1) + def formatField2(text: 
String): String = + def loopOverField2(fld: String): List[String] = + if field2 == 0 || fld.length <= field2 then List(fld) + else + fld.lastIndexOf(" ", field2) match + case -1 => List(fld) + case i => val (prefix, rest) = fld.splitAt(i) ; prefix :: loopOverField2(rest.trim) + text.split("\n").toList.flatMap(loopOverField2).filter(_.nonEmpty).mkString(EOL + "".padLeft(field1) + separator) + end formatField2 + def format(first: String, second: String, index: Int, colorPicker: Int => String => Highlight) = + sb.append(colorPicker(index)(formatField1(first)).show) + .append(separator) + .append(formatField2(second)) + .append(EOL): Unit + def fancy(first: String, second: String, index: Int) = format(first, second, index, color) + def plain(first: String, second: String) = format(first, second, 0, _ => NoColor(_)) + + if heading1.nonEmpty then + plain(heading1, heading2) + plain("-" * heading1.length, "-" * heading2.length) + + def emit(index: Int)(textPair: (String, String)): Unit = fancy(textPair._1, textPair._2, index) + def group(index: Int)(body: Int => Unit): Unit = + if !ctx.useColors then plain(s"{", "") + body(index) + if !ctx.useColors then plain(s"}", "") + + texts.zipWithIndex.foreach { (text, index) => + text match + case List(single) => emit(index)(single) + case Nil => + case mega => group(index)(i => mega.foreach(emit(i))) + } + end Columnator diff --git a/tests/pos-with-compiler-cc/dotc/config/CommandLineParser.scala b/tests/pos-with-compiler-cc/dotc/config/CommandLineParser.scala new file mode 100644 index 000000000000..2e76561c9913 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/config/CommandLineParser.scala @@ -0,0 +1,125 @@ +package dotty.tools.dotc.config + +import java.lang.Character.isWhitespace +import java.nio.file.{Files, Paths} +import scala.annotation.tailrec +import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters.* + +/** Split a line of text using shell conventions. 
+ */ +object CommandLineParser: + inline private val DQ = '"' + inline private val SQ = '\'' + inline private val EOF = -1 + + /** Split the line into tokens separated by whitespace. + * + * Single or double quotes can be embedded to preserve internal whitespace: + * + * `""" echo "hello, world!" """` => "echo" :: "hello, world!" :: Nil + * `""" echo hello,' 'world! """` => "echo" :: "hello, world!" :: Nil + * `""" echo \"hello, world!\" """` => "echo" :: "\"hello," :: "world!\"" :: Nil + * + * The embedded quotes are stripped. Escaping backslash is not stripped. + * + * Invoke `errorFn` with a descriptive message if an end quote is missing. + */ + def tokenize(line: String, errorFn: String => Unit): List[String] = + + var accum: List[String] = Nil + + var pos = 0 + var start = 0 + val qpos = new ArrayBuffer[Int](16) // positions of paired quotes in current token + + inline def cur = if done then EOF else line.charAt(pos): Int + inline def bump() = pos += 1 + inline def done = pos >= line.length + + // Skip to the given unescaped end quote; false on no more input. + def skipToEndQuote(q: Int): Boolean = + var escaped = false + def terminal = cur match + case _ if escaped => escaped = false ; false + case '\\' => escaped = true ; false + case `q` | EOF => true + case _ => false + while !terminal do bump() + !done + + // Skip to the next whitespace word boundary; record unescaped embedded quotes; false on missing quote. 
+ def skipToDelim(): Boolean = + var escaped = false + inline def quote() = { qpos += pos ; bump() } + @tailrec def advance(): Boolean = cur match + case _ if escaped => escaped = false ; bump() ; advance() + case '\\' => escaped = true ; bump() ; advance() + case q @ (DQ | SQ) => { quote() ; skipToEndQuote(q) } && { quote() ; advance() } + case EOF => true + case c if isWhitespace(c) => true + case _ => bump(); advance() + advance() + + def copyText(): String = + val buf = new java.lang.StringBuilder + var p = start + var i = 0 + while p < pos do + if i >= qpos.size then + buf.append(line, p, pos) + p = pos + else if p == qpos(i) then + buf.append(line, qpos(i)+1, qpos(i+1)) + p = qpos(i+1)+1 + i += 2 + else + buf.append(line, p, qpos(i)) + p = qpos(i) + buf.toString + + // the current token, stripped of any embedded quotes. + def text(): String = + val res = + if qpos.isEmpty then line.substring(start, pos) + else if qpos(0) == start && qpos(1) == pos then line.substring(start+1, pos-1) + else copyText() + qpos.clear() + res.nn + + inline def badquote() = errorFn(s"Unmatched quote [${qpos.last}](${line.charAt(qpos.last)})") + + inline def skipWhitespace() = while isWhitespace(cur) do bump() + + @tailrec def loop(): List[String] = + skipWhitespace() + start = pos + if done then + accum.reverse + else if !skipToDelim() then + badquote() + Nil + else + accum ::= text() + loop() + end loop + + loop() + end tokenize + + def tokenize(line: String): List[String] = tokenize(line, x => throw new ParseException(x)) + + /** Expands all arguments starting with @ to the contents of the file named like each argument. 
+ */ + def expandArg(arg: String): List[String] = + val path = Paths.get(arg.stripPrefix("@")) + if !Files.exists(path) then + System.err.nn.println(s"Argument file ${path.nn.getFileName} could not be found") + Nil + else + def stripComment(s: String) = s.indexOf('#') match { case -1 => s case i => s.substring(0, i) } + val lines = Files.readAllLines(path).nn + val params = lines.asScala.map(stripComment).filter(!_.nn.isEmpty).mkString(" ") + tokenize(params) + + class ParseException(msg: String) extends RuntimeException(msg) diff --git a/tests/pos-with-compiler-cc/dotc/config/CompilerCommand.scala b/tests/pos-with-compiler-cc/dotc/config/CompilerCommand.scala new file mode 100644 index 000000000000..41e123472a75 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/config/CompilerCommand.scala @@ -0,0 +1,26 @@ +package dotty.tools.dotc +package config + +import Settings._ +import core.Contexts._ + +abstract class CompilerCommand extends CliCommand: + type ConcreteSettings = ScalaSettings + + final def helpMsg(using settings: ScalaSettings)(using SettingsState, Context): String = + settings.allSettings.find(isHelping) match + case Some(s) => s.description + case _ => + if (settings.help.value) usageMessage + else if (settings.Vhelp.value) vusageMessage + else if (settings.Whelp.value) wusageMessage + else if (settings.Xhelp.value) xusageMessage + else if (settings.Yhelp.value) yusageMessage + else if (settings.showPlugins.value) ctx.base.pluginDescriptions + else if (settings.XshowPhases.value) phasesMessage + else "" + + final def isHelpFlag(using settings: ScalaSettings)(using SettingsState): Boolean = + import settings._ + val flags = Set(help, Vhelp, Whelp, Xhelp, Yhelp, showPlugins, XshowPhases) + flags.exists(_.value) || allSettings.exists(isHelping) diff --git a/tests/pos-with-compiler-cc/dotc/config/Config.scala b/tests/pos-with-compiler-cc/dotc/config/Config.scala new file mode 100644 index 000000000000..cbd50429492e --- /dev/null +++ 
b/tests/pos-with-compiler-cc/dotc/config/Config.scala @@ -0,0 +1,256 @@ +package dotty.tools.dotc.config + +object Config { + + inline val cacheMembersNamed = true + inline val cacheAsSeenFrom = true + inline val cacheMemberNames = true + inline val cacheImplicitScopes = true + inline val cacheMatchReduced = true + + /** If true, the `runWithOwner` operation uses a re-usable context, + * similar to explore. This requires that the context does not escape + * the call. If false, `runWithOwner` runs its operation argument + * in a fresh context. + */ + inline val reuseOwnerContexts = true + + inline val checkCacheMembersNamed = false + + /** When updating a constraint bound, check that the constrained parameter + * does not appear at the top-level of either of its bounds. + */ + inline val checkConstraintsNonCyclic = false + + /** Check that reverse dependencies in constraints are correct and complete. + * Can also be enabled using -Ycheck-constraint-deps. + */ + inline val checkConstraintDeps = false + + /** Check that each constraint resulting from a subtype test + * is satisfiable. Also check that a type variable instantiation + * satisfies its constraints. + * Note that this can fail when bad bounds are in scope, like in + * tests/neg/i4721a.scala. + */ + inline val checkConstraintsSatisfiable = false + + /** Check that each constraint is fully propagated. i.e. + * If P <: Q then the upper bound of P is a subtype of the upper bound of Q + * and the lower bound of Q is a subtype of the lower bound of P. + */ + inline val checkConstraintsPropagated = false + + /** Check that constraint bounds do not contain wildcard types */ + inline val checkNoWildcardsInConstraint = false + + /** If a constraint is over a type lambda `tl` and `tvar` is one of + * the type variables associated with `tl` in the constraint, check + * that the origin of `tvar` is a parameter of `tl`. 
+ */ + inline val checkConsistentVars = false + + /** Check that constraints of globally committable typer states are closed. + * NOTE: When enabled, the check can cause CyclicReference errors because + * it traverses all elements of a type. Such failures were observed when + * compiling all of dotty together (source seems to be in GenBCode which + * accesses javac's settings.) + * + * It is recommended to turn this option on only when chasing down + * a TypeParamRef instantiation error. See comment in Types.TypeVar.instantiate. + */ + inline val debugCheckConstraintsClosed = false + + /** Check that no type appearing as the info of a SymDenotation contains + * skolem types. + */ + inline val checkNoSkolemsInInfo = false + + /** Check that Name#toString is not called directly from backend by analyzing + * the stack trace of each toString call on names. This is very expensive, + * so not suitable for continuous testing. But it can be used to find a problem + * when running a specific test. + */ + inline val checkBackendNames = false + + /** Check that re-used type comparers are in their initialization state */ + inline val checkTypeComparerReset = false + + /** Type comparer will fail with an assert if the upper bound + * of a constrained parameter becomes Nothing. This should be turned + * on only for specific debugging as normally instantiation to Nothing + * is not an error condition. + */ + inline val failOnInstantiationToNothing = false + + /** Enable noDoubleDef checking if option "-YnoDoubleDefs" is set. + * The reason to have an option as well as the present global switch is + * that the noDoubleDef checking is done in a hotspot, and we do not + * want to incur the overhead of checking an option each time. 
+ */ + inline val checkNoDoubleBindings = true + + /** Check positions for consistency after parsing */ + inline val checkPositions = true + + /** Check that typed trees don't point to untyped ones */ + inline val checkTreesConsistent = false + + /** Show subtype traces for all deep subtype recursions */ + inline val traceDeepSubTypeRecursions = false + + /** When explaining subtypes and this flag is set, also show the classes of the compared types. */ + inline val verboseExplainSubtype = false + + /** If this flag is set, take the fast path when comparing same-named type-aliases and types */ + inline val fastPathForRefinedSubtype = true + + /** If this flag is set, and we compute `T1[X1]` & `T2[X2]` as a new + * upper bound of a constrained parameter, try to align the arguments by computing + * `S1 =:= S2` (which might instantiate type parameters). + * This rule is contentious because it cuts the constraint set. + * + * For more info, see the comment in `TypeComparer#glbArgs`. + */ + inline val alignArgsInAnd = true + + /** If this flag is set, higher-kinded applications are checked for validity + */ + inline val checkHKApplications = false + + /** If this flag is set, method types are checked for valid parameter references + */ + inline val checkMethodTypes = false + + /** If this flag is set, it is checked that TypeRefs don't refer directly + * to themselves. + */ + inline val checkTypeRefCycles = false + + /** If this flag is set, we check that types assigned to trees are error types only + * if some error was already reported. There are complicicated scenarios where this + * is not true. An example is TestNonCyclic in posTwice. If we remove the + * first (unused) import `import dotty.tools.dotc.core.Types.Type` in `CompilationUnit`, + * we end up assigning a CyclicReference error type to an import expression `annotation` + * before the cyclic reference is reported. 
What happens is that the error was reported + * as a result of a completion in a not-yet committed typerstate. So we cannot enforce + * this in all circumstances. But since it is almost always true it is useful to + * keep the Config option for debugging. + */ + inline val checkUnreportedErrors = false + + /** If this flag is set, it is checked that class type parameters are + * only references with NoPrefix or ThisTypes as prefixes. This option + * is usually disabled, because there are still some legitimate cases where + * this can arise (e.g. for pos/Map.scala, in LambdaType.integrate). + */ + inline val checkTypeParamRefs = false + + /** The recursion depth for showing a summarized string */ + inline val summarizeDepth = 2 + + /** Check that variances of lambda arguments match the + * variance of the underlying lambda class. + */ + inline val checkLambdaVariance = false + + /** Check that certain types cannot be created in erasedTypes phases. + * Note: Turning this option on will get some false negatives, since it is + * possible that And/Or types are still created during erasure as the result + * of some operation on an existing type. + */ + inline val checkUnerased = false + + /** Check that atoms-based comparisons match regular comparisons that do not + * take atoms into account. The two have to give the same results, since + * atoms comparison is intended to be just an optimization. + */ + inline val checkAtomsComparisons = false + + /** In `derivedSelect`, rewrite + * + * (S & T)#A --> S#A & T#A + * (S | T)#A --> S#A | T#A + * + * Not sure whether this is useful. Preliminary measurements show a slowdown of about + * 7% for the build when this option is enabled. + */ + inline val splitProjections = false + + /** If this flag is on, always rewrite an application `S[Ts]` where `S` is an alias for + * `[Xs] -> U` to `[Xs := Ts]U`. + * Turning this flag on was observed to give a ~6% speedup on the JUnit test suite. 
+ */ + inline val simplifyApplications = true + + /** Assume -indent by default */ + inline val defaultIndent = true + + /** If set, prints a trace of all symbol completions */ + inline val showCompletions = false + + /** If set, show variable/variable reverse dependencies when printing constraints. */ + inline val showConstraintDeps = true + + /** If set, method results that are context functions are flattened by adding + * the parameters of the context function results to the methods themselves. + * This is an optimization that reduces closure allocations. + */ + inline val flattenContextFunctionResults = true + + /** If set, enables tracing */ + inline val tracingEnabled = false + + /** Initial capacity of the uniques HashMap. + * Note: This should be a power of two to work with util.HashSet + */ + inline val initialUniquesCapacity = 0x8000 + + /** How many recursive calls to NamedType#underlying are performed before logging starts. */ + inline val LogPendingUnderlyingThreshold = 50 + + /** How many recursive calls to isSubType are performed before logging starts. */ + inline val LogPendingSubTypesThreshold = 50 + + /** How many recursive calls to findMember are performed before logging names starts + * Note: this threshold has to be chosen carefully. Too large, and programs + * like tests/pos/IterableSelfRec go into polynomial (or even exponential?) + * compile time slowdown. Too small and normal programs will cause the compiler to + * do inefficient operations on findMember. The current value is determined + * so that (1) IterableSelfRec still compiles in reasonable time (< 10sec) (2) Compiling + * dotty itself only causes small pending names lists to be generated (we measured + * at max 6 elements) and these lists are never searched with contains. 
+ */ + inline val LogPendingFindMemberThreshold = 9 + + /** When in IDE, turn StaleSymbol errors into warnings instead of crashing */ + inline val ignoreStaleInIDE = true + + /** If true, `Denotation#asSeenFrom` is allowed to return an existing + * `SymDenotation` instead of allocating a new `SingleDenotation` if + * the two would only differ in their `prefix` (SymDenotation always + * have `NoPrefix` as their prefix). + * This is done for performance reasons: when compiling Dotty itself this + * reduces the number of allocated denotations by ~50%. + */ + inline val reuseSymDenotations = true + + /** If `checkLevelsOnConstraints` is true, check levels of type variables + * and create fresh ones as needed when bounds are first entered intot he constraint. + * If `checkLevelsOnInstantiation` is true, allow level-incorrect constraints but + * fix levels on type variable instantiation. + */ + inline val checkLevelsOnConstraints = false + inline val checkLevelsOnInstantiation = true + + /** If true, print capturing types in the form `{c} T`. + * If false, print them in the form `T @retains(c)`. + */ + inline val printCaptureSetsAsPrefix = true + + /** If true, allow mappping capture set variables under captureChecking with maps that are neither + * bijective nor idempotent. We currently do now know how to do this correctly in all + * cases, though. 
+ */ + inline val ccAllowUnsoundMaps = false +} diff --git a/tests/pos-with-compiler-cc/dotc/config/Feature.scala b/tests/pos-with-compiler-cc/dotc/config/Feature.scala new file mode 100644 index 000000000000..1637c9268e30 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/config/Feature.scala @@ -0,0 +1,173 @@ +package dotty.tools +package dotc +package config + +import core._ +import Contexts._, Symbols._, Names._ +import StdNames.nme +import Decorators.* +import util.{SrcPos, NoSourcePosition} +import SourceVersion._ +import reporting.Message +import NameKinds.QualifiedName +import language.experimental.pureFunctions + +object Feature: + + def experimental(str: PreName): TermName = + QualifiedName(nme.experimental, str.toTermName) + + private def deprecated(str: PreName): TermName = + QualifiedName(nme.deprecated, str.toTermName) + + private val namedTypeArguments = experimental("namedTypeArguments") + private val genericNumberLiterals = experimental("genericNumberLiterals") + val scala2macros = experimental("macros") + + val dependent = experimental("dependent") + val erasedDefinitions = experimental("erasedDefinitions") + val symbolLiterals = deprecated("symbolLiterals") + val fewerBraces = experimental("fewerBraces") + val saferExceptions = experimental("saferExceptions") + val pureFunctions = experimental("pureFunctions") + val captureChecking = experimental("captureChecking") + val into = experimental("into") + + val globalOnlyImports: Set[TermName] = Set(pureFunctions, captureChecking) + + /** Is `feature` enabled by by a command-line setting? The enabling setting is + * + * -language:feature + * + * where is the fully qualified name of `owner`, followed by a ".", + * but subtracting the prefix `scala.language.` at the front. + */ + def enabledBySetting(feature: TermName)(using Context): Boolean = + ctx.base.settings.language.value.contains(feature.toString) + + /** Is `feature` enabled by by an import? 
This is the case if the feature + * is imported by a named import + * + * import owner.feature + * + * and there is no visible nested import that excludes the feature, as in + * + * import owner.{ feature => _ } + */ + def enabledByImport(feature: TermName)(using Context): Boolean = + //atPhase(typerPhase) { + val info = ctx.importInfo + info != null && info.featureImported(feature) + //} + + /** Is `feature` enabled by either a command line setting or an import? + * @param feature The name of the feature + * @param owner The prefix symbol (nested in `scala.language`) where the + * feature is defined. + */ + def enabled(feature: TermName)(using Context): Boolean = + enabledBySetting(feature) || enabledByImport(feature) + + /** Is auto-tupling enabled? */ + def autoTuplingEnabled(using Context): Boolean = !enabled(nme.noAutoTupling) + + def dynamicsEnabled(using Context): Boolean = enabled(nme.dynamics) + + def dependentEnabled(using Context) = enabled(dependent) + + def namedTypeArgsEnabled(using Context) = enabled(namedTypeArguments) + + def genericNumberLiteralsEnabled(using Context) = enabled(genericNumberLiterals) + + def scala2ExperimentalMacroEnabled(using Context) = enabled(scala2macros) + + /** Is pureFunctions enabled for this compilation unit? */ + def pureFunsEnabled(using Context) = + enabledBySetting(pureFunctions) + || ctx.compilationUnit.knowsPureFuns + || ccEnabled + + /** Is captureChecking enabled for this compilation unit? */ + def ccEnabled(using Context) = + enabledBySetting(captureChecking) + || ctx.compilationUnit.needsCaptureChecking + + /** Is pureFunctions enabled for any of the currently compiled compilation units? */ + def pureFunsEnabledSomewhere(using Context) = + enabledBySetting(pureFunctions) + || ctx.run != null && ctx.run.nn.pureFunsImportEncountered + || ccEnabledSomewhere + + /** Is captureChecking enabled for any of the currently compiled compilation units? 
*/ + def ccEnabledSomewhere(using Context) = + enabledBySetting(captureChecking) + || ctx.run != null && ctx.run.nn.ccImportEncountered + + def sourceVersionSetting(using Context): SourceVersion = + SourceVersion.valueOf(ctx.settings.source.value) + + def sourceVersion(using Context): SourceVersion = + ctx.compilationUnit.sourceVersion match + case Some(v) => v + case none => sourceVersionSetting + + def migrateTo3(using Context): Boolean = + sourceVersion == `3.0-migration` + + def fewerBracesEnabled(using Context) = + sourceVersion.isAtLeast(`3.3`) || enabled(fewerBraces) + + /** If current source migrates to `version`, issue given warning message + * and return `true`, otherwise return `false`. + */ + def warnOnMigration(msg: Message, pos: SrcPos, version: SourceVersion)(using Context): Boolean = + if sourceVersion.isMigrating && sourceVersion.stable == version + || (version == `3.0` || version == `3.1`) && migrateTo3 + then + report.migrationWarning(msg, pos) + true + else + false + + def checkExperimentalFeature(which: String, srcPos: SrcPos, note: -> String = "")(using Context) = + if !isExperimentalEnabled then + report.error(em"Experimental $which may only be used with a nightly or snapshot version of the compiler$note", srcPos) + + def checkExperimentalDef(sym: Symbol, srcPos: SrcPos)(using Context) = + if !isExperimentalEnabled then + val symMsg = + if sym.hasAnnotation(defn.ExperimentalAnnot) then + i"$sym is marked @experimental" + else if sym.owner.hasAnnotation(defn.ExperimentalAnnot) then + i"${sym.owner} is marked @experimental" + else + i"$sym inherits @experimental" + report.error(em"$symMsg and therefore may only be used in an experimental scope.", srcPos) + + /** Check that experimental compiler options are only set for snapshot or nightly compiler versions. 
*/ + def checkExperimentalSettings(using Context): Unit = + for setting <- ctx.settings.language.value + if setting.startsWith("experimental.") && setting != "experimental.macros" + do checkExperimentalFeature(s"feature $setting", NoSourcePosition) + + def isExperimentalEnabled(using Context): Boolean = + Properties.experimental && !ctx.settings.YnoExperimental.value + + /** Handle language import `import language..` if it is one + * of the global imports `pureFunctions` or `captureChecking`. In this case + * make the compilation unit's and current run's fields accordingly. + * @return true iff import that was handled + */ + def handleGlobalLanguageImport(prefix: TermName, imported: Name)(using Context): Boolean = + val fullFeatureName = QualifiedName(prefix, imported.asTermName) + if fullFeatureName == pureFunctions then + ctx.compilationUnit.knowsPureFuns = true + if ctx.run != null then ctx.run.nn.pureFunsImportEncountered = true + true + else if fullFeatureName == captureChecking then + ctx.compilationUnit.needsCaptureChecking = true + if ctx.run != null then ctx.run.nn.ccImportEncountered = true + true + else + false +end Feature diff --git a/tests/pos-with-compiler-cc/dotc/config/JavaPlatform.scala b/tests/pos-with-compiler-cc/dotc/config/JavaPlatform.scala new file mode 100644 index 000000000000..2b2f35e49451 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/config/JavaPlatform.scala @@ -0,0 +1,69 @@ +package dotty.tools +package dotc +package config + +import io._ +import classpath.AggregateClassPath +import core._ +import Symbols._, Types._, Contexts._, StdNames._ +import Flags._ +import transform.ExplicitOuter, transform.SymUtils._ + +class JavaPlatform extends Platform { + + private var currentClassPath: Option[ClassPath] = None + + def classPath(using Context): ClassPath = { + if (currentClassPath.isEmpty) + currentClassPath = Some(new PathResolver().result) + val cp = currentClassPath.get + cp + } + + // The given symbol is a method with the right 
name and signature to be a runnable java program. + def isMainMethod(sym: Symbol)(using Context): Boolean = + (sym.name == nme.main) && (sym.info match { + case MethodTpe(_, defn.ArrayOf(el) :: Nil, restpe) => el =:= defn.StringType && (restpe isRef defn.UnitClass) + case _ => false + }) + + /** Update classpath with a substituted subentry */ + def updateClassPath(subst: Map[ClassPath, ClassPath]): Unit = currentClassPath.get match { + case AggregateClassPath(entries) => + currentClassPath = Some(AggregateClassPath(entries map (e => subst.getOrElse(e, e)))) + case cp: ClassPath => + currentClassPath = Some(subst.getOrElse(cp, cp)) + } + + def rootLoader(root: TermSymbol)(using Context): SymbolLoader = new SymbolLoaders.PackageLoader(root, classPath) + + /** Is the SAMType `cls` also a SAM under the rules of the JVM? */ + def isSam(cls: ClassSymbol)(using Context): Boolean = + cls.isAllOf(NoInitsTrait) && + cls.superClass == defn.ObjectClass && + cls.directlyInheritedTraits.forall(_.is(NoInits)) && + !ExplicitOuter.needsOuterIfReferenced(cls) && + cls.typeRef.fields.isEmpty // Superaccessors already show up as abstract methods here, so no test necessary + + /** We could get away with excluding BoxedBooleanClass for the + * purpose of equality testing since it need not compare equal + * to anything but other booleans, but it should be present in + * case this is put to other uses. 
+ */ + def isMaybeBoxed(sym: ClassSymbol)(using Context): Boolean = { + val d = defn + import d._ + (sym == ObjectClass) || + (sym == JavaSerializableClass) || + (sym == ComparableClass) || + (sym derivesFrom BoxedNumberClass) || + (sym derivesFrom BoxedCharClass) || + (sym derivesFrom BoxedBooleanClass) + } + + def shouldReceiveJavaSerializationMethods(sym: ClassSymbol)(using Context): Boolean = + true + + def newClassLoader(bin: AbstractFile)(using Context): SymbolLoader = + new ClassfileLoader(bin) +} diff --git a/tests/pos-with-compiler-cc/dotc/config/OutputDirs.scala b/tests/pos-with-compiler-cc/dotc/config/OutputDirs.scala new file mode 100644 index 000000000000..0411c5604768 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/config/OutputDirs.scala @@ -0,0 +1,117 @@ +package dotty.tools +package dotc +package config + +import scala.language.unsafeNulls + +import io._ + +/** A class for holding mappings from source directories to + * their output location. This functionality can be accessed + * only programmatically. The command line compiler uses a + * single output location, but tools may use this functionality + * to set output location per source directory. + */ +class OutputDirs { + /** Pairs of source directory - destination directory. */ + private var outputDirs: List[(AbstractFile, AbstractFile)] = Nil + + /** If this is not None, the output location where all + * classes should go. + */ + private var singleOutDir: Option[AbstractFile] = None + + /** Add a destination directory for sources found under srcdir. + * Both directories should exits. + */ + def add(srcDir: String, outDir: String): Unit = + add(checkDir(AbstractFile.getDirectory(srcDir), srcDir), + checkDir(AbstractFile.getDirectory(outDir), outDir)) + + /** Check that dir is exists and is a directory. 
*/ + private def checkDir(dir: AbstractFile, name: String, allowJar: Boolean = false): AbstractFile = ( + if (dir != null && dir.isDirectory) + dir + // was: else if (allowJar && dir == null && Path.isJarOrZip(name, false)) + else if (allowJar && dir == null && Jar.isJarOrZip(File(name), false)) + new PlainFile(Path(name)) + else + throw new FatalError(name + " does not exist or is not a directory")) + + /** Set the single output directory. From now on, all files will + * be dumped in there, regardless of previous calls to 'add'. + */ + def setSingleOutput(outDir: String): Unit = { + val dst = AbstractFile.getDirectory(outDir) + setSingleOutput(checkDir(dst, outDir, true)) + } + + def getSingleOutput: Option[AbstractFile] = singleOutDir + + /** Set the single output directory. From now on, all files will + * be dumped in there, regardless of previous calls to 'add'. + */ + def setSingleOutput(dir: AbstractFile): Unit = + singleOutDir = Some(dir) + + def add(src: AbstractFile, dst: AbstractFile): Unit = { + singleOutDir = None + outputDirs ::= ((src, dst)) + } + + /** Return the list of source-destination directory pairs. */ + def outputs: List[(AbstractFile, AbstractFile)] = outputDirs + + /** Return the output directory for the given file. + */ + def outputDirFor(src: AbstractFile): AbstractFile = { + def isBelow(srcDir: AbstractFile, outDir: AbstractFile) = + src.path.startsWith(srcDir.path) + + singleOutDir match { + case Some(d) => d + case None => + (outputs find (isBelow _).tupled) match { + case Some((_, d)) => d + case _ => + throw new FatalError("Could not find an output directory for " + + src.path + " in " + outputs) + } + } + } + + /** Return the source file path(s) which correspond to the given + * classfile path and SourceFile attribute value, subject to the + * condition that source files are arranged in the filesystem + * according to Java package layout conventions. 
+ * + * The given classfile path must be contained in at least one of + * the specified output directories. If it does not then this + * method returns Nil. + * + * Note that the source file is not required to exist, so assuming + * a valid classfile path this method will always return a list + * containing at least one element. + * + * Also that if two or more source path elements target the same + * output directory there will be two or more candidate source file + * paths. + */ + def srcFilesFor(classFile: AbstractFile, srcPath: String): List[AbstractFile] = { + def isBelow(srcDir: AbstractFile, outDir: AbstractFile) = + classFile.path.startsWith(outDir.path) + + singleOutDir match { + case Some(d) => + d match { + case _: VirtualDirectory | _: io.ZipArchive => Nil + case _ => List(d.lookupPathUnchecked(srcPath, false)) + } + case None => + (outputs filter (isBelow _).tupled) match { + case Nil => Nil + case matches => matches.map(_._1.lookupPathUnchecked(srcPath, false)) + } + } + } +} diff --git a/tests/pos-with-compiler-cc/dotc/config/PathResolver.scala b/tests/pos-with-compiler-cc/dotc/config/PathResolver.scala new file mode 100644 index 000000000000..afa30e38dc2a --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/config/PathResolver.scala @@ -0,0 +1,268 @@ +package dotty.tools +package dotc +package config + +import scala.language.unsafeNulls + +import WrappedProperties.AccessControl +import io.{ClassPath, Directory, Path} +import classpath.{AggregateClassPath, ClassPathFactory, JrtClassPath} +import ClassPath.split +import PartialFunction.condOpt +import core.Contexts._ +import Settings._ +import dotty.tools.io.File + +object PathResolver { + + // Imports property/environment functions which suppress + // security exceptions. + import AccessControl._ + + def firstNonEmpty(xs: String*): String = xs find (_ != "") getOrElse "" + + /** Map all classpath elements to absolute paths and reconstruct the classpath. 
+ */ + def makeAbsolute(cp: String): String = ClassPath.map(cp, x => Path(x).toAbsolute.path) + + /** pretty print class path + */ + def ppcp(s: String): String = split(s) match { + case Nil => "" + case Seq(x) => x + case xs => xs.map("\n" + _).mkString + } + + /** Values found solely by inspecting environment or property variables. + */ + object Environment { + private def searchForBootClasspath = ( + systemProperties find (_._1 endsWith ".boot.class.path") map (_._2) getOrElse "" + ) + + /** Environment variables which java pays attention to so it + * seems we do as well. + */ + def classPathEnv: String = envOrElse("CLASSPATH", "") + def sourcePathEnv: String = envOrElse("SOURCEPATH", "") + + def javaBootClassPath: String = propOrElse("sun.boot.class.path", searchForBootClasspath) + + def javaExtDirs: String = propOrEmpty("java.ext.dirs") + def scalaHome: String = propOrEmpty("scala.home") + def scalaExtDirs: String = propOrEmpty("scala.ext.dirs") + + /** The java classpath and whether to use it. + */ + def javaUserClassPath: String = propOrElse("java.class.path", "") + def useJavaClassPath: Boolean = propOrFalse("scala.usejavacp") + + override def toString: String = s""" + |object Environment { + | scalaHome = $scalaHome (useJavaClassPath = $useJavaClassPath) + | javaBootClassPath = <${javaBootClassPath.length} chars> + | javaExtDirs = ${ppcp(javaExtDirs)} + | javaUserClassPath = ${ppcp(javaUserClassPath)} + | scalaExtDirs = ${ppcp(scalaExtDirs)} + |}""".trim.stripMargin + } + + /** Default values based on those in Environment as interpreted according + * to the path resolution specification. 
+ */ + object Defaults { + def scalaSourcePath: String = Environment.sourcePathEnv + def javaBootClassPath: String = Environment.javaBootClassPath + def javaUserClassPath: String = Environment.javaUserClassPath + def javaExtDirs: String = Environment.javaExtDirs + def useJavaClassPath: Boolean = Environment.useJavaClassPath + + def scalaHome: String = Environment.scalaHome + def scalaHomeDir: Directory = Directory(scalaHome) + def scalaHomeExists: Boolean = scalaHomeDir.isDirectory + def scalaLibDir: Directory = (scalaHomeDir / "lib").toDirectory + def scalaClassesDir: Directory = (scalaHomeDir / "classes").toDirectory + + def scalaLibAsJar: File = (scalaLibDir / "scala-library.jar").toFile + def scalaLibAsDir: Directory = (scalaClassesDir / "library").toDirectory + + def scalaLibDirFound: Option[Directory] = + if (scalaLibAsJar.isFile) Some(scalaLibDir) + else if (scalaLibAsDir.isDirectory) Some(scalaClassesDir) + else None + + def scalaLibFound: String = + if (scalaLibAsJar.isFile) scalaLibAsJar.path + else if (scalaLibAsDir.isDirectory) scalaLibAsDir.path + else "" + + // XXX It must be time for someone to figure out what all these things + // are intended to do. This is disabled here because it was causing all + // the scala jars to end up on the classpath twice: one on the boot + // classpath as set up by the runner (or regular classpath under -nobootcp) + // and then again here. 
+ def scalaBootClassPath: String = "" + // scalaLibDirFound match { + // case Some(dir) if scalaHomeExists => + // val paths = ClassPath expandDir dir.path + // join(paths: _*) + // case _ => "" + // } + + def scalaExtDirs: String = Environment.scalaExtDirs + + def scalaPluginPath: String = (scalaHomeDir / "misc" / "scala-devel" / "plugins").path + + override def toString: String = """ + |object Defaults { + | scalaHome = %s + | javaBootClassPath = %s + | scalaLibDirFound = %s + | scalaLibFound = %s + | scalaBootClassPath = %s + | scalaPluginPath = %s + |}""".trim.stripMargin.format( + scalaHome, + ppcp(javaBootClassPath), + scalaLibDirFound, scalaLibFound, + ppcp(scalaBootClassPath), ppcp(scalaPluginPath) + ) + } + + def fromPathString(path: String)(using Context): ClassPath = { + val settings = ctx.settings.classpath.update(path) + inContext(ctx.fresh.setSettings(settings)) { + new PathResolver().result + } + } + + /** Show values in Environment and Defaults when no argument is provided. + * Otherwise, show values in Calculated as if those options had been given + * to a scala runner. 
+ */ + def main(args: Array[String]): Unit = + if (args.isEmpty) { + println(Environment) + println(Defaults) + } + else inContext(ContextBase().initialCtx) { + val ArgsSummary(sstate, rest, errors, warnings) = + ctx.settings.processArguments(args.toList, true, ctx.settingsState) + errors.foreach(println) + val pr = inContext(ctx.fresh.setSettings(sstate)) { + new PathResolver() + } + println(" COMMAND: 'scala %s'".format(args.mkString(" "))) + println("RESIDUAL: 'scala %s'\n".format(rest.mkString(" "))) + + pr.result match { + case cp: AggregateClassPath => + println(s"ClassPath has ${cp.aggregates.size} entries and results in:\n${cp.asClassPathStrings}") + } + } +} + +import PathResolver.{Defaults, ppcp} + +class PathResolver(using c: Context) { + import c.base.settings + + private val classPathFactory = new ClassPathFactory + + private def cmdLineOrElse(name: String, alt: String) = + commandLineFor(name) match { + case Some("") | None => alt + case Some(x) => x + } + + private def commandLineFor(s: String): Option[String] = condOpt(s) { + case "javabootclasspath" => settings.javabootclasspath.value + case "javaextdirs" => settings.javaextdirs.value + case "bootclasspath" => settings.bootclasspath.value + case "extdirs" => settings.extdirs.value + case "classpath" | "cp" => settings.classpath.value + case "sourcepath" => settings.sourcepath.value + } + + /** Calculated values based on any given command line options, falling back on + * those in Defaults. 
+ */ + object Calculated { + def scalaHome: String = Defaults.scalaHome + def useJavaClassPath: Boolean = settings.usejavacp.value || Defaults.useJavaClassPath + def javaBootClassPath: String = cmdLineOrElse("javabootclasspath", Defaults.javaBootClassPath) + def javaExtDirs: String = cmdLineOrElse("javaextdirs", Defaults.javaExtDirs) + def javaUserClassPath: String = if (useJavaClassPath) Defaults.javaUserClassPath else "" + def scalaBootClassPath: String = cmdLineOrElse("bootclasspath", Defaults.scalaBootClassPath) + def scalaExtDirs: String = cmdLineOrElse("extdirs", Defaults.scalaExtDirs) + /** Scaladoc doesn't need any bootstrapping, otherwise will create errors such as: + * [scaladoc] ../scala-trunk/src/reflect/scala/reflect/macros/Reifiers.scala:89: error: object api is not a member of package reflect + * [scaladoc] case class ReificationException(val pos: reflect.api.PositionApi, val msg: String) extends Throwable(msg) + * [scaladoc] ^ + * Because bootstrapping looks at the sourcepath and creates the package "reflect" in "" it will cause the + * typedIdentifier to pick .reflect instead of the .scala.reflect package. Thus, no bootstrapping for scaladoc! + */ + def sourcePath: String = cmdLineOrElse("sourcepath", Defaults.scalaSourcePath) + + def userClassPath: String = + if (!settings.classpath.isDefault) settings.classpath.value + else sys.env.getOrElse("CLASSPATH", ".") + + import classPathFactory._ + + // Assemble the elements! + def basis: List[Traversable[ClassPath]] = + val release = Option(ctx.settings.javaOutputVersion.value).filter(_.nonEmpty) + + List( + JrtClassPath(release), // 1. The Java 9+ classpath (backed by the jrt:/ virtual system, if available) + classesInPath(javaBootClassPath), // 2. The Java bootstrap class path. + contentsOfDirsInPath(javaExtDirs), // 3. The Java extension class path. + classesInExpandedPath(javaUserClassPath), // 4. The Java application class path. + classesInPath(scalaBootClassPath), // 5. The Scala boot class path. 
+ contentsOfDirsInPath(scalaExtDirs), // 6. The Scala extension class path. + classesInExpandedPath(userClassPath), // 7. The Scala application class path. + sourcesInPath(sourcePath) // 8. The Scala source path. + ) + + lazy val containers: List[ClassPath] = basis.flatten.distinct + + override def toString: String = """ + |object Calculated { + | scalaHome = %s + | javaBootClassPath = %s + | javaExtDirs = %s + | javaUserClassPath = %s + | useJavaClassPath = %s + | scalaBootClassPath = %s + | scalaExtDirs = %s + | userClassPath = %s + | sourcePath = %s + |}""".trim.stripMargin.format( + scalaHome, + ppcp(javaBootClassPath), ppcp(javaExtDirs), ppcp(javaUserClassPath), + useJavaClassPath, + ppcp(scalaBootClassPath), ppcp(scalaExtDirs), ppcp(userClassPath), + ppcp(sourcePath) + ) + } + + def containers: List[ClassPath] = Calculated.containers + + lazy val result: ClassPath = { + val cp = AggregateClassPath(containers.toIndexedSeq) + + if (settings.YlogClasspath.value) { + Console.println("Classpath built from " + settings.toConciseString(ctx.settingsState)) + Console.println("Defaults: " + PathResolver.Defaults) + Console.println("Calculated: " + Calculated) + + val xs = (Calculated.basis drop 2).flatten.distinct + println("After java boot/extdirs classpath has %d entries:" format xs.size) + xs foreach (x => println(" " + x)) + } + cp + } + + def asURLs: Seq[java.net.URL] = result.asURLs +} diff --git a/tests/pos-with-compiler-cc/dotc/config/Platform.scala b/tests/pos-with-compiler-cc/dotc/config/Platform.scala new file mode 100644 index 000000000000..0faacf1bcebb --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/config/Platform.scala @@ -0,0 +1,46 @@ +package dotty.tools +package dotc +package config + +import io.{ClassPath, AbstractFile} +import core.Contexts._, core.Symbols._ +import core.SymbolLoader +import core.StdNames.nme +import core.Flags.Module + +/** The platform dependent pieces of Global. + */ +abstract class Platform { + + /** The root symbol loader. 
*/ + def rootLoader(root: TermSymbol)(using Context): SymbolLoader + + /** The compiler classpath. */ + def classPath(using Context): ClassPath + + /** Update classpath with a substitution that maps entries to entries */ + def updateClassPath(subst: Map[ClassPath, ClassPath]): Unit + + /** Any platform-specific phases. */ + //def platformPhases: List[SubComponent] + + /** Is the SAMType `cls` also a SAM under the rules of the platform? */ + def isSam(cls: ClassSymbol)(using Context): Boolean + + /** The various ways a boxed primitive might materialize at runtime. */ + def isMaybeBoxed(sym: ClassSymbol)(using Context): Boolean + + /** Is the given class symbol eligible for Java serialization-specific methods? */ + def shouldReceiveJavaSerializationMethods(sym: ClassSymbol)(using Context): Boolean + + /** Create a new class loader to load class file `bin` */ + def newClassLoader(bin: AbstractFile)(using Context): SymbolLoader + + /** The given symbol is a method with the right name and signature to be a runnable program. */ + def isMainMethod(sym: Symbol)(using Context): Boolean + + /** The given class has a main method. 
*/ + final def hasMainMethod(sym: Symbol)(using Context): Boolean = + sym.info.member(nme.main).hasAltWith(d => + isMainMethod(d.symbol) && (sym.is(Module) || d.symbol.isStatic)) +} diff --git a/tests/pos-with-compiler-cc/dotc/config/Printers.scala b/tests/pos-with-compiler-cc/dotc/config/Printers.scala new file mode 100644 index 000000000000..ecb189de9bb3 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/config/Printers.scala @@ -0,0 +1,52 @@ +package dotty.tools.dotc.config + +object Printers { + + class Printer { + def println(msg: => String): Unit = System.out.nn.println(msg) + } + + object noPrinter extends Printer { + inline override def println(msg: => String): Unit = () + } + + val default = new Printer + + val capt = noPrinter + val constr = noPrinter + val core = noPrinter + val checks = noPrinter + val config = noPrinter + val cyclicErrors = noPrinter + val debug = noPrinter + val derive = noPrinter + val desugar = noPrinter + val scaladoc = noPrinter + val exhaustivity = noPrinter + val gadts = noPrinter + val gadtsConstr = noPrinter + val hk = noPrinter + val implicits = noPrinter + val implicitsDetailed = noPrinter + val lexical = noPrinter + val init = noPrinter + val inlining = noPrinter + val interactiv = noPrinter + val matchTypes = noPrinter + val nullables = noPrinter + val overload = noPrinter + val patmatch = noPrinter + val pickling = noPrinter + val quotePickling = noPrinter + val plugins = noPrinter + val recheckr = noPrinter + val refcheck = noPrinter + val simplify = noPrinter + val staging = noPrinter + val subtyping = noPrinter + val tailrec = noPrinter + val transforms = noPrinter + val typr = noPrinter + val unapp = noPrinter + val variances = noPrinter +} diff --git a/tests/pos-with-compiler-cc/dotc/config/Properties.scala b/tests/pos-with-compiler-cc/dotc/config/Properties.scala new file mode 100644 index 000000000000..1e9cc82112af --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/config/Properties.scala @@ -0,0 +1,142 @@ 
+package dotty.tools +package dotc +package config + +import scala.language.unsafeNulls + +import scala.annotation.internal.sharable + +import java.io.IOException +import java.util.jar.Attributes.{ Name => AttributeName } +import java.nio.charset.StandardCharsets + +/** Loads `library.properties` from the jar. */ +object Properties extends PropertiesTrait { + protected def propCategory: String = "compiler" + protected def pickJarBasedOn: Class[PropertiesTrait] = classOf[PropertiesTrait] + + /** Scala manifest attributes. + */ + @sharable val ScalaCompilerVersion: AttributeName = new AttributeName("Scala-Compiler-Version") +} + +trait PropertiesTrait { + protected def propCategory: String // specializes the remainder of the values + protected def pickJarBasedOn: Class[?] // props file comes from jar containing this + + /** The name of the properties file */ + protected val propFilename: String = "/" + propCategory + ".properties" + + /** The loaded properties */ + @sharable protected lazy val scalaProps: java.util.Properties = { + val props = new java.util.Properties + val stream = pickJarBasedOn getResourceAsStream propFilename + if (stream ne null) + quietlyDispose(props load stream, stream.close) + + props + } + + private def quietlyDispose(action: => Unit, disposal: => Unit) = + try { action } + finally + try { disposal } + catch { case _: IOException => } + + def propIsSet(name: String): Boolean = System.getProperty(name) != null + def propIsSetTo(name: String, value: String): Boolean = propOrNull(name) == value + def propOrElse(name: String, alt: String): String = System.getProperty(name, alt) + def propOrEmpty(name: String): String = propOrElse(name, "") + def propOrNull(name: String): String = propOrElse(name, null) + def propOrNone(name: String): Option[String] = Option(propOrNull(name)) + def propOrFalse(name: String): Boolean = propOrNone(name) exists (x => List("yes", "on", "true") contains x.toLowerCase) + def setProp(name: String, value: String): 
String = System.setProperty(name, value) + def clearProp(name: String): String = System.clearProperty(name) + + def envOrElse(name: String, alt: String): String = Option(System getenv name) getOrElse alt + def envOrNone(name: String): Option[String] = Option(System getenv name) + + // for values based on propFilename + def scalaPropOrElse(name: String, alt: String): String = scalaProps.getProperty(name, alt) + def scalaPropOrEmpty(name: String): String = scalaPropOrElse(name, "") + def scalaPropOrNone(name: String): Option[String] = Option(scalaProps.getProperty(name)) + + /** Either the development or release version if known, otherwise + * the empty string. + */ + def versionNumberString: String = scalaPropOrEmpty("version.number") + + /** The version number of the jar this was loaded from, + * or `"(unknown)"` if it cannot be determined. + */ + val simpleVersionString: String = { + val v = scalaPropOrElse("version.number", "(unknown)") + v + ( + if (v.contains("SNAPSHOT") || v.contains("NIGHTLY")) + "-git-" + scalaPropOrElse("git.hash", "(unknown)") + else + "" + ) + } + + /** The version number of the jar this was loaded from plus `"version "` prefix, + * or `"version (unknown)"` if it cannot be determined. + */ + val versionString: String = "version " + simpleVersionString + + /** Whether the current version of compiler is experimental + * + * 1. Snapshot, nightly releases and non-bootstrapped compiler are experimental. + * 2. Features supported by experimental versions of the compiler: + * - research plugins + */ + val experimental: Boolean = versionString.contains("SNAPSHOT") || versionString.contains("NIGHTLY") || versionString.contains("nonbootstrapped") + + val copyrightString: String = scalaPropOrElse("copyright.string", "(c) 2002-2017 LAMP/EPFL") + + /** This is the encoding to use reading in source files, overridden with -encoding + * Note that it uses "prop" i.e. looks in the scala jar, not the system properties. 
+ */ + def sourceEncoding: String = scalaPropOrElse("file.encoding", StandardCharsets.UTF_8.name) + def sourceReader: String = scalaPropOrElse("source.reader", "scala.tools.nsc.io.SourceReader") + + /** This is the default text encoding, overridden (unreliably) with + * `JAVA_OPTS="-Dfile.encoding=Foo"` + */ + def encodingString: String = propOrElse("file.encoding", StandardCharsets.UTF_8.name) + + /** The default end of line character. + */ + def lineSeparator: String = propOrElse("line.separator", "\n") + + /** Various well-known properties. + */ + def javaClassPath: String = propOrEmpty("java.class.path") + def javaHome: String = propOrEmpty("java.home") + def javaVendor: String = propOrEmpty("java.vendor") + def javaVersion: String = propOrEmpty("java.version") + def javaVmInfo: String = propOrEmpty("java.vm.info") + def javaVmName: String = propOrEmpty("java.vm.name") + def javaVmVendor: String = propOrEmpty("java.vm.vendor") + def javaVmVersion: String = propOrEmpty("java.vm.version") + def osName: String = propOrEmpty("os.name") + def scalaHome: String = propOrEmpty("scala.home") + def tmpDir: String = propOrEmpty("java.io.tmpdir") + def userDir: String = propOrEmpty("user.dir") + def userHome: String = propOrEmpty("user.home") + def userName: String = propOrEmpty("user.name") + + /** Some derived values. + */ + def isWin: Boolean = osName startsWith "Windows" + def isMac: Boolean = javaVendor startsWith "Apple" + + // This is looking for javac, tools.jar, etc. + // Tries JDK_HOME first, then the more common but likely jre JAVA_HOME, + // and finally the system property based javaHome. 
+ def jdkHome: String = envOrElse("JDK_HOME", envOrElse("JAVA_HOME", javaHome)) + + def versionMsg: String = "Scala %s %s -- %s".format(propCategory, versionString, copyrightString) + def scalaCmd: String = if (isWin) "scala.bat" else "scala" + def scalacCmd: String = if (isWin) "scalac.bat" else "scalac" +} diff --git a/tests/pos-with-compiler-cc/dotc/config/SJSPlatform.scala b/tests/pos-with-compiler-cc/dotc/config/SJSPlatform.scala new file mode 100644 index 000000000000..ae417b717ca3 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/config/SJSPlatform.scala @@ -0,0 +1,35 @@ +package dotty.tools.dotc.config + +import dotty.tools.dotc.core._ +import Contexts._ +import Symbols._ + +import dotty.tools.backend.sjs.JSDefinitions + +object SJSPlatform { + /** The `SJSPlatform` for the current context. */ + def sjsPlatform(using Context): SJSPlatform = + ctx.platform.asInstanceOf[SJSPlatform] +} + +class SJSPlatform()(using DetachedContext) extends JavaPlatform { + + /** Scala.js-specific definitions. */ + val jsDefinitions: JSDefinitions = new JSDefinitions() + + /** Is the SAMType `cls` also a SAM under the rules of the Scala.js back-end? */ + override def isSam(cls: ClassSymbol)(using Context): Boolean = + defn.isFunctionClass(cls) + || cls.superClass == jsDefinitions.JSFunctionClass + + /** Is the given class symbol eligible for Java serialization-specific methods? + * + * This is not simply false because we still want to add them to Scala classes + * and objects. They might be transitively used by macros and other compile-time + * code. It feels safer to have them be somewhat equivalent to the ones we would + * get in a JVM project. The JVM back-end will slap an extends `java.io.Serializable` + * to them, so we should be consistent and also emit the proper serialization methods. 
+ */ + override def shouldReceiveJavaSerializationMethods(sym: ClassSymbol)(using Context): Boolean = + !sym.isSubClass(jsDefinitions.JSAnyClass) +} diff --git a/tests/pos-with-compiler-cc/dotc/config/ScalaRelease.scala b/tests/pos-with-compiler-cc/dotc/config/ScalaRelease.scala new file mode 100644 index 000000000000..407171f1a0dd --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/config/ScalaRelease.scala @@ -0,0 +1,21 @@ +package dotty.tools.dotc.config + +enum ScalaRelease(val majorVersion: Int, val minorVersion: Int) extends Ordered[ScalaRelease]: + case Release3_0 extends ScalaRelease(3, 0) + case Release3_1 extends ScalaRelease(3, 1) + case Release3_2 extends ScalaRelease(3, 2) + + def show = s"$majorVersion.$minorVersion" + + def compare(that: ScalaRelease) = + val ord = summon[Ordering[(Int, Int)]] + ord.compare((majorVersion, minorVersion), (that.majorVersion, that.minorVersion)) + +object ScalaRelease: + def latest = Release3_1 + + def parse(name: String) = name match + case "3.0" => Some(Release3_0) + case "3.1" => Some(Release3_1) + case "3.2" => Some(Release3_2) + case _ => None diff --git a/tests/pos-with-compiler-cc/dotc/config/ScalaSettings.scala b/tests/pos-with-compiler-cc/dotc/config/ScalaSettings.scala new file mode 100644 index 000000000000..20708b98cc95 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/config/ScalaSettings.scala @@ -0,0 +1,347 @@ +package dotty.tools.dotc +package config + +import scala.language.unsafeNulls + +import dotty.tools.dotc.config.PathResolver.Defaults +import dotty.tools.dotc.config.Settings.{Setting, SettingGroup} +import dotty.tools.dotc.config.SourceVersion +import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.rewrites.Rewrites +import dotty.tools.io.{AbstractFile, Directory, JDK9Reflectors, PlainDirectory} + +import scala.util.chaining._ + +class ScalaSettings extends SettingGroup with AllScalaSettings + +object ScalaSettings: + // Keep synchronized with `classfileVersion` in `BCodeIdiomatic` + 
private val minTargetVersion = 8 + private val maxTargetVersion = 21 + + def supportedTargetVersions: List[String] = + (minTargetVersion to maxTargetVersion).toList.map(_.toString) + + def supportedReleaseVersions: List[String] = + if scala.util.Properties.isJavaAtLeast("9") then + val jdkVersion = JDK9Reflectors.runtimeVersionMajor(JDK9Reflectors.runtimeVersion()).intValue() + val maxVersion = Math.min(jdkVersion, maxTargetVersion) + (minTargetVersion to maxVersion).toList.map(_.toString) + else List(minTargetVersion).map(_.toString) + + def supportedScalaReleaseVersions: List[String] = + ScalaRelease.values.toList.map(_.show) + + def supportedSourceVersions: List[String] = + SourceVersion.values.toList.map(_.toString) + + def defaultClasspath: String = sys.env.getOrElse("CLASSPATH", ".") + + def defaultPageWidth: Int = { + val defaultWidth = 80 + val columnsVar = System.getenv("COLUMNS") + if columnsVar != null then columnsVar.toInt + else if Properties.isWin then + val ansiconVar = System.getenv("ANSICON") // eg. 
"142x32766 (142x26)" + if ansiconVar != null && ansiconVar.matches("[0-9]+x.*") then + ansiconVar.substring(0, ansiconVar.indexOf("x")).toInt + else defaultWidth + else defaultWidth + } + +trait AllScalaSettings extends CommonScalaSettings, PluginSettings, VerboseSettings, WarningSettings, XSettings, YSettings: + self: SettingGroup => + + /* Path related settings */ + val semanticdbTarget: Setting[String] = PathSetting("-semanticdb-target", "Specify an alternative output directory for SemanticDB files.", "") + + val source: Setting[String] = ChoiceSetting("-source", "source version", "source version", ScalaSettings.supportedSourceVersions, SourceVersion.defaultSourceVersion.toString, aliases = List("--source")) + val uniqid: Setting[Boolean] = BooleanSetting("-uniqid", "Uniquely tag all identifiers in debugging output.", aliases = List("--unique-id")) + val rewrite: Setting[Option[Rewrites]] = OptionSetting[Rewrites]("-rewrite", "When used in conjunction with a `...-migration` source version, rewrites sources to migrate to new version.", aliases = List("--rewrite")) + val fromTasty: Setting[Boolean] = BooleanSetting("-from-tasty", "Compile classes from tasty files. 
The arguments are .tasty or .jar files.", aliases = List("--from-tasty")) + + val newSyntax: Setting[Boolean] = BooleanSetting("-new-syntax", "Require `then` and `do` in control expressions.") + val oldSyntax: Setting[Boolean] = BooleanSetting("-old-syntax", "Require `(...)` around conditions.") + val indent: Setting[Boolean] = BooleanSetting("-indent", "Together with -rewrite, remove {...} syntax when possible due to significant indentation.") + val noindent: Setting[Boolean] = BooleanSetting("-no-indent", "Require classical {...} syntax, indentation is not significant.", aliases = List("-noindent")) + + /* Decompiler settings */ + val printTasty: Setting[Boolean] = BooleanSetting("-print-tasty", "Prints the raw tasty.", aliases = List("--print-tasty")) + val printLines: Setting[Boolean] = BooleanSetting("-print-lines", "Show source code line numbers.", aliases = List("--print-lines")) + + /* Scala.js-related settings */ + val scalajsGenStaticForwardersForNonTopLevelObjects: Setting[Boolean] = BooleanSetting("-scalajs-genStaticForwardersForNonTopLevelObjects", "Generate static forwarders even for non-top-level objects (Scala.js only)") + val scalajsMapSourceURI: Setting[List[String]] = MultiStringSetting("-scalajs-mapSourceURI", "uri1[->uri2]", "rebases source URIs from uri1 to uri2 (or to a relative URI) for source maps (Scala.js only)") + + val projectUrl: Setting[String] = StringSetting ( + "-project-url", + "project repository homepage", + "The source repository of your project.", + "" + ) + + val wikiSyntax: Setting[Boolean] = BooleanSetting("-Xwiki-syntax", "Retains the Scala2 behavior of using Wiki Syntax in Scaladoc.") + + val jvmargs = PrefixSetting("-J", "-J", "Pass directly to the runtime system.") + val defines = PrefixSetting("-Dproperty=value", "-D", "Pass -Dproperty=value directly to the runtime system.") +end AllScalaSettings + +/** Settings shared by compiler and scaladoc */ +trait CommonScalaSettings: + self: SettingGroup => + + /* Path related 
settings */ + val bootclasspath: Setting[String] = PathSetting("-bootclasspath", "Override location of bootstrap class files.", Defaults.scalaBootClassPath, aliases = List("--boot-class-path")) + val extdirs: Setting[String] = PathSetting("-extdirs", "Override location of installed extensions.", Defaults.scalaExtDirs, aliases = List("--extension-directories")) + val javabootclasspath: Setting[String] = PathSetting("-javabootclasspath", "Override java boot classpath.", Defaults.javaBootClassPath, aliases = List("--java-boot-class-path")) + val javaextdirs: Setting[String] = PathSetting("-javaextdirs", "Override java extdirs classpath.", Defaults.javaExtDirs, aliases = List("--java-extension-directories")) + val sourcepath: Setting[String] = PathSetting("-sourcepath", "Specify location(s) of source files.", Defaults.scalaSourcePath, aliases = List("--source-path")) + val sourceroot: Setting[String] = PathSetting("-sourceroot", "Specify workspace root directory.", ".") + + val classpath: Setting[String] = PathSetting("-classpath", "Specify where to find user class files.", ScalaSettings.defaultClasspath, aliases = List("-cp", "--class-path")) + val outputDir: Setting[AbstractFile] = OutputSetting("-d", "directory|jar", "Destination for generated classfiles.", + new PlainDirectory(Directory("."))) + val color: Setting[String] = ChoiceSetting("-color", "mode", "Colored output", List("always", "never"/*, "auto"*/), "always"/* "auto"*/, aliases = List("--color")) + val verbose: Setting[Boolean] = BooleanSetting("-verbose", "Output messages about what the compiler is doing.", aliases = List("--verbose")) + val version: Setting[Boolean] = BooleanSetting("-version", "Print product version and exit.", aliases = List("--version")) + val help: Setting[Boolean] = BooleanSetting("-help", "Print a synopsis of standard options.", aliases = List("--help", "-h")) + val pageWidth: Setting[Int] = IntSetting("-pagewidth", "Set page width", ScalaSettings.defaultPageWidth, aliases = 
List("--page-width")) + val silentWarnings: Setting[Boolean] = BooleanSetting("-nowarn", "Silence all warnings.", aliases = List("--no-warnings")) + + val javaOutputVersion: Setting[String] = ChoiceSetting("-java-output-version", "version", "Compile code with classes specific to the given version of the Java platform available on the classpath and emit bytecode for this version. Corresponds to -release flag in javac.", ScalaSettings.supportedReleaseVersions, "", aliases = List("-release", "--release")) + + val deprecation: Setting[Boolean] = BooleanSetting("-deprecation", "Emit warning and location for usages of deprecated APIs.", aliases = List("--deprecation")) + val feature: Setting[Boolean] = BooleanSetting("-feature", "Emit warning and location for usages of features that should be imported explicitly.", aliases = List("--feature")) + val explain: Setting[Boolean] = BooleanSetting("-explain", "Explain errors in more detail.", aliases = List("--explain")) + // -explain-types setting is necessary for cross compilation, since it is mentioned in sbt-tpolecat, for instance + // it is otherwise subsumed by -explain, and should be dropped as soon as we can. 
+ val explainTypes: Setting[Boolean] = BooleanSetting("-explain-types", "Explain type errors in more detail (deprecated, use -explain instead).", aliases = List("--explain-types", "-explaintypes")) + val unchecked: Setting[Boolean] = BooleanSetting("-unchecked", "Enable additional warnings where generated code depends on assumptions.", initialValue = true, aliases = List("--unchecked")) + val language: Setting[List[String]] = MultiStringSetting("-language", "feature", "Enable one or more language features.", aliases = List("--language")) + + /* Coverage settings */ + val coverageOutputDir = PathSetting("-coverage-out", "Destination for coverage classfiles and instrumentation data.", "", aliases = List("--coverage-out")) + + /* Other settings */ + val encoding: Setting[String] = StringSetting("-encoding", "encoding", "Specify character encoding used by source files.", Properties.sourceEncoding, aliases = List("--encoding")) + val usejavacp: Setting[Boolean] = BooleanSetting("-usejavacp", "Utilize the java.class.path in classpath resolution.", aliases = List("--use-java-class-path")) + val scalajs: Setting[Boolean] = BooleanSetting("-scalajs", "Compile in Scala.js mode (requires scalajs-library.jar on the classpath).", aliases = List("--scalajs")) +end CommonScalaSettings + +/** -P "plugin" settings. Various tools might support plugins. 
*/ +private sealed trait PluginSettings: + self: SettingGroup => + val plugin: Setting[List[String]] = MultiStringSetting ("-Xplugin", "paths", "Load a plugin from each classpath.") + val disable: Setting[List[String]] = MultiStringSetting ("-Xplugin-disable", "plugin", "Disable plugins by name.") + val require: Setting[List[String]] = MultiStringSetting ("-Xplugin-require", "plugin", "Abort if a named plugin is not loaded.") + val showPlugins: Setting[Boolean] = BooleanSetting ("-Xplugin-list", "Print a synopsis of loaded plugins.") + val pluginsDir: Setting[String] = StringSetting ("-Xpluginsdir", "path", "Path to search for plugin archives.", Defaults.scalaPluginPath) + val pluginOptions: Setting[List[String]] = MultiStringSetting ("-P", "plugin:opt", "Pass an option to a plugin, e.g. -P:<plugin>:<opt>") + +/** -V "Verbose" settings */ +private sealed trait VerboseSettings: + self: SettingGroup => + val Vhelp: Setting[Boolean] = BooleanSetting("-V", "Print a synopsis of verbose options.") + val Xprint: Setting[List[String]] = PhasesSetting("-Vprint", "Print out program after", aliases = List("-Xprint")) + val XshowPhases: Setting[Boolean] = BooleanSetting("-Vphases", "List compiler phases.", aliases = List("-Xshow-phases")) + + val Vprofile: Setting[Boolean] = BooleanSetting("-Vprofile", "Show metrics about sources and internal representations to estimate compile-time complexity.") + val VprofileSortedBy = ChoiceSetting("-Vprofile-sorted-by", "key", "Show metrics about sources and internal representations sorted by given column name", List("name", "path", "lines", "tokens", "tasty", "complexity"), "") + val VprofileDetails = IntSetting("-Vprofile-details", "Show metrics about sources and internal representations of the most complex methods", 0) + val VreplMaxPrintElements: Setting[Int] = IntSetting("-Vrepl-max-print-elements", "Number of elements to be printed before output is truncated.", 1000) + val VreplMaxPrintCharacters: Setting[Int] =
IntSetting("-Vrepl-max-print-characters", "Number of characters to be printed before output is truncated.", 50000) + +/** -W "Warnings" settings + */ +private sealed trait WarningSettings: + self: SettingGroup => + val Whelp: Setting[Boolean] = BooleanSetting("-W", "Print a synopsis of warning options.") + val XfatalWarnings: Setting[Boolean] = BooleanSetting("-Werror", "Fail the compilation if there are any warnings.", aliases = List("-Xfatal-warnings")) + + val Wunused: Setting[List[String]] = MultiChoiceSetting( + name = "-Wunused", + helpArg = "warning", + descr = "Enable or disable specific `unused` warnings", + choices = List("nowarn", "all"), + default = Nil + ) + object WunusedHas: + def allOr(s: String)(using Context) = Wunused.value.pipe(us => us.contains("all") || us.contains(s)) + def nowarn(using Context) = allOr("nowarn") + + val Wconf: Setting[List[String]] = MultiStringSetting( + "-Wconf", + "patterns", + default = List(), + descr = + s"""Configure compiler warnings. + |Syntax: -Wconf:<filters>:<action>,<filters>:<action>,... + |multiple <filters> are combined with &, i.e., <filter>&...&<filter> + | + |<filter> + | - Any message: any + | + | - Message categories: cat=deprecation, cat=feature, cat=unchecked + | + | - Message content: msg=regex + | The regex need only match some part of the message, not all of it. + | + | - Message id: id=E129 + | The message id is printed with the warning. + | + | - Message name: name=PureExpressionInStatementPosition + | The message name is printed with the warning in verbose warning mode. + | + |In verbose warning mode the compiler prints matching filters for warnings. + |Verbose mode can be enabled globally using `-Wconf:any:verbose`, or locally + |using the @nowarn annotation (example: `@nowarn("v") def test = try 1`). + | + |<action> + | - error / e + | - warning / w + | - verbose / v (emit warning, show additional help for writing `-Wconf` filters) + | - info / i (infos are not counted as warnings and not affected by `-Werror`) + | - silent / s + | + |The default configuration is empty. + | + |User-defined configurations are added to the left. The leftmost rule matching + |a warning message defines the action. + | + |Examples: + | - change every warning into an error: -Wconf:any:error + | - silence deprecations: -Wconf:cat=deprecation:s + | + |Note: on the command-line you might need to quote configurations containing `*` or `&` + |to prevent the shell from expanding patterns.""".stripMargin, + ) + +/** -X "Extended" or "Advanced" settings */ +private sealed trait XSettings: + self: SettingGroup => + + val Xhelp: Setting[Boolean] = BooleanSetting("-X", "Print a synopsis of advanced options.") + val XnoForwarders: Setting[Boolean] = BooleanSetting("-Xno-forwarders", "Do not generate static forwarders in mirror classes.") + val XmaxInlines: Setting[Int] = IntSetting("-Xmax-inlines", "Maximal number of successive inlines.", 32) + val XmaxInlinedTrees: Setting[Int] = IntSetting("-Xmax-inlined-trees", "Maximal number of inlined trees.", 2_000_000) + val Xmigration: Setting[ScalaVersion] = VersionSetting("-Xmigration", "Warn about constructs whose behavior may have changed since version.") + val XprintTypes: Setting[Boolean] = BooleanSetting("-Xprint-types", "Print tree types (debugging option).") + val XprintDiff: Setting[Boolean] = BooleanSetting("-Xprint-diff", "Print changed parts of the tree since last print.") + val XprintDiffDel: Setting[Boolean] = BooleanSetting("-Xprint-diff-del", "Print changed parts of the tree since last print including deleted parts.") + val XprintInline: Setting[Boolean] = BooleanSetting("-Xprint-inline", "Show where inlined code comes from.") + val XprintSuspension: Setting[Boolean] = BooleanSetting("-Xprint-suspension", "Show when code is suspended until
macros are compiled.") + val Xprompt: Setting[Boolean] = BooleanSetting("-Xprompt", "Display a prompt after each error (debugging option).") + val XreplDisableDisplay: Setting[Boolean] = BooleanSetting("-Xrepl-disable-display", "Do not display definitions in REPL.") + val XverifySignatures: Setting[Boolean] = BooleanSetting("-Xverify-signatures", "Verify generic signatures in generated bytecode.") + val XignoreScala2Macros: Setting[Boolean] = BooleanSetting("-Xignore-scala2-macros", "Ignore errors when compiling code that calls Scala2 macros, these will fail at runtime.") + val XimportSuggestionTimeout: Setting[Int] = IntSetting("-Ximport-suggestion-timeout", "Timeout (in ms) for searching for import suggestions when errors are reported.", 8000) + val Xsemanticdb: Setting[Boolean] = BooleanSetting("-Xsemanticdb", "Store information in SemanticDB.", aliases = List("-Ysemanticdb")) + val XuncheckedJavaOutputVersion: Setting[String] = ChoiceSetting("-Xunchecked-java-output-version", "target", "Emit bytecode for the specified version of the Java platform. This might produce bytecode that will break at runtime. Corresponds to -target flag in javac. 
When on JDK 9+, consider -java-output-version as a safer alternative.", ScalaSettings.supportedTargetVersions, "", aliases = List("-Xtarget", "--Xtarget")) + val XcheckMacros: Setting[Boolean] = BooleanSetting("-Xcheck-macros", "Check some invariants of macro generated code while expanding macros", aliases = List("--Xcheck-macros")) + val XmainClass: Setting[String] = StringSetting("-Xmain-class", "path", "Class for manifest's Main-Class entry (only useful with -d <jar>)", "") + val XimplicitSearchLimit: Setting[Int] = IntSetting("-Ximplicit-search-limit", "Maximal number of expressions to be generated in an implicit search", 50000) + + val XmixinForceForwarders = ChoiceSetting( + name = "-Xmixin-force-forwarders", + helpArg = "mode", + descr = "Generate forwarder methods in classes inhering concrete methods from traits.", + choices = List("true", "junit", "false"), + default = "true") + + object mixinForwarderChoices { + def isTruthy(using Context) = XmixinForceForwarders.value == "true" + def isAtLeastJunit(using Context) = isTruthy || XmixinForceForwarders.value == "junit" + } + + val XmacroSettings: Setting[List[String]] = MultiStringSetting("-Xmacro-settings", "setting1,setting2,..settingN", "List of settings which exposed to the macros") +end XSettings + +/** -Y "Forking" as in forked tongue or "Private" settings */ +private sealed trait YSettings: + self: SettingGroup => + + val Yhelp: Setting[Boolean] = BooleanSetting("-Y", "Print a synopsis of private options.") + val Ycheck: Setting[List[String]] = PhasesSetting("-Ycheck", "Check the tree at the end of") + val YcheckMods: Setting[Boolean] = BooleanSetting("-Ycheck-mods", "Check that symbols and their defining trees have modifiers in sync.") + val Ydebug: Setting[Boolean] = BooleanSetting("-Ydebug", "Increase the quantity of debugging output.") + val YdebugTrace: Setting[Boolean] = BooleanSetting("-Ydebug-trace", "Trace core operations.") + val YdebugFlags: Setting[Boolean] = BooleanSetting("-Ydebug-flags",
"Print all flags of definitions.") + val YdebugMissingRefs: Setting[Boolean] = BooleanSetting("-Ydebug-missing-refs", "Print a stacktrace when a required symbol is missing.") + val YdebugNames: Setting[Boolean] = BooleanSetting("-Ydebug-names", "Show internal representation of names.") + val YdebugPos: Setting[Boolean] = BooleanSetting("-Ydebug-pos", "Show full source positions including spans.") + val YdebugTreeWithId: Setting[Int] = IntSetting("-Ydebug-tree-with-id", "Print the stack trace when the tree with the given id is created.", Int.MinValue) + val YdebugTypeError: Setting[Boolean] = BooleanSetting("-Ydebug-type-error", "Print the stack trace when a TypeError is caught", false) + val YdebugError: Setting[Boolean] = BooleanSetting("-Ydebug-error", "Print the stack trace when any error is caught.", false) + val YdebugUnpickling: Setting[Boolean] = BooleanSetting("-Ydebug-unpickling", "Print the stack trace when an error occurs when reading Tasty.", false) + val YtermConflict: Setting[String] = ChoiceSetting("-Yresolve-term-conflict", "strategy", "Resolve term conflicts", List("package", "object", "error"), "error") + val Ylog: Setting[List[String]] = PhasesSetting("-Ylog", "Log operations during") + val YlogClasspath: Setting[Boolean] = BooleanSetting("-Ylog-classpath", "Output information about what classpath is being applied.") + val YdisableFlatCpCaching: Setting[Boolean] = BooleanSetting("-YdisableFlatCpCaching", "Do not cache flat classpath representation of classpath elements from jars across compiler instances.") + + val Yscala2Unpickler: Setting[String] = StringSetting("-Yscala2-unpickler", "", "Control where we may get Scala 2 symbols from. 
This is either \"always\", \"never\", or a classpath.", "always") + + val YnoImports: Setting[Boolean] = BooleanSetting("-Yno-imports", "Compile without importing scala.*, java.lang.*, or Predef.") + val YnoGenericSig: Setting[Boolean] = BooleanSetting("-Yno-generic-signatures", "Suppress generation of generic signatures for Java.") + val YnoPredef: Setting[Boolean] = BooleanSetting("-Yno-predef", "Compile without importing Predef.") + val Yskip: Setting[List[String]] = PhasesSetting("-Yskip", "Skip") + val Ydumpclasses: Setting[String] = StringSetting("-Ydump-classes", "dir", "Dump the generated bytecode to .class files (useful for reflective compilation that utilizes in-memory classloaders).", "") + val YstopAfter: Setting[List[String]] = PhasesSetting("-Ystop-after", "Stop after", aliases = List("-stop")) // backward compat + val YstopBefore: Setting[List[String]] = PhasesSetting("-Ystop-before", "Stop before") // stop before erasure as long as we have not debugged it fully + val YshowSuppressedErrors: Setting[Boolean] = BooleanSetting("-Yshow-suppressed-errors", "Also show follow-on errors and warnings that are normally suppressed.") + val YdetailedStats: Setting[Boolean] = BooleanSetting("-Ydetailed-stats", "Show detailed internal compiler stats (needs Stats.enabled to be set to true).") + val YkindProjector: Setting[String] = ChoiceSetting("-Ykind-projector", "[underscores, disable]", "Allow `*` as type lambda placeholder to be compatible with kind projector. 
When invoked as -Ykind-projector:underscores will repurpose `_` to be a type parameter placeholder, this will disable usage of underscore as a wildcard.", List("disable", "", "underscores"), "disable") + val YprintPos: Setting[Boolean] = BooleanSetting("-Yprint-pos", "Show tree positions.") + val YprintPosSyms: Setting[Boolean] = BooleanSetting("-Yprint-pos-syms", "Show symbol definitions positions.") + val YnoDeepSubtypes: Setting[Boolean] = BooleanSetting("-Yno-deep-subtypes", "Throw an exception on deep subtyping call stacks.") + val YnoPatmatOpt: Setting[Boolean] = BooleanSetting("-Yno-patmat-opt", "Disable all pattern matching optimizations.") + val YplainPrinter: Setting[Boolean] = BooleanSetting("-Yplain-printer", "Pretty-print using a plain printer.") + val YprintSyms: Setting[Boolean] = BooleanSetting("-Yprint-syms", "When printing trees print info in symbols instead of corresponding info in trees.") + val YprintDebug: Setting[Boolean] = BooleanSetting("-Yprint-debug", "When printing trees, print some extra information useful for debugging.") + val YprintDebugOwners: Setting[Boolean] = BooleanSetting("-Yprint-debug-owners", "When printing trees, print owners of definitions.") + val YprintLevel: Setting[Boolean] = BooleanSetting("-Yprint-level", "print nesting levels of symbols and type variables.") + val YshowPrintErrors: Setting[Boolean] = BooleanSetting("-Yshow-print-errors", "Don't suppress exceptions thrown during tree printing.") + val YtestPickler: Setting[Boolean] = BooleanSetting("-Ytest-pickler", "Self-test for pickling functionality; should be used with -Ystop-after:pickler.") + val YcheckReentrant: Setting[Boolean] = BooleanSetting("-Ycheck-reentrant", "Check that compiled program does not contain vars that can be accessed from a global root.") + val YdropComments: Setting[Boolean] = BooleanSetting("-Ydrop-docs", "Drop documentation when scanning source files.", aliases = List("-Ydrop-comments")) + val YcookComments: Setting[Boolean] = 
BooleanSetting("-Ycook-docs", "Cook the documentation (type check `@usecase`, etc.)", aliases = List("-Ycook-comments")) + val YreadComments: Setting[Boolean] = BooleanSetting("-Yread-docs", "Read documentation from tasty.") + val YforceSbtPhases: Setting[Boolean] = BooleanSetting("-Yforce-sbt-phases", "Run the phases used by sbt for incremental compilation (ExtractDependencies and ExtractAPI) even if the compiler is ran outside of sbt, for debugging.") + val YdumpSbtInc: Setting[Boolean] = BooleanSetting("-Ydump-sbt-inc", "For every compiled foo.scala, output the API representation and dependencies used for sbt incremental compilation in foo.inc, implies -Yforce-sbt-phases.") + val YcheckAllPatmat: Setting[Boolean] = BooleanSetting("-Ycheck-all-patmat", "Check exhaustivity and redundancy of all pattern matching (used for testing the algorithm).") + val YcheckConstraintDeps: Setting[Boolean] = BooleanSetting("-Ycheck-constraint-deps", "Check dependency tracking in constraints (used for testing the algorithm).") + val YretainTrees: Setting[Boolean] = BooleanSetting("-Yretain-trees", "Retain trees for top-level classes, accessible from ClassSymbol#tree") + val YshowTreeIds: Setting[Boolean] = BooleanSetting("-Yshow-tree-ids", "Uniquely tag all tree nodes in debugging output.") + val YfromTastyIgnoreList: Setting[List[String]] = MultiStringSetting("-Yfrom-tasty-ignore-list", "file", "List of `tasty` files in jar files that will not be loaded when using -from-tasty") + val YnoExperimental: Setting[Boolean] = BooleanSetting("-Yno-experimental", "Disable experimental language features") + + val YprofileEnabled: Setting[Boolean] = BooleanSetting("-Yprofile-enabled", "Enable profiling.") + val YprofileDestination: Setting[String] = StringSetting("-Yprofile-destination", "file", "Where to send profiling output - specify a file, default is to the console.", "") + //.withPostSetHook( _ => YprofileEnabled.value = true ) + val YprofileExternalTool: Setting[List[String]] = 
PhasesSetting("-Yprofile-external-tool", "Enable profiling for a phase using an external tool hook. Generally only useful for a single phase.", "typer") + //.withPostSetHook( _ => YprofileEnabled.value = true ) + val YprofileRunGcBetweenPhases: Setting[List[String]] = PhasesSetting("-Yprofile-run-gc", "Run a GC between phases - this allows heap size to be accurate at the expense of more time. Specify a list of phases, or *", "_") + //.withPostSetHook( _ => YprofileEnabled.value = true ) + + // Experimental language features + val YnoKindPolymorphism: Setting[Boolean] = BooleanSetting("-Yno-kind-polymorphism", "Disable kind polymorphism.") + val YexplicitNulls: Setting[Boolean] = BooleanSetting("-Yexplicit-nulls", "Make reference types non-nullable. Nullable types can be expressed with unions: e.g. String|Null.") + val YcheckInit: Setting[Boolean] = BooleanSetting("-Ysafe-init", "Ensure safe initialization of objects") + val YrequireTargetName: Setting[Boolean] = BooleanSetting("-Yrequire-targetName", "Warn if an operator is defined without a @targetName annotation") + val YrecheckTest: Setting[Boolean] = BooleanSetting("-Yrecheck-test", "Run basic rechecking (internal test only)") + val YccDebug: Setting[Boolean] = BooleanSetting("-Ycc-debug", "Used in conjunction with captureChecking language import, debug info for captured references") + val YccNoAbbrev: Setting[Boolean] = BooleanSetting("-Ycc-no-abbrev", "Used in conjunction with captureChecking language import, suppress type abbreviations") + val YlightweightLazyVals: Setting[Boolean] = BooleanSetting("-Ylightweight-lazy-vals", "Use experimental lightweight implementation of lazy vals") + + /** Area-specific debug output */ + val YexplainLowlevel: Setting[Boolean] = BooleanSetting("-Yexplain-lowlevel", "When explaining type errors, show types at a lower level.") + val YnoDoubleBindings: Setting[Boolean] = BooleanSetting("-Yno-double-bindings", "Assert no namedtype is bound twice (should be enabled only if 
program is error-free).") + val YshowVarBounds: Setting[Boolean] = BooleanSetting("-Yshow-var-bounds", "Print type variables with their bounds.") + + val YnoDecodeStacktraces: Setting[Boolean] = BooleanSetting("-Yno-decode-stacktraces", "Show raw StackOverflow stacktraces, instead of decoding them into triggering operations.") + + val Yinstrument: Setting[Boolean] = BooleanSetting("-Yinstrument", "Add instrumentation code that counts allocations and closure creations.") + val YinstrumentDefs: Setting[Boolean] = BooleanSetting("-Yinstrument-defs", "Add instrumentation code that counts method calls; needs -Yinstrument to be set, too.") + + val YforceInlineWhileTyping: Setting[Boolean] = BooleanSetting("-Yforce-inline-while-typing", "Make non-transparent inline methods inline when typing. Emulates the old inlining behavior of 3.0.0-M3.") +end YSettings + diff --git a/tests/pos-with-compiler-cc/dotc/config/ScalaVersion.scala b/tests/pos-with-compiler-cc/dotc/config/ScalaVersion.scala new file mode 100644 index 000000000000..7fdf57478f1a --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/config/ScalaVersion.scala @@ -0,0 +1,188 @@ +/* @author James Iry + */ +package dotty.tools +package dotc.config + +import scala.language.unsafeNulls + +import scala.annotation.internal.sharable +import scala.util.{Try, Success, Failure} + +/** + * Represents a single Scala version in a manner that + * supports easy comparison and sorting. + */ +sealed abstract class ScalaVersion extends Ordered[ScalaVersion] { + def unparse: String +} + +/** + * A scala version that sorts higher than all actual versions + */ +@sharable case object NoScalaVersion extends ScalaVersion { + def unparse: String = "none" + + def compare(that: ScalaVersion): Int = that match { + case NoScalaVersion => 0 + case _ => 1 + } +} + +/** + * A specific Scala version, not one of the magic min/max versions. An SpecificScalaVersion + * may or may not be a released version - i.e. 
this same class is used to represent + * final, release candidate, milestone, and development builds. The build argument is used + * to segregate builds + */ +case class SpecificScalaVersion(major: Int, minor: Int, rev: Int, build: ScalaBuild) extends ScalaVersion { + def unparse: String = s"${major}.${minor}.${rev}.${build.unparse}" + + def compare(that: ScalaVersion): Int = that match { + case SpecificScalaVersion(thatMajor, thatMinor, thatRev, thatBuild) => + // this could be done more cleanly by importing scala.math.Ordering.Implicits, but we have to do these + // comparisons a lot so I'm using brute force direct style code + if (major < thatMajor) -1 + else if (major > thatMajor) 1 + else if (minor < thatMinor) -1 + else if (minor > thatMinor) 1 + else if (rev < thatRev) -1 + else if (rev > thatRev) 1 + else build compare thatBuild + case AnyScalaVersion => 1 + case NoScalaVersion => -1 + } +} + +/** + * A Scala version that sorts lower than all actual versions + */ +@sharable case object AnyScalaVersion extends ScalaVersion { + def unparse: String = "any" + + def compare(that: ScalaVersion): Int = that match { + case AnyScalaVersion => 0 + case _ => -1 + } +} + +/** + * Methods for parsing ScalaVersions + */ +@sharable object ScalaVersion { + private val dot = "\\." + private val dash = "\\-" + private def not(s:String) = s"[^${s}]" + private val R = s"((${not(dot)}*)(${dot}(${not(dot)}*)(${dot}(${not(dash)}*)(${dash}(.*))?)?)?)".r + + def parse(versionString : String): Try[ScalaVersion] = { + def failure = Failure(new NumberFormatException( + s"There was a problem parsing ${versionString}. " + + "Versions should be in the form major[.minor[.revision]] " + + "where each part is a positive number, as in 2.10.1. " + + "The minor and revision parts are optional." 
+ )) + + def toInt(s: String) = s match { + case null | "" => 0 + case _ => s.toInt + } + + def isInt(s: String) = Try(toInt(s)).isSuccess + + import ScalaBuild._ + + def toBuild(s: String) = s match { + case null | "FINAL" => Final + case s if (s.toUpperCase.startsWith("RC") && isInt(s.substring(2))) => RC(toInt(s.substring(2))) + case s if (s.toUpperCase.startsWith("M") && isInt(s.substring(1))) => Milestone(toInt(s.substring(1))) + case _ => Development(s) + } + + try versionString match { + case "" | "any" => Success(AnyScalaVersion) + case "none" => Success(NoScalaVersion) + case R(_, majorS, _, minorS, _, revS, _, buildS) => + Success(SpecificScalaVersion(toInt(majorS), toInt(minorS), toInt(revS), toBuild(buildS))) + case _ => failure + } + catch { + case e: NumberFormatException => failure + } + } + + /** + * The version of the compiler running now + */ + val current: ScalaVersion = parse(util.Properties.versionNumberString).get +} + +/** + * Represents the data after the dash in major.minor.rev-build + */ +abstract class ScalaBuild extends Ordered[ScalaBuild] { + /** + * Return a version of this build information that can be parsed back into the + * same ScalaBuild + */ + def unparse: String +} + +object ScalaBuild { + + /** A development, test, nightly, snapshot or other "unofficial" build + */ + case class Development(id: String) extends ScalaBuild { + def unparse: String = s"-${id}" + + def compare(that: ScalaBuild): Int = that match { + // sorting two development builds based on id is reasonably valid for two versions created with the same schema + // otherwise it's not correct, but since it's impossible to put a total ordering on development build versions + // this is a pragmatic compromise + case Development(thatId) => id compare thatId + // assume a development build is newer than anything else, that's not really true, but good luck + // mapping development build versions to other build types + case _ => 1 + } + } + + /** A final build + */ + case 
object Final extends ScalaBuild { + def unparse: String = "" + + def compare(that: ScalaBuild): Int = that match { + case Final => 0 + // a final is newer than anything other than a development build or another final + case Development(_) => -1 + case _ => 1 + } + } + + /** A candidate for final release + */ + case class RC(n: Int) extends ScalaBuild { + def unparse: String = s"-RC${n}" + + def compare(that: ScalaBuild): Int = that match { + // compare two rcs based on their RC numbers + case RC(thatN) => n - thatN + // an rc is older than anything other than a milestone or another rc + case Milestone(_) => 1 + case _ => -1 + } + } + + /** An intermediate release + */ + case class Milestone(n: Int) extends ScalaBuild { + def unparse: String = s"-M${n}" + + def compare(that: ScalaBuild): Int = that match { + // compare two milestones based on their milestone numbers + case Milestone(thatN) => n - thatN + // a milestone is older than anything other than another milestone + case _ => -1 + } + } +} + diff --git a/tests/pos-with-compiler-cc/dotc/config/Settings.scala b/tests/pos-with-compiler-cc/dotc/config/Settings.scala new file mode 100644 index 000000000000..277833afbd5d --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/config/Settings.scala @@ -0,0 +1,295 @@ +package dotty.tools.dotc +package config + +import scala.language.unsafeNulls + +import core.Contexts._ + +import dotty.tools.io.{AbstractFile, Directory, JarArchive, PlainDirectory} + +import annotation.tailrec +import collection.mutable.ArrayBuffer +import reflect.ClassTag +import scala.util.{Success, Failure} + +object Settings: + + val BooleanTag: ClassTag[Boolean] = ClassTag.Boolean + val IntTag: ClassTag[Int] = ClassTag.Int + val StringTag: ClassTag[String] = ClassTag(classOf[String]) + val ListTag: ClassTag[List[?]] = ClassTag(classOf[List[?]]) + val VersionTag: ClassTag[ScalaVersion] = ClassTag(classOf[ScalaVersion]) + val OptionTag: ClassTag[Option[?]] = ClassTag(classOf[Option[?]]) + val OutputTag: 
ClassTag[AbstractFile] = ClassTag(classOf[AbstractFile]) + + class SettingsState(initialValues: Seq[Any]): + private val values = ArrayBuffer(initialValues: _*) + private var _wasRead: Boolean = false + + override def toString: String = s"SettingsState(values: ${values.toList})" + + def value(idx: Int): Any = + _wasRead = true + values(idx) + + def update(idx: Int, x: Any): SettingsState = + if (_wasRead) then SettingsState(values.toSeq).update(idx, x) + else + values(idx) = x + this + end SettingsState + + case class ArgsSummary( + sstate: SettingsState, + arguments: List[String], + errors: List[String], + warnings: List[String]) { + + def fail(msg: String): Settings.ArgsSummary = + ArgsSummary(sstate, arguments.tail, errors :+ msg, warnings) + + def warn(msg: String): Settings.ArgsSummary = + ArgsSummary(sstate, arguments.tail, errors, warnings :+ msg) + } + + case class Setting[T: ClassTag] private[Settings] ( + name: String, + description: String, + default: T, + helpArg: String = "", + choices: Option[Seq[?]] = None, + prefix: String = "", + aliases: List[String] = Nil, + depends: List[(Setting[?], Any)] = Nil, + propertyClass: Option[Class[?]] = None)(private[Settings] val idx: Int) { + + private var changed: Boolean = false + + def valueIn(state: SettingsState): T = state.value(idx).asInstanceOf[T] + + def updateIn(state: SettingsState, x: Any): SettingsState = x match + case _: T => state.update(idx, x) + case _ => throw IllegalArgumentException(s"found: $x of type ${x.getClass.getName}, required: ${implicitly[ClassTag[T]]}") + + def isDefaultIn(state: SettingsState): Boolean = valueIn(state) == default + + def isMultivalue: Boolean = implicitly[ClassTag[T]] == ListTag + + def legalChoices: String = + choices match { + case Some(xs) if xs.isEmpty => "" + case Some(r: Range) => s"${r.head}..${r.last}" + case Some(xs) => xs.mkString(", ") + case None => "" + } + + def tryToSet(state: ArgsSummary): ArgsSummary = { + val ArgsSummary(sstate, arg :: args, errors, 
warnings) = state: @unchecked + def update(value: Any, args: List[String]): ArgsSummary = + var dangers = warnings + val value1 = + if changed && isMultivalue then + val value0 = value.asInstanceOf[List[String]] + val current = valueIn(sstate).asInstanceOf[List[String]] + value0.filter(current.contains).foreach(s => dangers :+= s"Setting $name set to $s redundantly") + current ++ value0 + else + if changed then dangers :+= s"Flag $name set repeatedly" + value + changed = true + ArgsSummary(updateIn(sstate, value1), args, errors, dangers) + end update + + def fail(msg: String, args: List[String]) = + ArgsSummary(sstate, args, errors :+ msg, warnings) + + def missingArg = + fail(s"missing argument for option $name", args) + + def setString(argValue: String, args: List[String]) = + choices match + case Some(xs) if !xs.contains(argValue) => + fail(s"$argValue is not a valid choice for $name", args) + case _ => + update(argValue, args) + + def setInt(argValue: String, args: List[String]) = + try + val x = argValue.toInt + choices match + case Some(r: Range) if x < r.head || r.last < x => + fail(s"$argValue is out of legal range ${r.head}..${r.last} for $name", args) + case Some(xs) if !xs.contains(x) => + fail(s"$argValue is not a valid choice for $name", args) + case _ => + update(x, args) + catch case _: NumberFormatException => + fail(s"$argValue is not an integer argument for $name", args) + + def doSet(argRest: String) = ((implicitly[ClassTag[T]], args): @unchecked) match { + case (BooleanTag, _) => + update(true, args) + case (OptionTag, _) => + update(Some(propertyClass.get.getConstructor().newInstance()), args) + case (ListTag, _) => + if (argRest.isEmpty) missingArg + else + val strings = argRest.split(",").toList + choices match + case Some(valid) => strings.filterNot(valid.contains) match + case Nil => update(strings, args) + case invalid => fail(s"invalid choice(s) for $name: ${invalid.mkString(",")}", args) + case _ => update(strings, args) + case 
(StringTag, _) if argRest.nonEmpty || choices.exists(_.contains("")) => + setString(argRest, args) + case (StringTag, arg2 :: args2) => + if (arg2 startsWith "-") missingArg + else setString(arg2, args2) + case (OutputTag, arg :: args) => + val path = Directory(arg) + val isJar = path.extension == "jar" + if (!isJar && !path.isDirectory) + fail(s"'$arg' does not exist or is not a directory or .jar file", args) + else { + val output = if (isJar) JarArchive.create(path) else new PlainDirectory(path) + update(output, args) + } + case (IntTag, args) if argRest.nonEmpty => + setInt(argRest, args) + case (IntTag, arg2 :: args2) => + setInt(arg2, args2) + case (VersionTag, _) => + ScalaVersion.parse(argRest) match { + case Success(v) => update(v, args) + case Failure(ex) => fail(ex.getMessage, args) + } + case (_, Nil) => + missingArg + } + + def matches(argName: String) = (name :: aliases).exists(_ == argName) + + if (prefix != "" && arg.startsWith(prefix)) + doSet(arg drop prefix.length) + else if (prefix == "" && matches(arg.takeWhile(_ != ':'))) + doSet(arg.dropWhile(_ != ':').drop(1)) + else + state + } + } + + object Setting: + extension [T](setting: Setting[T]) + def value(using Context): T = setting.valueIn(ctx.settingsState) + def update(x: T)(using Context): SettingsState = setting.updateIn(ctx.settingsState, x) + def isDefault(using Context): Boolean = setting.isDefaultIn(ctx.settingsState) + + class SettingGroup { + + private val _allSettings = new ArrayBuffer[Setting[?]] + def allSettings: Seq[Setting[?]] = _allSettings.toSeq + + def defaultState: SettingsState = new SettingsState(allSettings map (_.default)) + + def userSetSettings(state: SettingsState): Seq[Setting[?]] = + allSettings filterNot (_.isDefaultIn(state)) + + def toConciseString(state: SettingsState): String = + userSetSettings(state).mkString("(", " ", ")") + + private def checkDependencies(state: ArgsSummary): ArgsSummary = + 
userSetSettings(state.sstate).foldLeft(state)(checkDependenciesOfSetting) + + private def checkDependenciesOfSetting(state: ArgsSummary, setting: Setting[?]) = + setting.depends.foldLeft(state) { (s, dep) => + val (depSetting, reqValue) = dep + if (depSetting.valueIn(state.sstate) == reqValue) s + else s.fail(s"incomplete option ${setting.name} (requires ${depSetting.name})") + } + + /** Iterates over the arguments applying them to settings where applicable. + * Then verifies setting dependencies are met. + * + * This takes a boolean indicating whether to keep + * processing if an argument is seen which is not a command line option. + * This is an expedience for the moment so that you can say + * + * scalac -d /tmp foo.scala -optimise + * + * while also allowing + * + * scala Program opt opt + * + * to get their arguments. + */ + @tailrec + final def processArguments(state: ArgsSummary, processAll: Boolean, skipped: List[String]): ArgsSummary = + def stateWithArgs(args: List[String]) = ArgsSummary(state.sstate, args, state.errors, state.warnings) + state.arguments match + case Nil => + checkDependencies(stateWithArgs(skipped)) + case "--" :: args => + checkDependencies(stateWithArgs(skipped ++ args)) + case x :: _ if x startsWith "-" => + @tailrec def loop(settings: List[Setting[?]]): ArgsSummary = settings match + case setting :: settings1 => + val state1 = setting.tryToSet(state) + if state1 ne state then state1 + else loop(settings1) + case Nil => + state.warn(s"bad option '$x' was ignored") + processArguments(loop(allSettings.toList), processAll, skipped) + case arg :: args => + if processAll then processArguments(stateWithArgs(args), processAll, skipped :+ arg) + else state + end processArguments + + def processArguments(arguments: List[String], processAll: Boolean, settingsState: SettingsState = defaultState): ArgsSummary = + processArguments(ArgsSummary(settingsState, arguments, Nil, Nil), processAll, Nil) + + def publish[T](settingf: Int => Setting[T]): 
Setting[T] = { + val setting = settingf(_allSettings.length) + _allSettings += setting + setting + } + + def BooleanSetting(name: String, descr: String, initialValue: Boolean = false, aliases: List[String] = Nil): Setting[Boolean] = + publish(Setting(name, descr, initialValue, aliases = aliases)) + + def StringSetting(name: String, helpArg: String, descr: String, default: String, aliases: List[String] = Nil): Setting[String] = + publish(Setting(name, descr, default, helpArg, aliases = aliases)) + + def ChoiceSetting(name: String, helpArg: String, descr: String, choices: List[String], default: String, aliases: List[String] = Nil): Setting[String] = + publish(Setting(name, descr, default, helpArg, Some(choices), aliases = aliases)) + + def MultiChoiceSetting(name: String, helpArg: String, descr: String, choices: List[String], default: List[String], aliases: List[String] = Nil): Setting[List[String]] = + publish(Setting(name, descr, default, helpArg, Some(choices), aliases = aliases)) + + def IntSetting(name: String, descr: String, default: Int, aliases: List[String] = Nil): Setting[Int] = + publish(Setting(name, descr, default, aliases = aliases)) + + def IntChoiceSetting(name: String, descr: String, choices: Seq[Int], default: Int): Setting[Int] = + publish(Setting(name, descr, default, choices = Some(choices))) + + def MultiStringSetting(name: String, helpArg: String, descr: String, default: List[String] = Nil, aliases: List[String] = Nil): Setting[List[String]] = + publish(Setting(name, descr, default, helpArg, aliases = aliases)) + + def OutputSetting(name: String, helpArg: String, descr: String, default: AbstractFile): Setting[AbstractFile] = + publish(Setting(name, descr, default, helpArg)) + + def PathSetting(name: String, descr: String, default: String, aliases: List[String] = Nil): Setting[String] = + publish(Setting(name, descr, default, aliases = aliases)) + + def PhasesSetting(name: String, descr: String, default: String = "", aliases: List[String] = 
Nil): Setting[List[String]] = + publish(Setting(name, descr, if (default.isEmpty) Nil else List(default), aliases = aliases)) + + def PrefixSetting(name: String, pre: String, descr: String): Setting[List[String]] = + publish(Setting(name, descr, Nil, prefix = pre)) + + def VersionSetting(name: String, descr: String, default: ScalaVersion = NoScalaVersion): Setting[ScalaVersion] = + publish(Setting(name, descr, default)) + + def OptionSetting[T: ClassTag](name: String, descr: String, aliases: List[String] = Nil): Setting[Option[T]] = + publish(Setting(name, descr, None, propertyClass = Some(implicitly[ClassTag[T]].runtimeClass), aliases = aliases)) + } +end Settings diff --git a/tests/pos-with-compiler-cc/dotc/config/SourceVersion.scala b/tests/pos-with-compiler-cc/dotc/config/SourceVersion.scala new file mode 100644 index 000000000000..4b9b1b247856 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/config/SourceVersion.scala @@ -0,0 +1,32 @@ +package dotty.tools +package dotc +package config + +import core.Decorators.* +import util.Property + +enum SourceVersion: + case `3.0-migration`, `3.0`, `3.1` // Note: do not add `3.1-migration` here, 3.1 is the same language as 3.0. + case `3.2-migration`, `3.2` + case `3.3-migration`, `3.3` + case `future-migration`, `future` + + val isMigrating: Boolean = toString.endsWith("-migration") + + def stable: SourceVersion = + if isMigrating then SourceVersion.values(ordinal + 1) else this + + def isAtLeast(v: SourceVersion) = stable.ordinal >= v.ordinal + +object SourceVersion extends Property.Key[SourceVersion]: + def defaultSourceVersion = `3.3` + + /** language versions that may appear in a language import, are deprecated, but not removed from the standard library. */ + val illegalSourceVersionNames = List("3.1-migration").map(_.toTermName) + + /** language versions that the compiler recognises. 
*/ + val validSourceVersionNames = values.toList.map(_.toString.toTermName) + + /** All source versions that can be recognised from a language import. e.g. `import language.3.1` */ + val allSourceVersionNames = validSourceVersionNames ::: illegalSourceVersionNames +end SourceVersion diff --git a/tests/pos-with-compiler-cc/dotc/config/WrappedProperties.scala b/tests/pos-with-compiler-cc/dotc/config/WrappedProperties.scala new file mode 100644 index 000000000000..5b79432a97e7 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/config/WrappedProperties.scala @@ -0,0 +1,42 @@ +package dotty.tools +package dotc +package config + +import scala.language.unsafeNulls + +/** For placing a wrapper function around property functions. + * Motivated by places like google app engine throwing exceptions + * on property lookups. + */ +trait WrappedProperties extends PropertiesTrait { + def wrap[T](body: => T): Option[T] + + protected def propCategory: String = "wrapped" + protected def pickJarBasedOn: Class[?] 
= this.getClass + + override def propIsSet(name: String): Boolean = wrap(super.propIsSet(name)) exists (x => x) + override def propOrElse(name: String, alt: String): String = wrap(super.propOrElse(name, alt)) getOrElse alt + override def setProp(name: String, value: String): String = wrap(super.setProp(name, value)).orNull + override def clearProp(name: String): String = wrap(super.clearProp(name)).orNull + override def envOrElse(name: String, alt: String): String = wrap(super.envOrElse(name, alt)) getOrElse alt + override def envOrNone(name: String): Option[String] = wrap(super.envOrNone(name)).flatten + + def systemProperties: Iterator[(String, String)] = { + import scala.jdk.CollectionConverters._ + wrap(System.getProperties.asScala.iterator) getOrElse Iterator.empty + } +} + +object WrappedProperties { + object AccessControl extends WrappedProperties { + def wrap[T](body: => T): Option[T] = + try Some(body) + catch { + // the actual exception we are concerned with is AccessControlException, + // but that's deprecated on JDK 17, so catching its superclass is a convenient + // way to avoid a deprecation warning + case _: SecurityException => + None + } + } +} diff --git a/tests/pos-with-compiler-cc/dotc/core/Annotations.scala b/tests/pos-with-compiler-cc/dotc/core/Annotations.scala new file mode 100644 index 000000000000..f307d4a36697 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/core/Annotations.scala @@ -0,0 +1,274 @@ +package dotty.tools +package dotc +package core + +import Symbols._, Types._, Contexts._, Constants._ +import dotty.tools.dotc.ast.tpd, tpd.* +import util.Spans.Span +import printing.{Showable, Printer} +import printing.Texts.Text +import annotation.internal.sharable +import language.experimental.pureFunctions +import annotation.retains + +object Annotations { + + def annotClass(tree: Tree)(using Context) = + if (tree.symbol.isConstructor) tree.symbol.owner + else tree.tpe.typeSymbol + + abstract class Annotation extends Showable, caps.Pure 
{ + + def tree(using Context): Tree + + def symbol(using Context): Symbol = annotClass(tree) + + def hasSymbol(sym: Symbol)(using Context) = symbol == sym + + def matches(cls: Symbol)(using Context): Boolean = symbol.derivesFrom(cls) + + def appliesToModule: Boolean = true // for now; see remark in SymDenotations + + def derivedAnnotation(tree: Tree)(using Context): Annotation = + if (tree eq this.tree) this else Annotation(tree) + + /** All arguments to this annotation in a single flat list */ + def arguments(using Context): List[Tree] = tpd.allArguments(tree) + + def argument(i: Int)(using Context): Option[Tree] = { + val args = arguments + if (i < args.length) Some(args(i)) else None + } + def argumentConstant(i: Int)(using Context): Option[Constant] = + for (case ConstantType(c) <- argument(i) map (_.tpe.widenTermRefExpr.normalized)) yield c + + def argumentConstantString(i: Int)(using Context): Option[String] = + for (case Constant(s: String) <- argumentConstant(i)) yield s + + /** The tree evaluaton is in progress. */ + def isEvaluating: Boolean = false + + /** The tree evaluation has finished. */ + def isEvaluated: Boolean = true + + /** Normally, applies a type map to all tree nodes of this annotation, but can + * be overridden. Returns EmptyAnnotation if type type map produces a range + * type, since ranges cannot be types of trees. + */ + def mapWith(tm: TypeMap @retains(caps.cap))(using Context) = + val args = arguments + if args.isEmpty then this + else + val findDiff = new TreeAccumulator[Type]: + def apply(x: Type, tree: Tree)(using Context): Type = + if tm.isRange(x) then x + else + val tp1 = tm(tree.tpe) + foldOver(if tp1 frozen_=:= tree.tpe then x else tp1, tree) + val diff = findDiff(NoType, args) + if tm.isRange(diff) then EmptyAnnotation + else if diff.exists then derivedAnnotation(tm.mapOver(tree)) + else this + + /** Does this annotation refer to a parameter of `tl`? 
*/ + def refersToParamOf(tl: TermLambda)(using Context): Boolean = + val args = arguments + if args.isEmpty then false + else tree.existsSubTree { + case id: Ident => id.tpe.stripped match + case TermParamRef(tl1, _) => tl eq tl1 + case _ => false + case _ => false + } + + /** A string representation of the annotation. Overridden in BodyAnnotation. + */ + def toText(printer: Printer): Text = printer.annotText(this) + + def ensureCompleted(using Context): Unit = tree + + def sameAnnotation(that: Annotation)(using Context): Boolean = + symbol == that.symbol && tree.sameTree(that.tree) + + /** Operations for hash-consing, can be overridden */ + def hash: Int = System.identityHashCode(this) + def eql(that: Annotation) = this eq that + } + + case class ConcreteAnnotation(t: Tree) extends Annotation: + def tree(using Context): Tree = t + + abstract class LazyAnnotation extends Annotation { + protected var mySym: Symbol | (Context ?-> Symbol) | Null + override def symbol(using parentCtx: Context): Symbol = + assert(mySym != null) + mySym match { + case symFn: (Context ?-> Symbol) @unchecked => + mySym = null + mySym = atPhaseBeforeTransforms(symFn) + // We should always produce the same annotation tree, no matter when the + // annotation is evaluated. 
Setting the phase to a pre-transformation phase + // seems to be enough to ensure this (note that after erasure, `ctx.typer` + // will be the Erasure typer, but that doesn't seem to affect the annotation + // trees we create, so we leave it as is) + case sym: Symbol if sym.defRunId != parentCtx.runId => + mySym = sym.denot.current.symbol + case _ => + } + mySym.asInstanceOf[Symbol] + + protected var myTree: Tree | (Context ?-> Tree) | Null + def tree(using Context): Tree = + assert(myTree != null) + myTree match { + case treeFn: (Context ?-> Tree) @unchecked => + myTree = null + myTree = atPhaseBeforeTransforms(treeFn) + case _ => + } + myTree.asInstanceOf[Tree] + + override def isEvaluating: Boolean = myTree == null + override def isEvaluated: Boolean = myTree.isInstanceOf[Tree @unchecked] + } + + class DeferredSymAndTree(symFn: Context ?-> Symbol, treeFn: Context ?-> Tree) + extends LazyAnnotation: + protected var mySym: Symbol | (Context ?-> Symbol) | Null = ctx ?=> symFn(using ctx) + protected var myTree: Tree | (Context ?-> Tree) | Null = ctx ?=> treeFn(using ctx) + + /** An annotation indicating the body of a right-hand side, + * typically of an inline method. 
Treated specially in + * pickling/unpickling and TypeTreeMaps + */ + abstract class BodyAnnotation extends Annotation { + override def symbol(using Context): ClassSymbol = defn.BodyAnnot + override def derivedAnnotation(tree: Tree)(using Context): Annotation = + if (tree eq this.tree) this else ConcreteBodyAnnotation(tree) + override def arguments(using Context): List[Tree] = Nil + override def ensureCompleted(using Context): Unit = () + override def toText(printer: Printer): Text = "@Body" + } + + class ConcreteBodyAnnotation(body: Tree) extends BodyAnnotation { + def tree(using Context): Tree = body + } + + abstract class LazyBodyAnnotation extends BodyAnnotation { + // Copy-pasted from LazyAnnotation to avoid having to turn it into a trait + protected var myTree: Tree | (Context ?-> Tree) | Null + def tree(using Context): Tree = + assert(myTree != null) + myTree match { + case treeFn: (Context ?-> Tree) @unchecked => + myTree = null + myTree = atPhaseBeforeTransforms(treeFn) + case _ => + } + myTree.asInstanceOf[Tree] + + override def isEvaluating: Boolean = myTree == null + override def isEvaluated: Boolean = myTree.isInstanceOf[Tree @unchecked] + } + + object LazyBodyAnnotation { + def apply(bodyFn: Context ?-> Tree): LazyBodyAnnotation = + new LazyBodyAnnotation: + protected var myTree: Tree | (Context ?-> Tree) | Null = ctx ?=> bodyFn(using ctx) + } + + object Annotation { + + def apply(tree: Tree): ConcreteAnnotation = ConcreteAnnotation(tree) + + def apply(cls: ClassSymbol)(using Context): Annotation = + apply(cls, Nil) + + def apply(cls: ClassSymbol, arg: Tree)(using Context): Annotation = + apply(cls, arg :: Nil) + + def apply(cls: ClassSymbol, arg1: Tree, arg2: Tree)(using Context): Annotation = + apply(cls, arg1 :: arg2 :: Nil) + + def apply(cls: ClassSymbol, args: List[Tree])(using Context): Annotation = + apply(cls.typeRef, args) + + def apply(atp: Type, arg: Tree)(using Context): Annotation = + apply(atp, arg :: Nil) + + def apply(atp: Type, arg1: 
Tree, arg2: Tree)(using Context): Annotation = + apply(atp, arg1 :: arg2 :: Nil) + + def apply(atp: Type, args: List[Tree])(using Context): Annotation = + apply(New(atp, args)) + + /** Create an annotation where the tree is computed lazily. */ + def deferred(sym: Symbol)(treeFn: Context ?-> Tree): Annotation = + new LazyAnnotation { + protected var myTree: Tree | (Context ?-> Tree) | Null = ctx ?=> treeFn(using ctx) + protected var mySym: Symbol | (Context ?-> Symbol) | Null = sym + } + + /** Create an annotation where the symbol and the tree are computed lazily. */ + def deferredSymAndTree(symFn: Context ?-> Symbol)(treeFn: Context ?-> Tree): Annotation = + DeferredSymAndTree(symFn, treeFn) + + /** Extractor for child annotations */ + object Child { + + /** A deferred annotation to the result of a given child computation */ + def later(delayedSym: Context ?-> Symbol, span: Span)(using Context): Annotation = { + def makeChildLater(using Context) = { + val sym = delayedSym + New(defn.ChildAnnot.typeRef.appliedTo(sym.owner.thisType.select(sym.name, sym)), Nil) + .withSpan(span) + } + deferred(defn.ChildAnnot)(makeChildLater) + } + + /** A regular, non-deferred Child annotation */ + def apply(sym: Symbol, span: Span)(using Context): Annotation = later(sym, span) + + def unapply(ann: Annotation)(using Context): Option[Symbol] = + if (ann.symbol == defn.ChildAnnot) { + val AppliedType(_, (arg: NamedType) :: Nil) = ann.tree.tpe: @unchecked + Some(arg.symbol) + } + else None + } + + def makeSourceFile(path: String)(using Context): Annotation = + apply(defn.SourceFileAnnot, Literal(Constant(path))) + } + + @sharable val EmptyAnnotation = Annotation(EmptyTree) + + def ThrowsAnnotation(cls: ClassSymbol)(using Context): Annotation = { + val tref = cls.typeRef + Annotation(defn.ThrowsAnnot.typeRef.appliedTo(tref), Ident(tref)) + } + + /** Extracts the type of the thrown exception from an annotation. 
+ * + * Supports both "old-style" `@throws(classOf[Exception])` + * as well as "new-style" `@throws[Exception]("cause")` annotations. + */ + object ThrownException { + def unapply(a: Annotation)(using Context): Option[Type] = + if (a.symbol ne defn.ThrowsAnnot) + None + else a.argumentConstant(0) match { + // old-style: @throws(classOf[Exception]) (which is throws[T](classOf[Exception])) + case Some(Constant(tpe: Type)) => + Some(tpe) + // new-style: @throws[Exception], @throws[Exception]("cause") + case _ => + stripApply(a.tree) match { + case TypeApply(_, List(tpt)) => + Some(tpt.tpe) + case _ => + None + } + } + } +} diff --git a/tests/pos-with-compiler-cc/dotc/core/Atoms.scala b/tests/pos-with-compiler-cc/dotc/core/Atoms.scala new file mode 100644 index 000000000000..bcaaf6794107 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/core/Atoms.scala @@ -0,0 +1,36 @@ +package dotty.tools +package dotc +package core + +import Types._ + +/** Indicates the singleton types that a type must or may consist of. + * @param lo The lower bound: singleton types in this set are guaranteed + * to be in the carrier type. + * @param hi The upper bound: all singleton types in the carrier type are + * guaranteed to be in this set + * If the underlying type of a singleton type is another singleton type, + * only the latter type ends up in the sets. 
+ */ +enum Atoms: + case Range(lo: Set[Type], hi: Set[Type]) + case Unknown + + def & (that: Atoms): Atoms = this match + case Range(lo1, hi1) => + that match + case Range(lo2, hi2) => Range(lo1 & lo2, hi1 & hi2) + case Unknown => Range(Set.empty, hi1) + case Unknown => + that match + case Range(lo2, hi2) => Range(Set.empty, hi2) + case Unknown => Unknown + + def | (that: Atoms): Atoms = this match + case Range(lo1, hi1) => + that match + case Range(lo2, hi2) => Range(lo1 | lo2, hi1 | hi2) + case Unknown => Unknown + case Unknown => Unknown + +end Atoms diff --git a/tests/pos-with-compiler-cc/dotc/core/CheckRealizable.scala b/tests/pos-with-compiler-cc/dotc/core/CheckRealizable.scala new file mode 100644 index 000000000000..47fa84b467d8 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/core/CheckRealizable.scala @@ -0,0 +1,216 @@ +package dotty.tools +package dotc +package core + +import Contexts._, Types._, Symbols._, Names._, Flags._ +import Denotations.SingleDenotation +import Decorators._ +import collection.mutable +import config.SourceVersion.future +import config.Feature.sourceVersion +import annotation.constructorOnly + +/** Realizability status */ +object CheckRealizable { + + sealed abstract class Realizability(val msg: String) extends caps.Pure { + def andAlso(other: => Realizability): Realizability = + if (this == Realizable) other else this + def mapError(f: Realizability -> Context ?-> Realizability)(using Context): Realizability = + if (this == Realizable) this else f(this) + } + + object Realizable extends Realizability("") + + object NotConcrete extends Realizability(" is not a concrete type") + + class NotFinal(sym: Symbol)(using @constructorOnly ctx: Context) + extends Realizability(i" refers to nonfinal $sym") + + class HasProblemBounds(name: Name, info: Type)(using @constructorOnly ctx: Context) + extends Realizability(i" has a member $name with possibly conflicting bounds ${info.bounds.lo} <: ... 
<: ${info.bounds.hi}") + + class HasProblemBaseArg(typ: Type, argBounds: TypeBounds)(using @constructorOnly ctx: Context) + extends Realizability(i" has a base type $typ with possibly conflicting parameter bounds ${argBounds.lo} <: ... <: ${argBounds.hi}") + + class HasProblemBase(base1: Type, base2: Type)(using @constructorOnly ctx: Context) + extends Realizability(i" has conflicting base types $base1 and $base2") + + class HasProblemField(fld: SingleDenotation, problem: Realizability)(using @constructorOnly ctx: Context) + extends Realizability(i" has a member $fld which is not a legal path\nsince ${fld.symbol.name}: ${fld.info}${problem.msg}") + + class ProblemInUnderlying(tp: Type, problem: Realizability)(using @constructorOnly ctx: Context) + extends Realizability(i"s underlying type ${tp}${problem.msg}") { + assert(problem != Realizable) + } + + def realizability(tp: Type)(using Context): Realizability = + new CheckRealizable().realizability(tp) + + def boundsRealizability(tp: Type)(using Context): Realizability = + new CheckRealizable().boundsRealizability(tp) + + private val LateInitializedFlags = Lazy | Erased +} + +/** Compute realizability status. + * + * A type T is realizable iff it is inhabited by non-null values. This ensures that its type members have good bounds + * (in the sense from DOT papers). A type projection T#L is legal if T is realizable, and can be understood as + * Scala 2's `v.L forSome { val v: T }`. + * + * In general, a realizable type can have multiple inhabitants, hence it need not be stable (in the sense of + * Type.isStable). + */ +class CheckRealizable(using Context) { + import CheckRealizable._ + + /** A set of all fields that have already been checked. Used + * to avoid infinite recursions when analyzing recursive types. + */ + private val checkedFields: mutable.Set[Symbol] = mutable.LinkedHashSet[Symbol]() + + /** Is symbol's definitition a lazy or erased val? 
+ * (note we exclude modules here, because their realizability is ensured separately) + */ + private def isLateInitialized(sym: Symbol) = sym.isOneOf(LateInitializedFlags, butNot = Module) + + /** The realizability status of given type `tp`*/ + def realizability(tp: Type): Realizability = tp.dealias match { + /* + * A `TermRef` for a path `p` is realizable if + * - `p`'s type is stable and realizable, or + * - its underlying path is idempotent (that is, *stable*), total, and not null. + * We don't check yet the "not null" clause: that will require null-safety checking. + * + * We assume that stability of tp.prefix is checked elsewhere, since that's necessary for the path to be legal in + * the first place. + */ + case tp: TermRef => + val sym = tp.symbol + lazy val tpInfoRealizable = realizability(tp.info) + if (sym.is(StableRealizable)) realizability(tp.prefix) + else { + val r = + if (sym.isStableMember && !isLateInitialized(sym)) + // it's realizable because we know that a value of type `tp` has been created at run-time + Realizable + else if (!sym.isEffectivelyFinal) + // it's potentially not realizable since it might be overridden with a member of nonrealizable type + new NotFinal(sym) + else + // otherwise we need to look at the info to determine realizability + // roughly: it's realizable if the info does not have bad bounds + tpInfoRealizable.mapError(r => new ProblemInUnderlying(tp, r)) + r andAlso { + if (sym.isStableMember) sym.setFlag(StableRealizable) // it's known to be stable and realizable + realizability(tp.prefix) + } mapError { r => + // A mutable path is in fact stable and realizable if it has a realizable singleton type. 
+ if (tp.info.isStable && tpInfoRealizable == Realizable) { + sym.setFlag(StableRealizable) + Realizable + } + else r + } + } + case _: SingletonType | NoPrefix => + Realizable + case tp => + def isConcrete(tp: Type): Boolean = tp.dealias match { + case tp: TypeRef => tp.symbol.isClass + case tp: TypeParamRef => false + case tp: TypeProxy => isConcrete(tp.underlying) + case tp: AndType => isConcrete(tp.tp1) && isConcrete(tp.tp2) + case tp: OrType => isConcrete(tp.tp1) && isConcrete(tp.tp2) + case _ => false + } + if (!isConcrete(tp)) NotConcrete + else boundsRealizability(tp).andAlso(memberRealizability(tp)) + } + + private def refinedNames(tp: Type): Set[Name] = tp.dealias match { + case tp: RefinedType => refinedNames(tp.parent) + tp.refinedName + case tp: AndType => refinedNames(tp.tp1) ++ refinedNames(tp.tp2) + case tp: OrType => refinedNames(tp.tp1) ++ refinedNames(tp.tp2) + case tp: TypeProxy => refinedNames(tp.superType) + case _ => Set.empty + } + + /** `Realizable` if `tp` has good bounds, a `HasProblem...` instance + * pointing to a bad bounds member otherwise. "Has good bounds" means: + * + * - all type members have good bounds (except for opaque helpers) + * - all refinements of the underlying type have good bounds (except for opaque companions) + * - all base types are class types, and if their arguments are wildcards + * they have good bounds. + * - base types do not appear in multiple instances with different arguments. + * (depending on the simplification scheme for AndTypes employed, this could + * also lead to base types with bad bounds). 
+ */ + private def boundsRealizability(tp: Type) = { + + val memberProblems = withMode(Mode.CheckBoundsOrSelfType) { + for { + mbr <- tp.nonClassTypeMembers + if !(mbr.info.loBound <:< mbr.info.hiBound) + } + yield new HasProblemBounds(mbr.name, mbr.info) + } + + val refinementProblems = withMode(Mode.CheckBoundsOrSelfType) { + for { + name <- refinedNames(tp) + if (name.isTypeName) + mbr <- tp.member(name).alternatives + if !(mbr.info.loBound <:< mbr.info.hiBound) + } + yield + new HasProblemBounds(name, mbr.info) + } + + def baseTypeProblems(base: Type) = base match { + case AndType(base1, base2) => + new HasProblemBase(base1, base2) :: Nil + case base => + base.argInfos.collect { + case bounds @ TypeBounds(lo, hi) if !(lo <:< hi) => + new HasProblemBaseArg(base, bounds) + } + } + val baseProblems = + tp.baseClasses.map(_.baseTypeOf(tp)).flatMap(baseTypeProblems) + + baseProblems.foldLeft( + refinementProblems.foldLeft( + memberProblems.foldLeft( + Realizable: Realizability)(_ andAlso _))(_ andAlso _))(_ andAlso _) + } + + /** `Realizable` if all of `tp`'s non-strict fields have realizable types, + * a `HasProblemField` instance pointing to a bad field otherwise. + */ + private def memberRealizability(tp: Type) = { + def checkField(sofar: Realizability, fld: SingleDenotation): Realizability = + sofar andAlso { + if (checkedFields.contains(fld.symbol) || fld.symbol.isOneOf(Private | Mutable | LateInitializedFlags)) + // if field is private it cannot be part of a visible path + // if field is mutable it cannot be part of a path + // if field is lazy or erased it does not need to be initialized when the owning object is + // so in all cases the field does not influence realizability of the enclosing object. + Realizable + else { + checkedFields += fld.symbol + realizability(fld.info).mapError(r => new HasProblemField(fld, r)) + } + } + if sourceVersion.isAtLeast(future) then + // check fields only from version 3.x. 
+ // Reason: An embedded field could well be nullable, which means it + // should not be part of a path and need not be checked; but we cannot recognize + // this situation until we have a typesystem that tracks nullability. + tp.fields.foldLeft(Realizable: Realizability)(checkField) + else + Realizable + } +} diff --git a/tests/pos-with-compiler-cc/dotc/core/Comments.scala b/tests/pos-with-compiler-cc/dotc/core/Comments.scala new file mode 100644 index 000000000000..1b20b75ad8ac --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/core/Comments.scala @@ -0,0 +1,462 @@ +package dotty.tools +package dotc +package core + +import scala.language.unsafeNulls + +import ast.{ untpd, tpd } +import Symbols._, Contexts._ +import util.{SourceFile, ReadOnlyMap} +import util.Spans._ +import util.CommentParsing._ +import util.Property.Key +import parsing.Parsers.Parser +import reporting.ProperDefinitionNotFound + +object Comments { + val ContextDoc: Key[ContextDocstrings] = new Key[ContextDocstrings] + + /** Decorator for getting docbase out of context */ + given CommentsContext: AnyRef with + extension (c: Context) def docCtx: Option[ContextDocstrings] = c.property(ContextDoc) + + /** Context for Docstrings, contains basic functionality for getting + * docstrings via `Symbol` and expanding templates + */ + class ContextDocstrings { + + private val _docstrings: MutableSymbolMap[Comment] = MutableSymbolMap[Comment](512) // FIXME: 2nd [Comment] needed or "not a class type" + + val templateExpander: CommentExpander = new CommentExpander + + def docstrings: ReadOnlyMap[Symbol, Comment] = _docstrings + + def docstring(sym: Symbol): Option[Comment] = _docstrings.get(sym) + + def addDocstring(sym: Symbol, doc: Option[Comment]): Unit = + doc.foreach(d => _docstrings.update(sym, d)) + } + + /** + * A `Comment` contains the unformatted docstring, it's position and potentially more + * information that is populated when the comment is "cooked". 
+ * + * @param span The position span of this `Comment`. + * @param raw The raw comment, as seen in the source code, without any expansion. + * @param expanded If this comment has been expanded, it's expansion, otherwise `None`. + * @param usecases The usecases for this comment. + */ + final case class Comment( + span: Span, + raw: String, + expanded: Option[String], + usecases: List[UseCase], + variables: Map[String, String], + ) { + + /** Has this comment been cooked or expanded? */ + def isExpanded: Boolean = expanded.isDefined + + /** The body of this comment, without the `@usecase` and `@define` sections, after expansion. */ + lazy val expandedBody: Option[String] = + expanded.map(removeSections(_, "@usecase", "@define")) + + val isDocComment: Boolean = Comment.isDocComment(raw) + + /** + * Expands this comment by giving its content to `f`, and then parsing the `@usecase` sections. + * Typically, `f` will take care of expanding the variables. + * + * @param f The expansion function. + * @return The expanded comment, with the `usecases` populated. + */ + def expand(f: String => String)(using Context): Comment = { + val expandedComment = f(raw) + val useCases = Comment.parseUsecases(expandedComment, span) + Comment(span, raw, Some(expandedComment), useCases, Map.empty) + } + } + + object Comment { + + def isDocComment(comment: String): Boolean = comment.startsWith("/**") + + def apply(span: Span, raw: String): Comment = + Comment(span, raw, None, Nil, Map.empty) + + private def parseUsecases(expandedComment: String, span: Span)(using Context): List[UseCase] = + if (!isDocComment(expandedComment)) + Nil + else + tagIndex(expandedComment) + .filter { startsWithTag(expandedComment, _, "@usecase") } + .map { case (start, end) => decomposeUseCase(expandedComment, span, start, end) } + + /** Turns a usecase section into a UseCase, with code changed to: + * {{{ + * // From: + * def foo: A + * // To: + * def foo: A = ??? 
+ * }}} + */ + private def decomposeUseCase(body: String, span: Span, start: Int, end: Int)(using Context): UseCase = { + def subPos(start: Int, end: Int) = + if (span == NoSpan) NoSpan + else { + val start1 = span.start + start + val end1 = span.end + end + span withStart start1 withPoint start1 withEnd end1 + } + + val codeStart = skipWhitespace(body, start + "@usecase".length) + val codeEnd = skipToEol(body, codeStart) + val code = body.substring(codeStart, codeEnd) + " = ???" + val codePos = subPos(codeStart, codeEnd) + + UseCase(code, codePos) + } + } + + final case class UseCase(code: String, codePos: Span, untpdCode: untpd.Tree, tpdCode: Option[tpd.DefDef]) { + def typed(tpdCode: tpd.DefDef): UseCase = copy(tpdCode = Some(tpdCode)) + } + + object UseCase { + def apply(code: String, codePos: Span)(using Context): UseCase = { + val tree = { + val tree = new Parser(SourceFile.virtual("", code)).localDef(codePos.start) + tree match { + case tree: untpd.DefDef => + val newName = ctx.compilationUnit.freshNames.newName(tree.name, NameKinds.DocArtifactName) + untpd.cpy.DefDef(tree)(name = newName) + case _ => + report.error(ProperDefinitionNotFound(), ctx.source.atSpan(codePos)) + tree + } + } + UseCase(code, codePos, tree, None) + } + } + + /** + * Port of DocComment.scala from nsc + * @author Martin Odersky + * @author Felix Mulder + */ + class CommentExpander { + import dotc.config.Printers.scaladoc + import scala.collection.mutable + + def expand(sym: Symbol, site: Symbol)(using Context): String = { + val parent = if (site != NoSymbol) site else sym + defineVariables(parent) + expandedDocComment(sym, parent) + } + + /** The cooked doc comment of symbol `sym` after variable expansion, or "" if missing. 
+ * + * @param sym The symbol for which doc comment is returned + * @param site The class for which doc comments are generated + * @throws ExpansionLimitExceeded when more than 10 successive expansions + * of the same string are done, which is + * interpreted as a recursive variable definition. + */ + def expandedDocComment(sym: Symbol, site: Symbol, docStr: String = "")(using Context): String = { + // when parsing a top level class or module, use the (module-)class itself to look up variable definitions + val parent = if ((sym.is(Flags.Module) || sym.isClass) && site.is(Flags.Package)) sym + else site + expandVariables(cookedDocComment(sym, docStr), sym, parent) + } + + private def template(raw: String): String = + removeSections(raw, "@define") + + private def defines(raw: String): List[String] = { + val sections = tagIndex(raw) + val defines = sections filter { startsWithTag(raw, _, "@define") } + val usecases = sections filter { startsWithTag(raw, _, "@usecase") } + val end = startTag(raw, (defines ::: usecases).sortBy(_._1)) + + defines map { case (start, end) => raw.substring(start, end) } + } + + private def replaceInheritDocToInheritdoc(docStr: String): String = + docStr.replaceAll("""\{@inheritDoc\p{Zs}*\}""", "@inheritdoc") + + /** The cooked doc comment of an overridden symbol */ + protected def superComment(sym: Symbol)(using Context): Option[String] = + allInheritedOverriddenSymbols(sym).iterator map (x => cookedDocComment(x)) find (_ != "") + + private val cookedDocComments = MutableSymbolMap[String]() + + /** The raw doc comment of symbol `sym`, minus usecase and define sections, augmented by + * missing sections of an inherited doc comment. + * If a symbol does not have a doc comment but some overridden version of it does, + * the doc comment of the overridden version is copied instead. 
+ */ + def cookedDocComment(sym: Symbol, docStr: String = "")(using Context): String = cookedDocComments.getOrElseUpdate(sym, { + var ownComment = + if (docStr.length == 0) ctx.docCtx.flatMap(_.docstring(sym).map(c => template(c.raw))).getOrElse("") + else template(docStr) + ownComment = replaceInheritDocToInheritdoc(ownComment) + + superComment(sym) match { + case None => + // SI-8210 - The warning would be false negative when this symbol is a setter + if (ownComment.indexOf("@inheritdoc") != -1 && ! sym.isSetter) + scaladoc.println(s"${sym.span}: the comment for ${sym} contains @inheritdoc, but no parent comment is available to inherit from.") + ownComment.replace("@inheritdoc", "") + case Some(sc) => + if (ownComment == "") sc + else expandInheritdoc(sc, merge(sc, ownComment, sym), sym) + } + }) + + private def isMovable(str: String, sec: (Int, Int)): Boolean = + startsWithTag(str, sec, "@param") || + startsWithTag(str, sec, "@tparam") || + startsWithTag(str, sec, "@return") + + def merge(src: String, dst: String, sym: Symbol, copyFirstPara: Boolean = false): String = { + val srcSections = tagIndex(src) + val dstSections = tagIndex(dst) + val srcParams = paramDocs(src, "@param", srcSections) + val dstParams = paramDocs(dst, "@param", dstSections) + val srcTParams = paramDocs(src, "@tparam", srcSections) + val dstTParams = paramDocs(dst, "@tparam", dstSections) + val out = new StringBuilder + var copied = 0 + var tocopy = startTag(dst, dstSections dropWhile (!isMovable(dst, _))) + + if (copyFirstPara) { + val eop = // end of comment body (first para), which is delimited by blank line, or tag, or end of comment + (findNext(src, 0)(src.charAt(_) == '\n')) min startTag(src, srcSections) + out append src.substring(0, eop).trim + copied = 3 + tocopy = 3 + } + + def mergeSection(srcSec: Option[(Int, Int)], dstSec: Option[(Int, Int)]) = dstSec match { + case Some((start, end)) => + if (end > tocopy) tocopy = end + case None => + srcSec match { + case Some((start1, 
end1)) => + out append dst.substring(copied, tocopy).trim + out append "\n" + copied = tocopy + out append src.substring(start1, end1).trim + case None => + } + } + + //TODO: enable this once you know how to get `sym.paramss` + /* + for (params <- sym.paramss; param <- params) + mergeSection(srcParams get param.name.toString, dstParams get param.name.toString) + for (tparam <- sym.typeParams) + mergeSection(srcTParams get tparam.name.toString, dstTParams get tparam.name.toString) + + mergeSection(returnDoc(src, srcSections), returnDoc(dst, dstSections)) + mergeSection(groupDoc(src, srcSections), groupDoc(dst, dstSections)) + */ + + if (out.length == 0) dst + else { + out append dst.substring(copied) + out.toString + } + } + + /** + * Expand inheritdoc tags + * - for the main comment we transform the inheritdoc into the super variable, + * and the variable expansion can expand it further + * - for the param, tparam and throws sections we must replace comments on the spot + * + * This is done separately, for two reasons: + * 1. It takes longer to run compared to merge + * 2. 
The inheritdoc annotation should not be used very often, as building the comment from pieces severely + * impacts performance + * + * @param parent The source (or parent) comment + * @param child The child (overriding member or usecase) comment + * @param sym The child symbol + * @return The child comment with the inheritdoc sections expanded + */ + def expandInheritdoc(parent: String, child: String, sym: Symbol): String = + if (child.indexOf("@inheritdoc") == -1) + child + else { + val parentSections = tagIndex(parent) + val childSections = tagIndex(child) + val parentTagMap = sectionTagMap(parent, parentSections) + val parentNamedParams = Map() + + ("@param" -> paramDocs(parent, "@param", parentSections)) + + ("@tparam" -> paramDocs(parent, "@tparam", parentSections)) + + ("@throws" -> paramDocs(parent, "@throws", parentSections)) + + val out = new StringBuilder + + def replaceInheritdoc(childSection: String, parentSection: => String) = + if (childSection.indexOf("@inheritdoc") == -1) + childSection + else + childSection.replace("@inheritdoc", parentSection) + + def getParentSection(section: (Int, Int)): String = { + + def getSectionHeader = extractSectionTag(child, section) match { + case param@("@param"|"@tparam"|"@throws") => param + " " + extractSectionParam(child, section) + case other => other + } + + def sectionString(param: String, paramMap: Map[String, (Int, Int)]): String = + paramMap.get(param) match { + case Some(section) => + // Cleanup the section tag and parameter + val sectionTextBounds = extractSectionText(parent, section) + cleanupSectionText(parent.substring(sectionTextBounds._1, sectionTextBounds._2)) + case None => + scaladoc.println(s"""${sym.span}: the """" + getSectionHeader + "\" annotation of the " + sym + + " comment contains @inheritdoc, but the corresponding section in the parent is not defined.") + "" + } + + child.substring(section._1, section._1 + 7) match { + case param@("@param "|"@tparam"|"@throws") => + 
sectionString(extractSectionParam(child, section), parentNamedParams(param.trim)) + case _ => + sectionString(extractSectionTag(child, section), parentTagMap) + } + } + + def mainComment(str: String, sections: List[(Int, Int)]): String = + if (str.trim.length > 3) + str.trim.substring(3, startTag(str, sections)) + else + "" + + // Append main comment + out.append("/**") + out.append(replaceInheritdoc(mainComment(child, childSections), mainComment(parent, parentSections))) + + // Append sections + for (section <- childSections) + out.append(replaceInheritdoc(child.substring(section._1, section._2), getParentSection(section))) + + out.append("*/") + out.toString + } + + protected def expandVariables(initialStr: String, sym: Symbol, site: Symbol)(using Context): String = { + val expandLimit = 10 + + def expandInternal(str: String, depth: Int): String = { + if (depth >= expandLimit) + throw new ExpansionLimitExceeded(str) + + val out = new StringBuilder + var copied, idx = 0 + // excluding variables written as \$foo so we can use them when + // necessary to document things like Symbol#decode + def isEscaped = idx > 0 && str.charAt(idx - 1) == '\\' + while (idx < str.length) + if ((str charAt idx) != '$' || isEscaped) + idx += 1 + else { + val vstart = idx + idx = skipVariable(str, idx + 1) + def replaceWith(repl: String) = { + out append str.substring(copied, vstart) + out append repl + copied = idx + } + variableName(str.substring(vstart + 1, idx)) match { + case "super" => + superComment(sym) foreach { sc => + val superSections = tagIndex(sc) + replaceWith(sc.substring(3, startTag(sc, superSections))) + for (sec @ (start, end) <- superSections) + if (!isMovable(sc, sec)) out append sc.substring(start, end) + } + case "" => idx += 1 + case vname => + lookupVariable(vname, site) match { + case Some(replacement) => replaceWith(replacement) + case None => + scaladoc.println(s"Variable $vname undefined in comment for $sym in $site") + } + } + } + if (out.length == 0) str 
+ else { + out append str.substring(copied) + expandInternal(out.toString, depth + 1) + } + } + + // We suppressed expanding \$ throughout the recursion, and now we + // need to replace \$ with $ so it looks as intended. + expandInternal(initialStr, 0).replace("""\$""", "$") + } + + def defineVariables(sym: Symbol)(using Context): Unit = { + val Trim = "(?s)^[\\s&&[^\n\r]]*(.*?)\\s*$".r + + val raw = ctx.docCtx.flatMap(_.docstring(sym).map(_.raw)).getOrElse("") + defs(sym) ++= defines(raw).map { + str => { + val start = skipWhitespace(str, "@define".length) + val (key, value) = str.splitAt(skipVariable(str, start)) + key.drop(start) -> value + } + } map { + case (key, Trim(value)) => + variableName(key) -> value.replaceAll("\\s+\\*+$", "") + } + } + + /** Maps symbols to the variable -> replacement maps that are defined + * in their doc comments + */ + private val defs = mutable.HashMap[Symbol, Map[String, String]]() withDefaultValue Map() + + /** Lookup definition of variable. + * + * @param vble The variable for which a definition is searched + * @param site The class for which doc comments are generated + */ + def lookupVariable(vble: String, site: Symbol)(using Context): Option[String] = site match { + case NoSymbol => None + case _ => + val searchList = + if (site.flags.is(Flags.Module)) site :: site.info.baseClasses + else site.info.baseClasses + + searchList collectFirst { case x if defs(x) contains vble => defs(x)(vble) } match { + case Some(str) if str startsWith "$" => lookupVariable(str.tail, site) + case res => res orElse lookupVariable(vble, site.owner) + } + } + + /** The position of the raw doc comment of symbol `sym`, or NoPosition if missing + * If a symbol does not have a doc comment but some overridden version of it does, + * the position of the doc comment of the overridden version is returned instead. 
+ */ + def docCommentPos(sym: Symbol)(using Context): Span = + ctx.docCtx.flatMap(_.docstring(sym).map(_.span)).getOrElse(NoSpan) + + /** A version which doesn't consider self types, as a temporary measure: + * an infinite loop has broken out between superComment and cookedDocComment + * since r23926. + */ + private def allInheritedOverriddenSymbols(sym: Symbol)(using Context): List[Symbol] = + if (!sym.owner.isClass) Nil + else sym.allOverriddenSymbols.toList.filter(_ != NoSymbol) //TODO: could also be `sym.owner.allOverrid..` + //else sym.owner.ancestors map (sym overriddenSymbol _) filter (_ != NoSymbol) + + class ExpansionLimitExceeded(str: String) extends Exception + } +} diff --git a/tests/pos-with-compiler-cc/dotc/core/Constants.scala b/tests/pos-with-compiler-cc/dotc/core/Constants.scala new file mode 100644 index 000000000000..f45e9e5217de --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/core/Constants.scala @@ -0,0 +1,261 @@ +package dotty.tools +package dotc +package core + +import Types._, Symbols._, Contexts._ +import printing.Printer +import printing.Texts.Text + +object Constants { + + inline val NoTag = 0 + inline val UnitTag = 1 + inline val BooleanTag = 2 + inline val ByteTag = 3 + inline val ShortTag = 4 + inline val CharTag = 5 + inline val IntTag = 6 + inline val LongTag = 7 + inline val FloatTag = 8 + inline val DoubleTag = 9 + inline val StringTag = 10 + inline val NullTag = 11 + inline val ClazzTag = 12 + + class Constant(val value: Any, val tag: Int) extends printing.Showable with Product1[Any] { + import java.lang.Double.doubleToRawLongBits + import java.lang.Float.floatToRawIntBits + + def isByteRange: Boolean = isIntRange && Byte.MinValue <= intValue && intValue <= Byte.MaxValue + def isShortRange: Boolean = isIntRange && Short.MinValue <= intValue && intValue <= Short.MaxValue + def isCharRange: Boolean = isIntRange && Char.MinValue <= intValue && intValue <= Char.MaxValue + def isIntRange: Boolean = ByteTag <= tag && tag <= IntTag + 
def isLongRange: Boolean = ByteTag <= tag && tag <= LongTag + def isFloatRange: Boolean = ByteTag <= tag && tag <= FloatTag + def isNumeric: Boolean = ByteTag <= tag && tag <= DoubleTag + def isNonUnitAnyVal: Boolean = BooleanTag <= tag && tag <= DoubleTag + def isAnyVal: Boolean = UnitTag <= tag && tag <= DoubleTag + + def tpe(using Context): Type = tag match { + case UnitTag => defn.UnitType + case BooleanTag => defn.BooleanType + case ByteTag => defn.ByteType + case ShortTag => defn.ShortType + case CharTag => defn.CharType + case IntTag => defn.IntType + case LongTag => defn.LongType + case FloatTag => defn.FloatType + case DoubleTag => defn.DoubleType + case StringTag => defn.StringType + case NullTag => defn.NullType + case ClazzTag => defn.ClassType(typeValue) + } + + /** We need the equals method to take account of tags as well as values. + */ + override def equals(other: Any): Boolean = other match { + case that: Constant => + this.tag == that.tag && equalHashValue == that.equalHashValue + case _ => false + } + + def isNaN: Boolean = value match { + case f: Float => f.isNaN + case d: Double => d.isNaN + case _ => false + } + + def booleanValue: Boolean = + if (tag == BooleanTag) value.asInstanceOf[Boolean] + else throw new Error("value " + value + " is not a boolean") + + def byteValue: Byte = tag match { + case ByteTag => value.asInstanceOf[Byte] + case ShortTag => value.asInstanceOf[Short].toByte + case CharTag => value.asInstanceOf[Char].toByte + case IntTag => value.asInstanceOf[Int].toByte + case LongTag => value.asInstanceOf[Long].toByte + case FloatTag => value.asInstanceOf[Float].toByte + case DoubleTag => value.asInstanceOf[Double].toByte + case _ => throw new Error("value " + value + " is not a Byte") + } + + def shortValue: Short = tag match { + case ByteTag => value.asInstanceOf[Byte].toShort + case ShortTag => value.asInstanceOf[Short] + case CharTag => value.asInstanceOf[Char].toShort + case IntTag => value.asInstanceOf[Int].toShort + case 
LongTag => value.asInstanceOf[Long].toShort + case FloatTag => value.asInstanceOf[Float].toShort + case DoubleTag => value.asInstanceOf[Double].toShort + case _ => throw new Error("value " + value + " is not a Short") + } + + def charValue: Char = tag match { + case ByteTag => value.asInstanceOf[Byte].toChar + case ShortTag => value.asInstanceOf[Short].toChar + case CharTag => value.asInstanceOf[Char] + case IntTag => value.asInstanceOf[Int].toChar + case LongTag => value.asInstanceOf[Long].toChar + case FloatTag => value.asInstanceOf[Float].toChar + case DoubleTag => value.asInstanceOf[Double].toChar + case _ => throw new Error("value " + value + " is not a Char") + } + + def intValue: Int = tag match { + case ByteTag => value.asInstanceOf[Byte].toInt + case ShortTag => value.asInstanceOf[Short].toInt + case CharTag => value.asInstanceOf[Char].toInt + case IntTag => value.asInstanceOf[Int] + case LongTag => value.asInstanceOf[Long].toInt + case FloatTag => value.asInstanceOf[Float].toInt + case DoubleTag => value.asInstanceOf[Double].toInt + case _ => throw new Error("value " + value + " is not an Int") + } + + def longValue: Long = tag match { + case ByteTag => value.asInstanceOf[Byte].toLong + case ShortTag => value.asInstanceOf[Short].toLong + case CharTag => value.asInstanceOf[Char].toLong + case IntTag => value.asInstanceOf[Int].toLong + case LongTag => value.asInstanceOf[Long] + case FloatTag => value.asInstanceOf[Float].toLong + case DoubleTag => value.asInstanceOf[Double].toLong + case _ => throw new Error("value " + value + " is not a Long") + } + + def floatValue: Float = tag match { + case ByteTag => value.asInstanceOf[Byte].toFloat + case ShortTag => value.asInstanceOf[Short].toFloat + case CharTag => value.asInstanceOf[Char].toFloat + case IntTag => value.asInstanceOf[Int].toFloat + case LongTag => value.asInstanceOf[Long].toFloat + case FloatTag => value.asInstanceOf[Float] + case DoubleTag => value.asInstanceOf[Double].toFloat + case _ => throw new 
Error("value " + value + " is not a Float") + } + + def doubleValue: Double = tag match { + case ByteTag => value.asInstanceOf[Byte].toDouble + case ShortTag => value.asInstanceOf[Short].toDouble + case CharTag => value.asInstanceOf[Char].toDouble + case IntTag => value.asInstanceOf[Int].toDouble + case LongTag => value.asInstanceOf[Long].toDouble + case FloatTag => value.asInstanceOf[Float].toDouble + case DoubleTag => value.asInstanceOf[Double] + case _ => throw new Error("value " + value + " is not a Double") + } + + /** Convert constant value to conform to given type. + */ + def convertTo(pt: Type)(using Context): Constant | Null = { + def classBound(pt: Type): Type = pt.dealias.stripTypeVar match { + case tref: TypeRef if !tref.symbol.isClass && tref.info.exists => + classBound(tref.info.bounds.lo) + case param: TypeParamRef => + ctx.typerState.constraint.entry(param) match { + case TypeBounds(lo, hi) => + if (hi.classSymbol.isPrimitiveValueClass) hi //constrain further with high bound + else classBound(lo) + case NoType => classBound(param.binder.paramInfos(param.paramNum).lo) + case inst => classBound(inst) + } + case pt => pt + } + pt match + case ConstantType(value) if value == this => this + case _: SingletonType => null + case _ => + val target = classBound(pt).typeSymbol + if (target == tpe.typeSymbol) + this + else if ((target == defn.ByteClass) && isByteRange) + Constant(byteValue) + else if (target == defn.ShortClass && isShortRange) + Constant(shortValue) + else if (target == defn.CharClass && isCharRange) + Constant(charValue) + else if (target == defn.IntClass && isIntRange) + Constant(intValue) + else if (target == defn.LongClass && isLongRange) + Constant(longValue) + else if (target == defn.FloatClass && isFloatRange) + Constant(floatValue) + else if (target == defn.DoubleClass && isNumeric) + Constant(doubleValue) + else + null + } + + def stringValue: String = value.toString + + def toText(printer: Printer): Text = printer.toText(this) + + 
def typeValue: Type = value.asInstanceOf[Type] + + /** + * Consider two `NaN`s to be identical, despite non-equality + * Consider -0d to be distinct from 0d, despite equality + * + * We use the raw versions (i.e. `floatToRawIntBits` rather than `floatToIntBits`) + * to avoid treating different encodings of `NaN` as the same constant. + * You probably can't express different `NaN` varieties as compile time + * constants in regular Scala code, but it is conceivable that you could + * conjure them with a macro. + */ + private def equalHashValue: Any = value match { + case f: Float => floatToRawIntBits(f) + case d: Double => doubleToRawLongBits(d) + case v => v + } + + override def hashCode: Int = { + import scala.util.hashing.MurmurHash3._ + val seed = 17 + var h = seed + h = mix(h, tag.##) // include tag in the hash, otherwise 0, 0d, 0L, 0f collide. + h = mix(h, equalHashValue.##) + finalizeHash(h, length = 2) + } + + override def toString: String = s"Constant($value)" + def canEqual(x: Any): Boolean = true + def get: Any = value + def isEmpty: Boolean = false + def _1: Any = value + } + + object Constant { + def apply(x: Null): Constant = new Constant(x, NullTag) + def apply(x: Unit): Constant = new Constant(x, UnitTag) + def apply(x: Boolean): Constant = new Constant(x, BooleanTag) + def apply(x: Byte): Constant = new Constant(x, ByteTag) + def apply(x: Short): Constant = new Constant(x, ShortTag) + def apply(x: Int): Constant = new Constant(x, IntTag) + def apply(x: Long): Constant = new Constant(x, LongTag) + def apply(x: Float): Constant = new Constant(x, FloatTag) + def apply(x: Double): Constant = new Constant(x, DoubleTag) + def apply(x: String): Constant = new Constant(x, StringTag) + def apply(x: Char): Constant = new Constant(x, CharTag) + def apply(x: Type): Constant = new Constant(x, ClazzTag) + def apply(value: Any): Constant = + new Constant(value, + value match { + case null => NullTag + case x: Unit => UnitTag + case x: Boolean => BooleanTag + case 
x: Byte => ByteTag + case x: Short => ShortTag + case x: Int => IntTag + case x: Long => LongTag + case x: Float => FloatTag + case x: Double => DoubleTag + case x: String => StringTag + case x: Char => CharTag + case x: Type => ClazzTag + } + ) + + def unapply(c: Constant): Constant = c + } +} diff --git a/tests/pos-with-compiler-cc/dotc/core/Constraint.scala b/tests/pos-with-compiler-cc/dotc/core/Constraint.scala new file mode 100644 index 000000000000..fb87aed77c41 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/core/Constraint.scala @@ -0,0 +1,214 @@ +package dotty.tools +package dotc +package core + +import Types._, Contexts._ +import printing.Showable +import util.{SimpleIdentitySet, SimpleIdentityMap} + +/** Constraint over undetermined type parameters. Constraints are built + * over values of the following types: + * + * - TypeLambda A constraint constrains the type parameters of a set of TypeLambdas + * - TypeParamRef The parameters of the constrained type lambdas + * - TypeVar Every constrained parameter might be associated with a TypeVar + * that has the TypeParamRef as origin. + */ +abstract class Constraint extends Showable { + + type This <: Constraint + + /** Does the constraint's domain contain the type parameters of `tl`? */ + def contains(tl: TypeLambda): Boolean + + /** Does the constraint's domain contain the type parameter `param`? */ + def contains(param: TypeParamRef): Boolean + + /** Does this constraint contain the type variable `tvar` and is it uninstantiated? */ + def contains(tvar: TypeVar): Boolean + + /** The constraint entry for given type parameter `param`, or NoType if `param` is not part of + * the constraint domain. Note: Low level, implementation dependent. + */ + def entry(param: TypeParamRef): Type + + /** The type variable corresponding to parameter `param`, or + * NoType, if `param` is not in constrained or is not paired with a type variable. 
+ */ + def typeVarOfParam(param: TypeParamRef): Type + + /** Is it known that `param1 <:< param2`? */ + def isLess(param1: TypeParamRef, param2: TypeParamRef): Boolean + + /** The parameters that are known to be smaller wrt <: than `param` */ + def lower(param: TypeParamRef): List[TypeParamRef] + + /** The parameters that are known to be greater wrt <: than `param` */ + def upper(param: TypeParamRef): List[TypeParamRef] + + /** The lower dominator set. + * + * This is like `lower`, except that each parameter returned is no smaller than every other returned parameter. + */ + def minLower(param: TypeParamRef): List[TypeParamRef] + + /** The upper dominator set. + * + * This is like `upper`, except that each parameter returned is no greater than every other returned parameter. + */ + def minUpper(param: TypeParamRef): List[TypeParamRef] + + /** lower(param) \ lower(butNot) */ + def exclusiveLower(param: TypeParamRef, butNot: TypeParamRef): List[TypeParamRef] + + /** upper(param) \ upper(butNot) */ + def exclusiveUpper(param: TypeParamRef, butNot: TypeParamRef): List[TypeParamRef] + + /** The constraint bounds for given type parameter `param`. + * Poly params that are known to be smaller or greater than `param` + * are not contained in the return bounds. + * @pre `param` is not part of the constraint domain. + */ + def nonParamBounds(param: TypeParamRef)(using Context): TypeBounds + + /** A new constraint which is derived from this constraint by adding + * entries for all type parameters of `poly`. + * @param tvars A list of type variables associated with the params, + * or Nil if the constraint will just be checked for + * satisfiability but will solved to give instances of + * type variables. + */ + def add(poly: TypeLambda, tvars: List[TypeVar])(using Context): This + + /** A new constraint which is derived from this constraint by updating + * the entry for parameter `param` to `tp`. 
+ * `tp` can be one of the following: + * + * - A TypeBounds value, indicating new constraint bounds + * - Another type, indicating a solution for the parameter + * + * @pre `this contains param`. + */ + def updateEntry(param: TypeParamRef, tp: Type)(using Context): This + + /** A constraint that includes the relationship `p1 <: p2`. + * `<:` relationships between parameters ("edges") are propagated, but + * non-parameter bounds are left alone. + * + * @param direction Must be set to `KeepParam1` or `KeepParam2` when + * `p2 <: p1` is already true depending on which parameter + * the caller intends to keep. This will avoid propagating + * bounds that will be redundant after `p1` and `p2` are + * unified. + */ + def addLess(p1: TypeParamRef, p2: TypeParamRef, + direction: UnificationDirection = UnificationDirection.NoUnification)(using Context): This + + /** A new constraint which is derived from this constraint by removing + * the type parameter `param` from the domain and replacing all top-level occurrences + * of the parameter elsewhere in the constraint by type `tp`, or a conservative + * approximation of it if that is needed to avoid cycles. + * Occurrences nested inside a refinement or prefix are not affected. + */ + def replace(param: TypeParamRef, tp: Type)(using Context): This + + /** Is entry associated with `tl` removable? This is the case if + * all type parameters of the entry are associated with type variables + * which have their `inst` fields set. + */ + def isRemovable(tl: TypeLambda): Boolean + + /** A new constraint with all entries coming from `tl` removed. */ + def remove(tl: TypeLambda)(using Context): This + + /** A new constraint with entry `from` replaced with `to` + * Rerences to `from` from within other constraint bounds are updated to `to`. + * Type variables are left alone. + */ + def subst(from: TypeLambda, to: TypeLambda)(using Context): This + + /** Is `tv` marked as hard in the constraint? 
*/ + def isHard(tv: TypeVar): Boolean + + /** The same as this constraint, but with `tv` marked as hard. */ + def withHard(tv: TypeVar)(using Context): This + + /** Gives for each instantiated type var that does not yet have its `inst` field + * set, the instance value stored in the constraint. Storing instances in constraints + * is done only in a temporary way for contexts that may be retracted + * without also retracting the type var as a whole. + */ + def instType(tvar: TypeVar): Type + + /** The given `tl` in case it is not contained in this constraint, + * a fresh copy of `tl` otherwise. + */ + def ensureFresh(tl: TypeLambda)(using Context): TypeLambda + + /** The type lambdas constrained by this constraint */ + def domainLambdas: List[TypeLambda] + + /** The type lambda parameters constrained by this constraint */ + def domainParams: List[TypeParamRef] + + /** Check whether predicate holds for all parameters in constraint */ + def forallParams(p: TypeParamRef => Boolean): Boolean + + /** Perform operation `op` on all typevars that do not have their `inst` field set. */ + def foreachTypeVar(op: TypeVar => Unit): Unit + + /** The uninstantiated typevars of this constraint, which still have a bounds constraint + */ + def uninstVars: collection.Seq[TypeVar] + + /** Whether `tl` is present in both `this` and `that` but is associated with + * different TypeVars there, meaning that the constraints cannot be merged. + */ + def hasConflictingTypeVarsFor(tl: TypeLambda, that: Constraint): Boolean + + /** Does `param` occur at the toplevel in `tp` ? + * Toplevel means: the type itself or a factor in some + * combination of `&` or `|` types. + */ + def occursAtToplevel(param: TypeParamRef, tp: Type)(using Context): Boolean + + /** A string that shows the reverse dependencies maintained by this constraint + * (coDeps and contraDeps for OrderingConstraints). 
+ */ + def depsToString(using Context): String + + /** Does the constraint restricted to variables outside `except` depend on `tv` + * in the given direction `co`? + * @param `co` If true, test whether the constraint would change if the variable is made larger + * otherwise, test whether the constraint would change if the variable is made smaller. + */ + def dependsOn(tv: TypeVar, except: TypeVars, co: Boolean)(using Context): Boolean + + /** Depending on Config settngs: + * - Under `checkConstraintsNonCyclic`, check that no constrained + * parameter contains itself as a bound. + * - Under `checkConstraintDeps`, check hat reverse dependencies in + * constraints are correct and complete. + */ + def checkWellFormed()(using Context): this.type + + /** Check that constraint only refers to TypeParamRefs bound by itself */ + def checkClosed()(using Context): Unit + + /** Check that every typevar om this constraint has as origin a type parameter + * of athe type lambda that is associated with the typevar itself. + */ + def checkConsistentVars()(using Context): Unit +} + +/** When calling `Constraint#addLess(p1, p2, ...)`, the caller might end up + * unifying one parameter with the other, this enum lets `addLess` know which + * direction the unification will take. + */ +enum UnificationDirection: + /** Neither p1 nor p2 will be instantiated. */ + case NoUnification + /** `p2 := p1`, p1 left uninstantiated. */ + case KeepParam1 + /** `p1 := p2`, p2 left uninstantiated. 
*/ + case KeepParam2 diff --git a/tests/pos-with-compiler-cc/dotc/core/ConstraintHandling.scala b/tests/pos-with-compiler-cc/dotc/core/ConstraintHandling.scala new file mode 100644 index 000000000000..8bf671931260 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/core/ConstraintHandling.scala @@ -0,0 +1,891 @@ +package dotty.tools +package dotc +package core + +import Types._ +import Contexts._ +import Symbols._ +import Decorators._ +import Flags._ +import config.Config +import config.Printers.typr +import typer.ProtoTypes.{newTypeVar, representedParamRef} +import UnificationDirection.* +import NameKinds.AvoidNameKind +import util.SimpleIdentitySet +import NullOpsDecorator.stripNull + +/** Methods for adding constraints and solving them. + * + * What goes into a Constraint as opposed to a ConstrainHandler? + * + * Constraint code is purely functional: Operations get constraints and produce new ones. + * Constraint code does not have access to a type-comparer. Anything regarding lubs and glbs has to be done + * elsewhere. + * + * By comparison: Constraint handlers are parts of type comparers and can use their functionality. + * Constraint handlers update the current constraint as a side effect. + */ +trait ConstraintHandling { + + def constr: config.Printers.Printer = config.Printers.constr + + protected def isSub(tp1: Type, tp2: Type)(using Context): Boolean + protected def isSame(tp1: Type, tp2: Type)(using Context): Boolean + + protected def constraint: Constraint + protected def constraint_=(c: Constraint): Unit + + private var addConstraintInvocations = 0 + + /** If the constraint is frozen we cannot add new bounds to the constraint. */ + protected var frozenConstraint: Boolean = false + + /** Potentially a type lambda that is still instantiatable, even though the constraint + * is generally frozen. 
+ */ + protected var caseLambda: Type = NoType + + /** If set, align arguments `S1`, `S2`when taking the glb + * `T1 { X = S1 } & T2 { X = S2 }` of a constraint upper bound for some type parameter. + * Aligning means computing `S1 =:= S2` which may change the current constraint. + * See note in TypeComparer#distributeAnd. + */ + protected var homogenizeArgs: Boolean = false + + /** We are currently comparing type lambdas. Used as a flag for + * optimization: when `false`, no need to do an expensive `pruneLambdaParams` + */ + protected var comparedTypeLambdas: Set[TypeLambda] = Set.empty + + /** Used for match type reduction: If false, we don't recognize an abstract type + * to be a subtype type of any of its base classes. This is in place only at the + * toplevel; it is turned on again when we add parts of the scrutinee to the constraint. + */ + protected var canWidenAbstract: Boolean = true + + protected var myNecessaryConstraintsOnly = false + /** When collecting the constraints needed for a particular subtyping + * judgment to be true, we sometimes need to approximate the constraint + * set (see `TypeComparer#either` for example). + * + * Normally, this means adding extra constraints which may not be necessary + * for the subtyping judgment to be true, but if this variable is set to true + * we will instead under-approximate and keep only the constraints that must + * always be present for the subtyping judgment to hold. + * + * This is needed for GADT bounds inference to be sound, but it is also used + * when constraining a method call based on its expected type to avoid adding + * constraints that would later prevent us from typechecking method + * arguments, see or-inf.scala and and-inf.scala for examples. 
+ */ + protected def necessaryConstraintsOnly(using Context): Boolean = + ctx.mode.is(Mode.GadtConstraintInference) || myNecessaryConstraintsOnly + + /** If `trustBounds = false` we perform comparisons in a pessimistic way as follows: + * Given an abstract type `A >: L <: H`, a subtype comparison of any type + * with `A` will compare against both `L` and `H`. E.g. + * + * T <:< A if T <:< L and T <:< H + * A <:< T if L <:< T and H <:< T + * + * This restricted form makes sure we don't "forget" types when forming + * unions and intersections with abstract types that have bad bounds. E.g. + * the following example from neg/i8900.scala that @smarter came up with: + * We have a type variable X with constraints + * + * X >: 1, X >: x.M + * + * where `x` is a locally nested variable and `x.M` has bad bounds + * + * x.M >: Int | String <: Int & String + * + * If we trust bounds, then the lower bound of `X` is `x.M` since `x.M >: 1`. + * Then even if we correct levels on instantiation to eliminate the local `x`, + * it is alreay too late, we'd get `Int & String` as instance, which does not + * satisfy the original constraint `X >: 1`. + * + * But if `trustBounds` is false, we do not conclude the `x.M >: 1` since + * we compare both bounds and the upper bound `Int & String` is not a supertype + * of `1`. So the lower bound is `1 | x.M` and when we level-avoid that we + * get `1 | Int & String`, which simplifies to `Int`. 
+ */ + private var myTrustBounds = true + + inline def withUntrustedBounds(op: => Type): Type = + val saved = myTrustBounds + myTrustBounds = false + try op finally myTrustBounds = saved + + def trustBounds: Boolean = + !Config.checkLevelsOnInstantiation || myTrustBounds + + def checkReset() = + assert(addConstraintInvocations == 0) + assert(frozenConstraint == false) + assert(caseLambda == NoType) + assert(homogenizeArgs == false) + assert(comparedTypeLambdas == Set.empty) + + def nestingLevel(param: TypeParamRef)(using Context) = constraint.typeVarOfParam(param) match + case tv: TypeVar => tv.nestingLevel + case _ => + // This should only happen when reducing match types (in + // TrackingTypeComparer#matchCases) or in uncommitable TyperStates (as + // asserted in ProtoTypes.constrained) and is special-cased in `levelOK` + // below. + Int.MaxValue + + /** Is `level` <= `maxLevel` or legal in the current context? */ + def levelOK(level: Int, maxLevel: Int)(using Context): Boolean = + level <= maxLevel + || ctx.isAfterTyper || !ctx.typerState.isCommittable // Leaks in these cases shouldn't break soundness + || level == Int.MaxValue // See `nestingLevel` above. + || !Config.checkLevelsOnConstraints + + /** If `param` is nested deeper than `maxLevel`, try to instantiate it to a + * fresh type variable of level `maxLevel` and return the new variable. + * If this isn't possible, throw a TypeError. 
+ */ + def atLevel(maxLevel: Int, param: TypeParamRef)(using Context): TypeParamRef = + if levelOK(nestingLevel(param), maxLevel) then + return param + LevelAvoidMap(0, maxLevel)(param) match + case freshVar: TypeVar => freshVar.origin + case _ => throw TypeError( + em"Could not decrease the nesting level of ${param} from ${nestingLevel(param)} to $maxLevel in $constraint") + + def nonParamBounds(param: TypeParamRef)(using Context): TypeBounds = constraint.nonParamBounds(param) + + /** The full lower bound of `param` includes both the `nonParamBounds` and the + * params in the constraint known to be `<: param`, except that + * params with a `nestingLevel` higher than `param` will be instantiated + * to a fresh param at a legal level. See the documentation of `TypeVar` + * for details. + */ + def fullLowerBound(param: TypeParamRef)(using Context): Type = + val maxLevel = nestingLevel(param) + var loParams = constraint.minLower(param) + if maxLevel != Int.MaxValue then + loParams = loParams.mapConserve(atLevel(maxLevel, _)) + loParams.foldLeft(nonParamBounds(param).lo)(_ | _) + + /** The full upper bound of `param`, see the documentation of `fullLowerBounds` above. */ + def fullUpperBound(param: TypeParamRef)(using Context): Type = + val maxLevel = nestingLevel(param) + var hiParams = constraint.minUpper(param) + if maxLevel != Int.MaxValue then + hiParams = hiParams.mapConserve(atLevel(maxLevel, _)) + hiParams.foldLeft(nonParamBounds(param).hi)(_ & _) + + /** Full bounds of `param`, including other lower/upper params. + * + * Note that underlying operations perform subtype checks - for this reason, recursing on `fullBounds` + * of some param when comparing types might lead to infinite recursion. Consider `bounds` instead. 
+ */ + def fullBounds(param: TypeParamRef)(using Context): TypeBounds = + nonParamBounds(param).derivedTypeBounds(fullLowerBound(param), fullUpperBound(param)) + + /** An approximating map that prevents types nested deeper than maxLevel as + * well as WildcardTypes from leaking into the constraint. + */ + class LevelAvoidMap(topLevelVariance: Int, maxLevel: Int)(using Context) extends TypeOps.AvoidMap: + variance = topLevelVariance + + def toAvoid(tp: NamedType): Boolean = + tp.prefix == NoPrefix && !tp.symbol.isStatic && !levelOK(tp.symbol.nestingLevel, maxLevel) + + /** Return a (possibly fresh) type variable of a level no greater than `maxLevel` which is: + * - lower-bounded by `tp` if variance >= 0 + * - upper-bounded by `tp` if variance <= 0 + * If this isn't possible, return the empty range. + */ + def legalVar(tp: TypeVar): Type = + val oldParam = tp.origin + val nameKind = + if variance > 0 then AvoidNameKind.UpperBound + else if variance < 0 then AvoidNameKind.LowerBound + else AvoidNameKind.BothBounds + + /** If it exists, return the first param in the list created in a previous call to `legalVar(tp)` + * with the appropriate level and variance. + */ + def findParam(params: List[TypeParamRef]): Option[TypeParamRef] = + params.find(p => + nestingLevel(p) <= maxLevel && representedParamRef(p) == oldParam && + (p.paramName.is(AvoidNameKind.BothBounds) || + variance != 0 && p.paramName.is(nameKind))) + + // First, check if we can reuse an existing parameter, this is more than an optimization + // since it avoids an infinite loop in tests/pos/i8900-cycle.scala + findParam(constraint.lower(oldParam)).orElse(findParam(constraint.upper(oldParam))) match + case Some(param) => + constraint.typeVarOfParam(param) + case _ => + // Otherwise, try to return a fresh type variable at `maxLevel` with + // the appropriate constraints. 
+ val name = nameKind(oldParam.paramName.toTermName).toTypeName + val freshVar = newTypeVar(TypeBounds.upper(tp.topType), name, + nestingLevel = maxLevel, represents = oldParam) + val ok = + if variance < 0 then + addLess(freshVar.origin, oldParam) + else if variance > 0 then + addLess(oldParam, freshVar.origin) + else + unify(freshVar.origin, oldParam) + if ok then freshVar else emptyRange + end legalVar + + override def apply(tp: Type): Type = tp match + case tp: TypeVar if !tp.isInstantiated && !levelOK(tp.nestingLevel, maxLevel) => + legalVar(tp) + // TypeParamRef can occur in tl bounds + case tp: TypeParamRef => + constraint.typeVarOfParam(tp) match + case tvar: TypeVar => + apply(tvar) + case _ => super.apply(tp) + case _ => + super.apply(tp) + + override def mapWild(t: WildcardType) = + if ctx.mode.is(Mode.TypevarsMissContext) then super.mapWild(t) + else + val tvar = newTypeVar(apply(t.effectiveBounds).toBounds, nestingLevel = maxLevel) + tvar + end LevelAvoidMap + + /** Approximate `rawBound` if needed to make it a legal bound of `param` by + * avoiding wildcards and types with a level strictly greater than its + * `nestingLevel`. + * + * Note that level-checking must be performed here and cannot be delayed + * until instantiation because if we allow level-incorrect bounds, then we + * might end up reasoning with bad bounds outside of the scope where they are + * defined. This can lead to level-correct but unsound instantiations as + * demonstrated by tests/neg/i8900.scala. + */ + protected def legalBound(param: TypeParamRef, rawBound: Type, isUpper: Boolean)(using Context): Type = + // Over-approximate for soundness. + var variance = if isUpper then -1 else 1 + // ...unless we can only infer necessary constraints, in which case we + // flip the variance to under-approximate. 
+ if necessaryConstraintsOnly then variance = -variance + + val approx = new LevelAvoidMap(variance, nestingLevel(param)): + override def legalVar(tp: TypeVar): Type = + // `legalVar` will create a type variable whose bounds depend on + // `variance`, but whether the variance is positive or negative, + // we can still infer necessary constraints since just creating a + // type variable doesn't reduce the set of possible solutions. + // Therefore, we can safely "unflip" the variance flipped above. + // This is necessary for i8900-unflip.scala to typecheck. + val v = if necessaryConstraintsOnly then -this.variance else this.variance + atVariance(v)(super.legalVar(tp)) + approx(rawBound) + end legalBound + + protected def addOneBound(param: TypeParamRef, rawBound: Type, isUpper: Boolean)(using Context): Boolean = + if !constraint.contains(param) then true + else if !isUpper && param.occursIn(rawBound) then + // We don't allow recursive lower bounds when defining a type, + // so we shouldn't allow them as constraints either. + false + else + val bound = legalBound(param, rawBound, isUpper) + val oldBounds @ TypeBounds(lo, hi) = constraint.nonParamBounds(param) + val equalBounds = (if isUpper then lo else hi) eq bound + if equalBounds && !bound.existsPart(_ eq param, StopAt.Static) then + // The narrowed bounds are equal and not recursive, + // so we can remove `param` from the constraint. + constraint = constraint.replace(param, bound) + true + else + // Narrow one of the bounds of type parameter `param` + // If `isUpper` is true, ensure that `param <: `bound`, otherwise ensure + // that `param >: bound`. 
+ val narrowedBounds = + val saved = homogenizeArgs + homogenizeArgs = Config.alignArgsInAnd + try + withUntrustedBounds( + if isUpper then oldBounds.derivedTypeBounds(lo, hi & bound) + else oldBounds.derivedTypeBounds(lo | bound, hi)) + finally + homogenizeArgs = saved + //println(i"narrow bounds for $param from $oldBounds to $narrowedBounds") + val c1 = constraint.updateEntry(param, narrowedBounds) + (c1 eq constraint) + || { + constraint = c1 + val TypeBounds(lo, hi) = constraint.entry(param): @unchecked + isSub(lo, hi) + } + end addOneBound + + protected def addBoundTransitively(param: TypeParamRef, rawBound: Type, isUpper: Boolean)(using Context): Boolean = + + /** Adjust the bound `tp` in the following ways: + * + * 1. Toplevel occurrences of TypeRefs that are instantiated in the current + * constraint are also dereferenced. + * 2. Toplevel occurrences of ExprTypes lead to a `NoType` return, which + * causes the addOneBound operation to fail. + * + * An occurrence is toplevel if it is the bound itself, or a term in some + * combination of `&` or `|` types. + */ + def adjust(tp: Type): Type = tp match + case tp: AndOrType => + val p1 = adjust(tp.tp1) + val p2 = adjust(tp.tp2) + if p1.exists && p2.exists then tp.derivedAndOrType(p1, p2) else NoType + case tp: TypeVar if constraint.contains(tp.origin) => + adjust(tp.underlying) + case tp: ExprType => + // ExprTypes are not value types, so type parameters should not + // be instantiated to ExprTypes. A scenario where such an attempted + // instantiation can happen is if we unify (=> T) => () with A => () + // where A is a TypeParamRef. See the comment on EtaExpansion.etaExpand + // why types such as (=> T) => () can be constructed and i7969.scala + // as a test where this happens. + // Note that scalac by contrast allows such instantiations. But letting + // type variables be ExprTypes has its own problems (e.g. you can't write + // the resulting types down) and is largely unknown terrain. 
+ NoType + case _ => + tp + + def description = i"constraint $param ${if isUpper then "<:" else ":>"} $rawBound to\n$constraint" + constr.println(i"adding $description$location") + if isUpper && rawBound.isRef(defn.NothingClass) && ctx.typerState.isGlobalCommittable then + def msg = i"!!! instantiated to Nothing: $param, constraint = $constraint" + if Config.failOnInstantiationToNothing + then assert(false, msg) + else report.log(msg) + def others = if isUpper then constraint.lower(param) else constraint.upper(param) + val bound = adjust(rawBound) + bound.exists + && addOneBound(param, bound, isUpper) && others.forall(addOneBound(_, bound, isUpper)) + .showing(i"added $description = $result$location", constr) + end addBoundTransitively + + protected def addLess(p1: TypeParamRef, p2: TypeParamRef)(using Context): Boolean = { + def description = i"ordering $p1 <: $p2 to\n$constraint" + val res = + if (constraint.isLess(p2, p1)) unify(p2, p1) + else { + val down1 = p1 :: constraint.exclusiveLower(p1, p2) + val up2 = p2 :: constraint.exclusiveUpper(p2, p1) + val lo1 = constraint.nonParamBounds(p1).lo + val hi2 = constraint.nonParamBounds(p2).hi + constr.println(i"adding $description down1 = $down1, up2 = $up2$location") + constraint = constraint.addLess(p1, p2) + down1.forall(addOneBound(_, hi2, isUpper = true)) && + up2.forall(addOneBound(_, lo1, isUpper = false)) + } + constr.println(i"added $description = $res$location") + res + } + + def location(using Context) = "" // i"in ${ctx.typerState.stateChainStr}" // use for debugging + + /** Unify p1 with p2: one parameter will be kept in the constraint, the + * other will be removed and its bounds transferred to the remaining one. + * + * If p1 and p2 have different `nestingLevel`, the parameter with the lowest + * level will be kept and the transferred bounds from the other parameter + * will be adjusted for level-correctness. 
+ */ + private def unify(p1: TypeParamRef, p2: TypeParamRef)(using Context): Boolean = { + constr.println(s"unifying $p1 $p2") + if !constraint.isLess(p1, p2) then + constraint = constraint.addLess(p1, p2) + + val level1 = nestingLevel(p1) + val level2 = nestingLevel(p2) + val pKept = if level1 <= level2 then p1 else p2 + val pRemoved = if level1 <= level2 then p2 else p1 + + val down = constraint.exclusiveLower(p2, p1) + val up = constraint.exclusiveUpper(p1, p2) + + constraint = constraint.addLess(p2, p1, direction = if pKept eq p1 then KeepParam2 else KeepParam1) + + val boundKept = constraint.nonParamBounds(pKept).substParam(pRemoved, pKept) + var boundRemoved = constraint.nonParamBounds(pRemoved).substParam(pRemoved, pKept) + + if level1 != level2 then + boundRemoved = LevelAvoidMap(-1, math.min(level1, level2))(boundRemoved) + val TypeBounds(lo, hi) = boundRemoved: @unchecked + // After avoidance, the interval might be empty, e.g. in + // tests/pos/i8900-promote.scala: + // >: x.type <: Singleton + // becomes: + // >: Int <: Singleton + // In that case, we can still get a legal constraint + // by replacing the lower-bound to get: + // >: Int & Singleton <: Singleton + if !isSub(lo, hi) then + boundRemoved = TypeBounds(lo & hi, hi) + + val newBounds = (boundKept & boundRemoved).bounds + constraint = constraint.updateEntry(pKept, newBounds).replace(pRemoved, pKept) + + val lo = newBounds.lo + val hi = newBounds.hi + isSub(lo, hi) && + down.forall(addOneBound(_, hi, isUpper = true)) && + up.forall(addOneBound(_, lo, isUpper = false)) + } + + protected def isSubType(tp1: Type, tp2: Type, whenFrozen: Boolean)(using Context): Boolean = + if (whenFrozen) + isSubTypeWhenFrozen(tp1, tp2) + else + isSub(tp1, tp2) + + inline final def inFrozenConstraint[T](op: => T): T = { + val savedFrozen = frozenConstraint + val savedLambda = caseLambda + frozenConstraint = true + caseLambda = NoType + try op + finally { + frozenConstraint = savedFrozen + caseLambda = savedLambda + } 
+ } + + final def isSubTypeWhenFrozen(tp1: Type, tp2: Type)(using Context): Boolean = inFrozenConstraint(isSub(tp1, tp2)) + final def isSameTypeWhenFrozen(tp1: Type, tp2: Type)(using Context): Boolean = inFrozenConstraint(isSame(tp1, tp2)) + + /** Test whether the lower bounds of all parameters in this + * constraint are a solution to the constraint. + */ + protected final def isSatisfiable(using Context): Boolean = + constraint.forallParams { param => + val TypeBounds(lo, hi) = constraint.entry(param): @unchecked + isSub(lo, hi) || { + report.log(i"sub fail $lo <:< $hi") + false + } + } + + /** Fix instance type `tp` by avoidance so that it does not contain references + * to types at level > `maxLevel`. + * @param tp the type to be fixed + * @param fromBelow whether type was obtained from lower bound + * @param maxLevel the maximum level of references allowed + * @param param the parameter that was instantiated + */ + private def fixLevels(tp: Type, fromBelow: Boolean, maxLevel: Int, param: TypeParamRef)(using Context) = + + def needsFix(tp: NamedType)(using Context) = + (tp.prefix eq NoPrefix) && tp.symbol.nestingLevel > maxLevel + + /** An accumulator that determines whether levels need to be fixed + * and computes on the side sets of nested type variables that need + * to be instantiated. + */ + def needsLeveling = new TypeAccumulator[Boolean]: + if !fromBelow then variance = -1 + + def apply(need: Boolean, tp: Type) = + need || tp.match + case tp: NamedType => + needsFix(tp) + || !stopBecauseStaticOrLocal(tp) && apply(need, tp.prefix) + case tp: TypeVar => + val inst = tp.instanceOpt + if inst.exists then apply(need, inst) + else if tp.nestingLevel > maxLevel then + // Change the nesting level of inner type variable to `maxLevel`. + // This means that the type variable will be instantiated later to a + // less nested type. 
If there are other references to the same type variable + // that do not come from the type undergoing `fixLevels`, this could lead + // to coarser types than intended. An alternative is to instantiate the + // type variable right away, but this also loses information. See + // i15934.scala for a test where the current strategey works but an early instantiation + // of `tp` would fail. + constr.println(i"widening nesting level of type variable $tp from ${tp.nestingLevel} to $maxLevel") + ctx.typerState.setNestingLevel(tp, maxLevel) + true + else false + case _ => + foldOver(need, tp) + end needsLeveling + + def levelAvoid = new TypeOps.AvoidMap: + if !fromBelow then variance = -1 + def toAvoid(tp: NamedType) = needsFix(tp) + + if Config.checkLevelsOnInstantiation && !ctx.isAfterTyper && needsLeveling(false, tp) then + typr.println(i"instance $tp for $param needs leveling to $maxLevel") + levelAvoid(tp) + else tp + end fixLevels + + /** Solve constraint set for given type parameter `param`. + * If `fromBelow` is true the parameter is approximated by its lower bound, + * otherwise it is approximated by its upper bound, unless the upper bound + * contains a reference to the parameter itself (such occurrences can arise + * for F-bounded types, `addOneBound` ensures that they never occur in the + * lower bound). + * The solved type is not allowed to contain references to types nested deeper + * than `maxLevel`. + * Wildcard types in bounds are approximated by their upper or lower bounds. + * The constraint is left unchanged. + * @return the instantiating type + * @pre `param` is in the constraint's domain. 
+ */ + final def approximation(param: TypeParamRef, fromBelow: Boolean, maxLevel: Int)(using Context): Type = + constraint.entry(param) match + case entry: TypeBounds => + val useLowerBound = fromBelow || param.occursIn(entry.hi) + val rawInst = withUntrustedBounds( + if useLowerBound then fullLowerBound(param) else fullUpperBound(param)) + val levelInst = fixLevels(rawInst, fromBelow, maxLevel, param) + if levelInst ne rawInst then + typr.println(i"level avoid for $maxLevel: $rawInst --> $levelInst") + typr.println(i"approx $param, from below = $fromBelow, inst = $levelInst") + levelInst + case inst => + assert(inst.exists, i"param = $param\nconstraint = $constraint") + inst + end approximation + + private def isTransparent(tp: Type, traitOnly: Boolean)(using Context): Boolean = tp match + case AndType(tp1, tp2) => + isTransparent(tp1, traitOnly) && isTransparent(tp2, traitOnly) + case _ => + val cls = tp.underlyingClassRef(refinementOK = false).typeSymbol + cls.isTransparentClass && (!traitOnly || cls.is(Trait)) + + /** If `tp` is an intersection such that some operands are transparent trait instances + * and others are not, replace as many transparent trait instances as possible with Any + * as long as the result is still a subtype of `bound`. But fall back to the + * original type if the resulting widened type is a supertype of all dropped + * types (since in this case the type was not a true intersection of transparent traits + * and other types to start with). 
+ */ + def dropTransparentTraits(tp: Type, bound: Type)(using Context): Type = + var kept: Set[Type] = Set() // types to keep since otherwise bound would not fit + var dropped: List[Type] = List() // the types dropped so far, last one on top + + def dropOneTransparentTrait(tp: Type): Type = + if isTransparent(tp, traitOnly = true) && !kept.contains(tp) then + dropped = tp :: dropped + defn.AnyType + else tp match + case AndType(tp1, tp2) => + val tp1w = dropOneTransparentTrait(tp1) + if tp1w ne tp1 then tp1w & tp2 + else + val tp2w = dropOneTransparentTrait(tp2) + if tp2w ne tp2 then tp1 & tp2w + else tp + case _ => + tp + + def recur(tp: Type): Type = + val tpw = dropOneTransparentTrait(tp) + if tpw eq tp then tp + else if tpw <:< bound then recur(tpw) + else + kept += dropped.head + dropped = dropped.tail + recur(tp) + + val saved = ctx.typerState.snapshot() + val tpw = recur(tp) + if (tpw eq tp) || dropped.forall(_ frozen_<:< tpw) then + // Rollback any constraint change that would lead to `tp` no longer + // being a valid solution. + ctx.typerState.resetTo(saved) + tp + else + tpw + end dropTransparentTraits + + /** If `tp` is an applied match type alias which is also an unreducible application + * of a higher-kinded type to a wildcard argument, widen to the match type's bound, + * in order to avoid an unreducible application of higher-kinded type ... in inferred type" + * error in PostTyper. Fixes #11246. + */ + def widenIrreducible(tp: Type)(using Context): Type = tp match + case tp @ AppliedType(tycon, _) if tycon.isLambdaSub && tp.hasWildcardArg => + tp.superType match + case MatchType(bound, _, _) => bound + case _ => tp + case _ => + tp + + /** Widen inferred type `inst` with upper `bound`, according to the following rules: + * 1. If `inst` is a singleton type, or a union containing some singleton types, + * widen (all) the singleton type(s), provided the result is a subtype of `bound` + * (i.e. 
`inst.widenSingletons <:< bound` succeeds with satisfiable constraint) and + * is not transparent according to `isTransparent`. + * 2a. If `inst` is a union type and `widenUnions` is true, approximate the union type + * from above by an intersection of all common base types, provided the result + * is a subtype of `bound`. + * 2b. If `inst` is a union type and `widenUnions` is false, turn it into a hard + * union type (except for unions | Null, which are kept in the state they were). + * 3. Widen some irreducible applications of higher-kinded types to wildcard arguments + * (see @widenIrreducible). + * 4. Drop transparent traits from intersections (see @dropTransparentTraits). + * + * Don't do these widenings if `bound` is a subtype of `scala.Singleton`. + * Also, if the result of these widenings is a TypeRef to a module class, + * and this type ref is different from `inst`, replace by a TermRef to + * its source module instead. + * + * At this point we also drop the @Repeated annotation to avoid inferring type arguments with it, + * as those could leak the annotation to users (see run/inferred-repeated-result). 
+ */ + def widenInferred(inst: Type, bound: Type, widenUnions: Boolean)(using Context): Type = + def widenOr(tp: Type) = + if widenUnions then + val tpw = tp.widenUnion + if (tpw ne tp) && !isTransparent(tpw, traitOnly = false) && (tpw <:< bound) then tpw else tp + else tp.hardenUnions + + def widenSingle(tp: Type) = + val tpw = tp.widenSingletons + if (tpw ne tp) && (tpw <:< bound) then tpw else tp + + def isSingleton(tp: Type): Boolean = tp match + case WildcardType(optBounds) => optBounds.exists && isSingleton(optBounds.bounds.hi) + case _ => isSubTypeWhenFrozen(tp, defn.SingletonType) + + val wideInst = + if isSingleton(bound) then inst + else + val widenedFromSingle = widenSingle(inst) + val widenedFromUnion = widenOr(widenedFromSingle) + val widened = dropTransparentTraits(widenedFromUnion, bound) + widenIrreducible(widened) + + wideInst match + case wideInst: TypeRef if wideInst.symbol.is(Module) => + TermRef(wideInst.prefix, wideInst.symbol.sourceModule) + case _ => + wideInst.dropRepeatedAnnot + end widenInferred + + /** Convert all toplevel union types in `tp` to hard unions */ + extension (tp: Type) private def hardenUnions(using Context): Type = tp.widen match + case tp: AndType => + tp.derivedAndType(tp.tp1.hardenUnions, tp.tp2.hardenUnions) + case tp: RefinedType => + tp.derivedRefinedType(tp.parent.hardenUnions, tp.refinedName, tp.refinedInfo) + case tp: RecType => + tp.rebind(tp.parent.hardenUnions) + case tp: HKTypeLambda => + tp.derivedLambdaType(resType = tp.resType.hardenUnions) + case tp: OrType => + val tp1 = tp.stripNull + if tp1 ne tp then tp.derivedOrType(tp1.hardenUnions, defn.NullType) + else tp.derivedOrType(tp.tp1.hardenUnions, tp.tp2.hardenUnions, soft = false) + case _ => + tp + + /** The instance type of `param` in the current constraint (which contains `param`). + * If `fromBelow` is true, the instance type is the lub of the parameter's + * lower bounds; otherwise it is the glb of its upper bounds. 
However, + * a lower bound instantiation can be a singleton type only if the upper bound + * is also a singleton type. + * The instance type is not allowed to contain references to types nested deeper + * than `maxLevel`. + */ + def instanceType(param: TypeParamRef, fromBelow: Boolean, widenUnions: Boolean, maxLevel: Int)(using Context): Type = { + val approx = approximation(param, fromBelow, maxLevel).simplified + if fromBelow then + val widened = widenInferred(approx, param, widenUnions) + // Widening can add extra constraints, in particular the widened type might + // be a type variable which is now instantiated to `param`, and therefore + // cannot be used as an instantiation of `param` without creating a loop. + // If that happens, we run `instanceType` again to find a new instantation. + // (we do not check for non-toplevel occurences: those should never occur + // since `addOneBound` disallows recursive lower bounds). + if constraint.occursAtToplevel(param, widened) then + instanceType(param, fromBelow, widenUnions, maxLevel) + else + widened + else + approx + } + + /** Constraint `c1` subsumes constraint `c2`, if under `c2` as constraint we have + * for all poly params `p` defined in `c2` as `p >: L2 <: U2`: + * + * c1 defines p with bounds p >: L1 <: U1, and + * L2 <: L1, and + * U1 <: U2 + * + * Both `c1` and `c2` are required to derive from constraint `pre`, without adding + * any new type variables but possibly narrowing already registered ones with further bounds. + */ + protected final def subsumes(c1: Constraint, c2: Constraint, pre: Constraint)(using Context): Boolean = + if (c2 eq pre) true + else if (c1 eq pre) false + else { + val saved = constraint + try + // We iterate over params of `pre`, instead of `c2` as the documentation may suggest. + // As neither `c1` nor `c2` can have more params than `pre`, this only matters in one edge case. + // Constraint#forallParams only iterates over params that can be directly constrained. 
+ // If `c2` has, compared to `pre`, instantiated a param and we iterated over params of `c2`, + // we could miss that param being instantiated to an incompatible type in `c1`. + pre.forallParams(p => + c1.entry(p).exists + && c2.upper(p).forall(c1.isLess(p, _)) + && isSubTypeWhenFrozen(c1.nonParamBounds(p), c2.nonParamBounds(p)) + ) + finally constraint = saved + } + + /** The current bounds of type parameter `param` */ + def bounds(param: TypeParamRef)(using Context): TypeBounds = { + val e = constraint.entry(param) + if (e.exists) e.bounds + else { + // TODO: should we change the type of paramInfos to nullable? + val pinfos: List[param.binder.PInfo] | Null = param.binder.paramInfos + if (pinfos != null) pinfos(param.paramNum) // pinfos == null happens in pos/i536.scala + else TypeBounds.empty + } + } + + /** Add type lambda `tl`, possibly with type variables `tvars`, to current constraint + * and propagate all bounds. + * @param tvars See Constraint#add + */ + def addToConstraint(tl: TypeLambda, tvars: List[TypeVar])(using Context): Boolean = + checkPropagated(i"initialized $tl") { + constraint = constraint.add(tl, tvars) + tl.paramRefs.forall { param => + val lower = constraint.lower(param) + val upper = constraint.upper(param) + constraint.entry(param) match { + case bounds: TypeBounds => + if lower.nonEmpty && !bounds.lo.isRef(defn.NothingClass) + || upper.nonEmpty && !bounds.hi.isAny + then constr.println(i"INIT*** $tl") + lower.forall(addOneBound(_, bounds.hi, isUpper = true)) && + upper.forall(addOneBound(_, bounds.lo, isUpper = false)) + case x => + // Happens if param was already solved while processing earlier params of the same TypeLambda. + // See #4720. + true + } + } + } + + /** Can `param` be constrained with new bounds? */ + final def canConstrain(param: TypeParamRef): Boolean = + (!frozenConstraint || (caseLambda `eq` param.binder)) && constraint.contains(param) + + /** Is `param` assumed to be a sub- and super-type of any other type? 
+ * This holds if `TypeVarsMissContext` is set unless `param` is a part + * of a MatchType that is currently normalized. + */ + final def assumedTrue(param: TypeParamRef)(using Context): Boolean = + ctx.mode.is(Mode.TypevarsMissContext) && (caseLambda `ne` param.binder) + + /** Add constraint `param <: bound` if `fromBelow` is false, `param >: bound` otherwise. + * `bound` is assumed to be in normalized form, as specified in `firstTry` and + * `secondTry` of `TypeComparer`. In particular, it should not be an alias type, + * lazy ref, typevar, wildcard type, error type. In addition, upper bounds may + * not be AndTypes and lower bounds may not be OrTypes. This is assured by the + * way isSubType is organized. + */ + protected def addConstraint(param: TypeParamRef, bound: Type, fromBelow: Boolean)(using Context): Boolean = + if !bound.isValueTypeOrLambda then return false + + /** When comparing lambdas we might get constraints such as + * `A <: X0` or `A = List[X0]` where `A` is a constrained parameter + * and `X0` is a lambda parameter. The constraint for `A` is not allowed + * to refer to such a lambda parameter because the lambda parameter is + * not visible where `A` is defined. Consequently, we need to + * approximate the bound so that the lambda parameter does not appear in it. + * If `tp` is an upper bound, we need to approximate with something smaller, + * otherwise something larger. + * Test case in pos/i94-nada.scala. This test crashes with an illegal instance + * error in Test2 when the rest of the SI-2712 fix is applied but `pruneLambdaParams` is + * missing. 
+ */ + def avoidLambdaParams(tp: Type) = + if comparedTypeLambdas.nonEmpty then + val approx = new ApproximatingTypeMap { + if (!fromBelow) variance = -1 + def apply(t: Type): Type = t match { + case t @ TypeParamRef(tl: TypeLambda, n) if comparedTypeLambdas contains tl => + val bounds = tl.paramInfos(n) + range(bounds.lo, bounds.hi) + case tl: TypeLambda => + val saved = comparedTypeLambdas + comparedTypeLambdas -= tl + try mapOver(tl) + finally comparedTypeLambdas = saved + case _ => + mapOver(t) + } + } + approx(tp) + else tp + + def addParamBound(bound: TypeParamRef) = + constraint.entry(param) match { + case _: TypeBounds => + if (fromBelow) addLess(bound, param) else addLess(param, bound) + case tp => + if (fromBelow) isSub(bound, tp) else isSub(tp, bound) + } + + def kindCompatible(tp1: Type, tp2: Type): Boolean = + val tparams1 = tp1.typeParams + val tparams2 = tp2.typeParams + tparams1.corresponds(tparams2)((p1, p2) => kindCompatible(p1.paramInfo, p2.paramInfo)) + && (tparams1.isEmpty || kindCompatible(tp1.hkResult, tp2.hkResult)) + || tp1.hasAnyKind + || tp2.hasAnyKind + + def description = i"constr $param ${if (fromBelow) ">:" else "<:"} $bound:\n$constraint" + + //checkPropagated(s"adding $description")(true) // DEBUG in case following fails + checkPropagated(s"added $description") { + addConstraintInvocations += 1 + val saved = canWidenAbstract + canWidenAbstract = true + try bound match + case bound: TypeParamRef if constraint contains bound => + addParamBound(bound) + case _ => + val pbound = avoidLambdaParams(bound) + kindCompatible(param, pbound) && addBoundTransitively(param, pbound, !fromBelow) + finally + canWidenAbstract = saved + addConstraintInvocations -= 1 + } + end addConstraint + + /** Check that constraint is fully propagated. 
See comment in Config.checkConstraintsPropagated */ + def checkPropagated(msg: => String)(result: Boolean)(using Context): Boolean = { + if (Config.checkConstraintsPropagated && result && addConstraintInvocations == 0) + inFrozenConstraint { + for (p <- constraint.domainParams) { + def check(cond: => Boolean, q: TypeParamRef, ordering: String, explanation: String): Unit = + assert(cond, i"propagation failure for $p $ordering $q: $explanation\n$msg") + for (u <- constraint.upper(p)) + check(bounds(p).hi <:< bounds(u).hi, u, "<:", "upper bound not propagated") + for (l <- constraint.lower(p)) { + check(bounds(l).lo <:< bounds(p).hi, l, ">:", "lower bound not propagated") + check(constraint.isLess(l, p), l, ">:", "reverse ordering (<:) missing") + } + } + } + result + } +} diff --git a/tests/pos-with-compiler-cc/dotc/core/ConstraintRunInfo.scala b/tests/pos-with-compiler-cc/dotc/core/ConstraintRunInfo.scala new file mode 100644 index 000000000000..d2b1246a8149 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/core/ConstraintRunInfo.scala @@ -0,0 +1,23 @@ +package dotty.tools.dotc +package core + +import Contexts._ +import config.Printers.{default, typr} + +trait ConstraintRunInfo { self: Run => + private var maxSize = 0 + private var maxConstraint: Constraint | Null = _ + def recordConstraintSize(c: Constraint, size: Int): Unit = + if (size > maxSize) { + maxSize = size + maxConstraint = c + } + def printMaxConstraint()(using Context): Unit = + if maxSize > 0 then + val printer = if ctx.settings.YdetailedStats.value then default else typr + printer.println(s"max constraint size: $maxSize") + try printer.println(s"max constraint = ${maxConstraint.nn.show}") + catch case ex: StackOverflowError => printer.println("max constraint cannot be printed due to stack overflow") + + protected def reset(): Unit = maxConstraint = null +} diff --git a/tests/pos-with-compiler-cc/dotc/core/ContextOps.scala b/tests/pos-with-compiler-cc/dotc/core/ContextOps.scala new file mode 100644 
index 000000000000..20687dc1663a --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/core/ContextOps.scala @@ -0,0 +1,115 @@ +package dotty.tools.dotc +package core + +import Contexts._, Symbols._, Types._, Flags._ +import Denotations._, SymDenotations._ +import Names.Name, StdNames.nme +import ast.untpd +import caps.unsafe.unsafeBoxFunArg + +/** Extension methods for contexts where we want to keep the ctx. syntax */ +object ContextOps: + + extension (ctx: Context) + + /** Enter symbol into current class, if current class is owner of current context, + * or into current scope, if not. Should always be called instead of scope.enter + * in order to make sure that updates to class members are reflected in + * finger prints. + */ + def enter(sym: Symbol): Symbol = inContext(ctx) { + ctx.owner match + case cls: ClassSymbol => cls.classDenot.enter(sym) + case _ => ctx.scope.openForMutations.enter(sym) + sym + } + + /** The denotation with the given `name` and all `required` flags in current context + */ + def denotNamed(name: Name, required: FlagSet = EmptyFlags, excluded: FlagSet = EmptyFlags): Denotation = + inContext(ctx) { + if (ctx.owner.isClass) + if (ctx.outer.owner == ctx.owner) { // inner class scope; check whether we are referring to self + if (ctx.scope.size == 1) { + val elem = ctx.scope.lastEntry.nn + if (elem.name == name) return elem.sym.denot // return self + } + val pre = ctx.owner.thisType + if ctx.isJava then javaFindMember(name, pre, required, excluded) + else pre.findMember(name, pre, required, excluded) + } + else // we are in the outermost context belonging to a class; self is invisible here. See inClassContext. 
+ ctx.owner.findMember(name, ctx.owner.thisType, required, excluded) + else + ctx.scope.denotsNamed(name).filterWithFlags(required, excluded).toDenot(NoPrefix) + } + + final def javaFindMember(name: Name, pre: Type, required: FlagSet = EmptyFlags, excluded: FlagSet = EmptyFlags): Denotation = + assert(ctx.isJava) + inContext(ctx) { + + val preSym = pre.typeSymbol + + // 1. Try to search in current type and parents. + val directSearch = pre.findMember(name, pre, required, excluded) + + // 2. Try to search in companion class if current is an object. + def searchCompanionClass = if preSym.is(Flags.Module) then + preSym.companionClass.thisType.findMember(name, pre, required, excluded) + else NoDenotation + + // 3. Try to search in companion objects of super classes. + // In Java code, static inner classes, which we model as members of the companion object, + // can be referenced from an ident in a subclass or by a selection prefixed by the subclass. + def searchSuperCompanionObjects = + val toSearch = if preSym.is(Flags.Module) then + if preSym.companionClass.exists then + preSym.companionClass.asClass.baseClasses + else Nil + else + preSym.asClass.baseClasses + + toSearch.iterator.map { bc => + val pre1 = bc.companionModule.namedType + pre1.findMember(name, pre1, required, excluded) + }.find(_.exists).getOrElse(NoDenotation) + + if preSym.isClass then + directSearch orElse searchCompanionClass orElse searchSuperCompanionObjects + else + directSearch + } + + /** A fresh local context with given tree and owner. + * Owner might not exist (can happen for self valdefs), in which case + * no owner is set in result context + */ + def localContext(tree: untpd.Tree, owner: Symbol): FreshContext = inContext(ctx) { + val freshCtx = ctx.fresh.setTree(tree) + if owner.exists then freshCtx.setOwner(owner) else freshCtx + } + + /** Context where `sym` is defined, assuming we are in a nested context. 
*/ + def defContext(sym: Symbol): Context = inContext(ctx) { + ctx.outersIterator + .dropWhile(((ctx: Context) => ctx.owner != sym).unsafeBoxFunArg) + .dropWhile(((ctx: Context) => ctx.owner == sym).unsafeBoxFunArg) + .next() + } + + /** A new context for the interior of a class */ + def inClassContext(selfInfo: TypeOrSymbol): Context = + inline def op(using Context): Context = + val localCtx: Context = ctx.fresh.setNewScope + selfInfo match { + case sym: Symbol if sym.exists && sym.name != nme.WILDCARD => localCtx.scope.openForMutations.enter(sym) + case _ => + } + localCtx + op(using ctx) + + def packageContext(tree: untpd.PackageDef, pkg: Symbol): Context = inContext(ctx) { + if (pkg.is(Package)) ctx.fresh.setOwner(pkg.moduleClass).setTree(tree) + else ctx + } +end ContextOps diff --git a/tests/pos-with-compiler-cc/dotc/core/Contexts.scala b/tests/pos-with-compiler-cc/dotc/core/Contexts.scala new file mode 100644 index 000000000000..37fde2d7b604 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/core/Contexts.scala @@ -0,0 +1,1041 @@ +package dotty.tools +package dotc +package core + +import interfaces.CompilerCallback +import Decorators._ +import Periods._ +import Names._ +import Phases._ +import Types._ +import Symbols._ +import Scopes._ +import Uniques._ +import ast.Trees._ +import ast.untpd +import util.{NoSource, SimpleIdentityMap, SourceFile, HashSet, ReusableInstance} +import typer.{Implicits, ImportInfo, SearchHistory, SearchRoot, TypeAssigner, Typer, Nullables} +import inlines.Inliner +import Nullables._ +import Implicits.ContextualImplicits +import config.Settings._ +import config.Config +import reporting._ +import io.{AbstractFile, NoAbstractFile, PlainFile, Path} +import scala.io.Codec +import collection.mutable +import printing._ +import config.{JavaPlatform, SJSPlatform, Platform, ScalaSettings} +import classfile.ReusableDataReader +import StdNames.nme +import compiletime.uninitialized + +import annotation.internal.sharable +import 
annotation.retains + +import DenotTransformers.DenotTransformer +import dotty.tools.dotc.profile.Profiler +import util.Property.Key +import util.Store +import xsbti.AnalysisCallback +import plugins._ +import java.util.concurrent.atomic.AtomicInteger +import java.nio.file.InvalidPathException +import language.experimental.pureFunctions + +object Contexts { + + //@sharable var nextId = 0 + + private val (compilerCallbackLoc, store1) = Store.empty.newLocation[CompilerCallback]() + private val (sbtCallbackLoc, store2) = store1.newLocation[AnalysisCallback]() + private val (printerFnLoc, store3) = store2.newLocation[DetachedContext -> Printer](new RefinedPrinter(_)) + private val (settingsStateLoc, store4) = store3.newLocation[SettingsState]() + private val (compilationUnitLoc, store5) = store4.newLocation[CompilationUnit]() + private val (runLoc, store6) = store5.newLocation[Run | Null]() + private val (profilerLoc, store7) = store6.newLocation[Profiler]() + private val (notNullInfosLoc, store8) = store7.newLocation[List[NotNullInfo]]() + private val (importInfoLoc, store9) = store8.newLocation[ImportInfo | Null]() + private val (typeAssignerLoc, store10) = store9.newLocation[TypeAssigner](TypeAssigner) + + private val initialStore = store10 + + /** The current context */ + inline def ctx(using ctx: Context): Context = ctx + + /** Run `op` with given context */ + inline def inContext[T](c: Context)(inline op: Context ?-> T): T = + op(using c) + + /** Execute `op` at given period */ + inline def atPeriod[T](pd: Period)(inline op: Context ?-> T)(using Context): T = + op(using ctx.fresh.setPeriod(pd)) + + /** Execute `op` at given phase id */ + inline def atPhase[T](pid: PhaseId)(inline op: Context ?-> T)(using Context): T = + op(using ctx.withPhase(pid)) + + /** Execute `op` at given phase */ + inline def atPhase[T](phase: Phase)(inline op: Context ?-> T)(using Context): T = + op(using ctx.withPhase(phase)) + + inline def atNextPhase[T](inline op: Context ?-> T)(using 
Context): T = + atPhase(ctx.phase.next)(op) + + /** Execute `op` at the current phase if it's before the first transform phase, + * otherwise at the last phase before the first transform phase. + * + * Note: this should be used instead of `atPhaseNoLater(ctx.picklerPhase)` + * because the later won't work if the `Pickler` phase is not present (for example, + * when using `QuoteCompiler`). + */ + inline def atPhaseBeforeTransforms[T](inline op: Context ?-> T)(using Context): T = + atPhaseNoLater(firstTransformPhase.prev)(op) + + inline def atPhaseNoLater[T](limit: Phase)(inline op: Context ?-> T)(using Context): T = + op(using if !limit.exists || ctx.phase <= limit then ctx else ctx.withPhase(limit)) + + inline def atPhaseNoEarlier[T](limit: Phase)(inline op: Context ?-> T)(using Context): T = + op(using if !limit.exists || limit <= ctx.phase then ctx else ctx.withPhase(limit)) + + inline def inMode[T](mode: Mode)(inline op: Context ?-> T)(using ctx: Context): T = + op(using if mode != ctx.mode then ctx.fresh.setMode(mode) else ctx) + + inline def withMode[T](mode: Mode)(inline op: Context ?-> T)(using ctx: Context): T = + inMode(ctx.mode | mode)(op) + + inline def withoutMode[T](mode: Mode)(inline op: Context ?-> T)(using ctx: Context): T = + inMode(ctx.mode &~ mode)(op) + + inline def inDetachedContext[T](inline op: DetachedContext ?-> T)(using ctx: Context): T = + op(using ctx.detach) + + type Context = ContextCls @retains(caps.cap) + + /** A context is passed basically everywhere in dotc. + * This is convenient but carries the risk of captured contexts in + * objects that turn into space leaks. To combat this risk, here are some + * conventions to follow: + * + * - Never let an implicit context be an argument of a class whose instances + * live longer than the context. + * - Classes that need contexts for their initialization take an explicit parameter + * named `initctx`. 
They pass initctx to all positions where it is needed + * (and these positions should all be part of the intialization sequence of the class). + * - Classes that need contexts that survive initialization are instead passed + * a "condensed context", typically named `cctx` (or they create one). Condensed contexts + * just add some basic information to the context base without the + * risk of capturing complete trees. + * - To make sure these rules are kept, it would be good to do a sanity + * check using bytecode inspection with javap or scalap: Keep track + * of all class fields of type context; allow them only in whitelisted + * classes (which should be short-lived). + */ + abstract class ContextCls(val base: ContextBase) { + + //val id = nextId + //nextId += 1 + //assert(id != 35599) + + protected given Context = this + + def outer: ContextCls @retains(this) + def period: Period + def mode: Mode + def owner: Symbol + def tree: Tree[?] + def scope: Scope + def typerState: TyperState + def gadt: GadtConstraint + def searchHistory: SearchHistory + def source: SourceFile + + /** All outer contexts, ending in `base.initialCtx` and then `NoContext` */ + def outersIterator: Iterator[ContextCls @retains(this)] + + /** A map in which more contextual properties can be stored + * Typically used for attributes that are read and written only in special situations. + */ + def moreProperties: Map[Key[Any], Any] + + def property[T](key: Key[T]): Option[T] = + moreProperties.get(key).asInstanceOf[Option[T]] + + /** A store that can be used by sub-components. + * Typically used for attributes that are defined only once per compilation unit. + * Access to store entries is much faster than access to properties, and only + * slightly slower than a normal field access would be. + */ + def store: Store + + /** The compiler callback implementation, or null if no callback will be called. 
*/ + def compilerCallback: CompilerCallback = store(compilerCallbackLoc) + + /** The sbt callback implementation if we are run from sbt, null otherwise */ + def sbtCallback: AnalysisCallback = store(sbtCallbackLoc) + + /** The current plain printer */ + def printerFn: DetachedContext -> Printer = store(printerFnLoc) + + /** A function creating a printer */ + def printer: Printer = + val pr = printerFn(detach) + if this.settings.YplainPrinter.value then pr.plain else pr + + /** The current settings values */ + def settingsState: SettingsState = store(settingsStateLoc) + + /** The current compilation unit */ + def compilationUnit: CompilationUnit = store(compilationUnitLoc) + + /** The current compiler-run */ + def run: Run | Null = store(runLoc) + + /** The current compiler-run profiler */ + def profiler: Profiler = store(profilerLoc) + + /** The paths currently known to be not null */ + def notNullInfos: List[NotNullInfo] = store(notNullInfosLoc) + + /** The currently active import info */ + def importInfo: ImportInfo | Null = store(importInfoLoc) + + /** The current type assigner or typer */ + def typeAssigner: TypeAssigner = store(typeAssignerLoc) + + /** The new implicit references that are introduced by this scope */ + private var implicitsCache: ContextualImplicits | Null = null + def implicits: ContextualImplicits = { + if (implicitsCache == null) + implicitsCache = { + val implicitRefs: List[ImplicitRef] = + if (isClassDefContext) + try owner.thisType.implicitMembers + catch { + case ex: CyclicReference => Nil + } + else if (isImportContext) importInfo.nn.importedImplicits + else if (isNonEmptyScopeContext) scope.implicitDecls + else Nil + val outerImplicits = + if (isImportContext && importInfo.nn.unimported.exists) + outer.implicits exclude importInfo.nn.unimported + else + outer.implicits + if (implicitRefs.isEmpty) outerImplicits + else new ContextualImplicits(implicitRefs, outerImplicits, isImportContext)(detach) + } + implicitsCache.nn + } + + /** 
Either the current scope, or, if the current context owner is a class, + * the declarations of the current class. + */ + def effectiveScope(using Context): Scope = + val myOwner: Symbol | Null = owner + if myOwner != null && myOwner.isClass then myOwner.asClass.unforcedDecls + else scope + + def nestingLevel: Int = effectiveScope.nestingLevel + + /** Sourcefile corresponding to given abstract file, memoized */ + def getSource(file: AbstractFile, codec: -> Codec = Codec(settings.encoding.value)) = { + util.Stats.record("Context.getSource") + base.sources.getOrElseUpdate(file, SourceFile(file, codec)) + } + + /** SourceFile with given path name, memoized */ + def getSource(path: TermName): SourceFile = getFile(path) match + case NoAbstractFile => NoSource + case file => getSource(file) + + /** SourceFile with given path, memoized */ + def getSource(path: String): SourceFile = getSource(path.toTermName) + + /** AbstraFile with given path name, memoized */ + def getFile(name: TermName): AbstractFile = base.files.get(name) match + case Some(file) => + file + case None => + try + val file = new PlainFile(Path(name.toString)) + base.files(name) = file + file + catch + case ex: InvalidPathException => + report.error(em"invalid file path: ${ex.getMessage}") + NoAbstractFile + + /** AbstractFile with given path, memoized */ + def getFile(name: String): AbstractFile = getFile(name.toTermName) + + final def withPhase(phase: Phase): Context = ctx.fresh.setPhase(phase.id) + final def withPhase(pid: PhaseId): Context = ctx.fresh.setPhase(pid) + + private var related: SimpleIdentityMap[SourceFile, DetachedContext] | Null = null + + private def lookup(key: SourceFile): DetachedContext | Null = + util.Stats.record("Context.related.lookup") + if related == null then + related = SimpleIdentityMap.empty + null + else + related.nn(key) + + final def withSource(source: SourceFile): Context = + util.Stats.record("Context.withSource") + if this.source eq source then + this + else + var 
ctx1 = lookup(source) + if ctx1 == null then + util.Stats.record("Context.withSource.new") + val ctx2 = fresh.setSource(source) + if ctx2.compilationUnit eq NoCompilationUnit then + // `source` might correspond to a file not necessarily + // in the current project (e.g. when inlining library code), + // so set `mustExist` to false. + ctx2.setCompilationUnit(CompilationUnit(source, mustExist = false)) + val dctx = ctx2.detach + ctx1 = dctx + related = related.nn.updated(source, dctx) + ctx1 + + // `creationTrace`-related code. To enable, uncomment the code below and the + // call to `setCreationTrace()` in this file. + /* + /** If -Ydebug is on, the top of the stack trace where this context + * was created, otherwise `null`. + */ + private var creationTrace: Array[StackTraceElement] = uninitialized + + private def setCreationTrace() = + creationTrace = (new Throwable).getStackTrace().take(20) + + /** Print all enclosing context's creation stacktraces */ + def printCreationTraces() = { + println("=== context creation trace =======") + for (ctx <- outersIterator) { + println(s">>>>>>>>> $ctx") + if (ctx.creationTrace != null) println(ctx.creationTrace.mkString("\n")) + } + println("=== end context creation trace ===") + } + */ + + /** The current reporter */ + def reporter: Reporter = typerState.reporter + + final def phase: Phase = base.phases(period.firstPhaseId) + final def runId = period.runId + final def phaseId = period.phaseId + + final def lastPhaseId = base.phases.length - 1 + + /** Does current phase use an erased types interpretation? */ + final def erasedTypes = phase.erasedTypes + + /** Are we in a Java compilation unit? */ + final def isJava: Boolean = compilationUnit.isJava + + /** Is current phase after TyperPhase? */ + final def isAfterTyper = base.isAfterTyper(phase) + final def isTyper = base.isTyper(phase) + + /** Is this a context for the members of a class definition? 
*/ + def isClassDefContext: Boolean = + owner.isClass && (owner ne outer.owner) + + /** Is this a context that introduces an import clause? */ + def isImportContext: Boolean = + (this ne NoContext) + && (outer ne NoContext) + && (this.importInfo nen outer.importInfo) + + /** Is this a context that introduces a non-empty scope? */ + def isNonEmptyScopeContext: Boolean = + (this.scope ne outer.scope) && !this.scope.isEmpty + + /** Is this a context for typechecking an inlined body? */ + def isInlineContext: Boolean = + typer.isInstanceOf[Inliner#InlineTyper] + + /** The next outer context whose tree is a template or package definition + * Note: Currently unused + def enclTemplate: Context = { + var c = this + while (c != NoContext && !c.tree.isInstanceOf[Template[?]] && !c.tree.isInstanceOf[PackageDef[?]]) + c = c.outer + c + }*/ + + /** The context for a supercall. This context is used for elaborating + * the parents of a class and their arguments. + * The context is computed from the current class context. It has + * + * - as owner: The primary constructor of the class + * - as outer context: The context enclosing the class context + * - as scope: The parameter accessors in the class context + * + * The reasons for this peculiar choice of attributes are as follows: + * + * - The constructor must be the owner, because that's where any local methods or closures + * should go. + * - The context may not see any class members (inherited or defined), and should + * instead see definitions defined in the outer context which might be shadowed by + * such class members. That's why the outer context must be the outer context of the class. + * - At the same time the context should see the parameter accessors of the current class, + * that's why they get added to the local scope. 
An alternative would have been to have the + * context see the constructor parameters instead, but then we'd need a final substitution step + * from constructor parameters to class parameter accessors. + */ + def superCallContext: Context = { + val locals = newScopeWith(owner.typeParams ++ owner.asClass.paramAccessors: _*) + superOrThisCallContext(owner.primaryConstructor, locals) + } + + /** The context for the arguments of a this(...) constructor call. + * The context is computed from the local auxiliary constructor context. + * It has + * + * - as owner: The auxiliary constructor + * - as outer context: The context enclosing the enclosing class context + * - as scope: The parameters of the auxiliary constructor. + */ + def thisCallArgContext: Context = { + val constrCtx = detach.outersIterator.dropWhile(_.outer.owner == owner).next() + superOrThisCallContext(owner, constrCtx.scope) + .setTyperState(typerState) + .setGadt(gadt) + .fresh + .setScope(this.scope) + } + + /** The super- or this-call context with given owner and locals. */ + private def superOrThisCallContext(owner: Symbol, locals: Scope): FreshContext = { + var classCtx = detach.outersIterator.dropWhile(!_.isClassDefContext).next() + classCtx.outer.fresh.setOwner(owner) + .setScope(locals) + .setMode(classCtx.mode) + } + + /** The context of expression `expr` seen as a member of a statement sequence */ + def exprContext(stat: Tree[?], exprOwner: Symbol): Context = + if (exprOwner == this.owner) this + else if (untpd.isSuperConstrCall(stat) && this.owner.isClass) superCallContext + else fresh.setOwner(exprOwner) + + /** A new context that summarizes an import statement */ + def importContext(imp: Import[?], sym: Symbol): FreshContext = + fresh.setImportInfo(ImportInfo(sym, imp.selectors, imp.expr)) + + /** Is the debug option set? */ + def debug: Boolean = base.settings.Ydebug.value + + /** Is the verbose option set? 
*/ + def verbose: Boolean = base.settings.verbose.value + + /** Should use colors when printing? */ + def useColors: Boolean = + base.settings.color.value == "always" + + /** Is the explicit nulls option set? */ + def explicitNulls: Boolean = base.settings.YexplicitNulls.value + + /** A fresh clone of this context embedded in this context. */ + def fresh: FreshContext = freshOver(this) + + /** A fresh clone of this context embedded in the specified `outer` context. */ + def freshOver(outer: Context): FreshContext = + util.Stats.record("Context.fresh") + FreshContext(base).init(outer, this).setTyperState(this.typerState) + + final def withOwner(owner: Symbol): Context = + if (owner ne this.owner) fresh.setOwner(owner) else this + + final def withTyperState(typerState: TyperState): Context = + if typerState ne this.typerState then fresh.setTyperState(typerState) else this + + final def withUncommittedTyperState: Context = + withTyperState(typerState.uncommittedAncestor) + + final def withProperty[T](key: Key[T], value: Option[T]): Context = + if (property(key) == value) this + else value match { + case Some(v) => fresh.setProperty(key, v) + case None => fresh.dropProperty(key) + } + + def typer: Typer = this.typeAssigner match { + case typer: Typer => typer + case _ => new Typer + } + + override def toString: String = + //if true then + // outersIterator.map { ctx => + // i"${ctx.id} / ${ctx.owner} / ${ctx.moreProperties.valuesIterator.map(_.getClass).toList.mkString(", ")}" + // }.mkString("\n") + //else + def iinfo(using Context) = + val info = ctx.importInfo + if (info == null) "" else i"${info.selectors}%, %" + def cinfo(using Context) = + val core = s" owner = ${ctx.owner}, scope = ${ctx.scope}, import = $iinfo" + if (ctx ne NoContext) && (ctx.implicits ne ctx.outer.implicits) then + s"$core, implicits = ${ctx.implicits}" + else + core + s"""Context( + |${outersIterator.map(ctx => cinfo(using ctx)).mkString("\n\n")})""".stripMargin + + def settings: 
ScalaSettings = base.settings + def definitions: Definitions = base.definitions + def platform: Platform = base.platform + def pendingUnderlying: util.HashSet[Type] = base.pendingUnderlying + def uniqueNamedTypes: Uniques.NamedTypeUniques = base.uniqueNamedTypes + def uniques: util.WeakHashSet[Type] = base.uniques + + def initialize()(using Context): Unit = base.initialize() + + protected def resetCaches(): Unit = + implicitsCache = null + related = null + + /** Reuse this context as a fresh context nested inside `outer` */ + def reuseIn(outer: Context): this.type + + def detach: DetachedContext + } + + object detached: + opaque type DetachedContext <: ContextCls = ContextCls + inline def apply(c: ContextCls): DetachedContext = c + + type DetachedContext = detached.DetachedContext + + /** A condensed context provides only a small memory footprint over + * a Context base, and therefore can be stored without problems in + * long-lived objects. + abstract class CondensedContext extends Context { + override def condensed = this + } + */ + + /** A fresh context allows selective modification + * of its attributes using the with... methods. + */ + class FreshContext(base: ContextBase) extends ContextCls(base) { thiscontext => + + private var _outer: DetachedContext = uninitialized + def outer: DetachedContext = _outer + + def outersIterator: Iterator[ContextCls] = new Iterator[ContextCls] { + var current: ContextCls = thiscontext + def hasNext = current != NoContext + def next = { val c = current; current = current.outer; c } + } + + private var _period: Period = uninitialized + final def period: Period = _period + + private var _mode: Mode = uninitialized + final def mode: Mode = _mode + + private var _owner: Symbol = uninitialized + final def owner: Symbol = _owner + + private var _tree: Tree[?]= _ + final def tree: Tree[?] 
= _tree + + private var _scope: Scope = uninitialized + final def scope: Scope = _scope + + private var _typerState: TyperState = uninitialized + final def typerState: TyperState = _typerState + + private var _gadt: GadtConstraint = uninitialized + final def gadt: GadtConstraint = _gadt + + private var _searchHistory: SearchHistory = uninitialized + final def searchHistory: SearchHistory = _searchHistory + + private var _source: SourceFile = uninitialized + final def source: SourceFile = _source + + private var _moreProperties: Map[Key[Any], Any] = uninitialized + final def moreProperties: Map[Key[Any], Any] = _moreProperties + + private var _store: Store = uninitialized + final def store: Store = _store + + /** Initialize all context fields, except typerState, which has to be set separately + * @param outer The outer context + * @param origin The context from which fields are copied + */ + private[Contexts] def init(outer: Context, origin: Context): this.type = { + _outer = outer.asInstanceOf[DetachedContext] + _period = origin.period + _mode = origin.mode + _owner = origin.owner + _tree = origin.tree + _scope = origin.scope + _gadt = origin.gadt + _searchHistory = origin.searchHistory + _source = origin.source + _moreProperties = origin.moreProperties + _store = origin.store + this + } + + def reuseIn(outer: Context): this.type = + resetCaches() + init(outer, outer) + + def detach: DetachedContext = detached(this) + + def setPeriod(period: Period): this.type = + util.Stats.record("Context.setPeriod") + assert(period.firstPhaseId == period.lastPhaseId, period) + this._period = period + this + + def setMode(mode: Mode): this.type = + util.Stats.record("Context.setMode") + this._mode = mode + this + + def setOwner(owner: Symbol): this.type = + util.Stats.record("Context.setOwner") + assert(owner != NoSymbol) + this._owner = owner + this + + def setTree(tree: Tree[?]): this.type = + util.Stats.record("Context.setTree") + this._tree = tree + this + + def 
setScope(scope: Scope): this.type = + this._scope = scope + this + + def setNewScope: this.type = + util.Stats.record("Context.setScope") + this._scope = newScope + this + + def setTyperState(typerState: TyperState): this.type = + this._typerState = typerState + this + def setNewTyperState(): this.type = + setTyperState(typerState.fresh(committable = true)) + def setExploreTyperState(): this.type = + setTyperState(typerState.fresh(committable = false)) + def setReporter(reporter: Reporter): this.type = + setTyperState(typerState.fresh().setReporter(reporter)) + + def setTyper(typer: Typer): this.type = + this._scope = typer.scope + setTypeAssigner(typer) + + def setGadt(gadt: GadtConstraint): this.type = + util.Stats.record("Context.setGadt") + this._gadt = gadt + this + def setFreshGADTBounds: this.type = + setGadt(gadt.fresh) + + def setSearchHistory(searchHistory: SearchHistory): this.type = + util.Stats.record("Context.setSearchHistory") + this._searchHistory = searchHistory + this + + def setSource(source: SourceFile): this.type = + util.Stats.record("Context.setSource") + this._source = source + this + + private def setMoreProperties(moreProperties: Map[Key[Any], Any]): this.type = + util.Stats.record("Context.setMoreProperties") + this._moreProperties = moreProperties + this + + private def setStore(store: Store): this.type = + util.Stats.record("Context.setStore") + this._store = store + this + + def setCompilationUnit(compilationUnit: CompilationUnit): this.type = { + setSource(compilationUnit.source) + updateStore(compilationUnitLoc, compilationUnit) + } + + def setCompilerCallback(callback: CompilerCallback): this.type = updateStore(compilerCallbackLoc, callback) + def setSbtCallback(callback: AnalysisCallback): this.type = updateStore(sbtCallbackLoc, callback) + def setPrinterFn(printer: DetachedContext -> Printer): this.type = updateStore(printerFnLoc, printer) + def setSettings(settingsState: SettingsState): this.type = updateStore(settingsStateLoc, 
settingsState) + def setRun(run: Run | Null): this.type = updateStore(runLoc, run) + def setProfiler(profiler: Profiler): this.type = updateStore(profilerLoc, profiler) + def setNotNullInfos(notNullInfos: List[NotNullInfo]): this.type = updateStore(notNullInfosLoc, notNullInfos) + def setImportInfo(importInfo: ImportInfo): this.type = + importInfo.mentionsFeature(nme.unsafeNulls) match + case Some(true) => + setMode(this.mode &~ Mode.SafeNulls) + case Some(false) if ctx.settings.YexplicitNulls.value => + setMode(this.mode | Mode.SafeNulls) + case _ => + updateStore(importInfoLoc, importInfo) + def setTypeAssigner(typeAssigner: TypeAssigner): this.type = updateStore(typeAssignerLoc, typeAssigner) + + def setProperty[T](key: Key[T], value: T): this.type = + setMoreProperties(moreProperties.updated(key, value)) + + def dropProperty(key: Key[?]): this.type = + setMoreProperties(moreProperties - key) + + def addLocation[T](initial: T): Store.Location[T] = { + val (loc, store1) = store.newLocation(initial) + setStore(store1) + loc + } + + def addLocation[T](): Store.Location[T] = { + val (loc, store1) = store.newLocation[T]() + setStore(store1) + loc + } + + def updateStore[T](loc: Store.Location[T], value: T): this.type = + setStore(store.updated(loc, value)) + + def setPhase(pid: PhaseId): this.type = setPeriod(Period(runId, pid)) + def setPhase(phase: Phase): this.type = setPeriod(Period(runId, phase.start, phase.end)) + + def setSetting[T](setting: Setting[T], value: T): this.type = + setSettings(setting.updateIn(settingsState, value)) + + def setDebug: this.type = setSetting(base.settings.Ydebug, true) + } + + object FreshContext: + /** Defines an initial context with given context base and possible settings. 
*/ + def initial(base: ContextBase, settingsGroup: SettingGroup): Context = + val c = new FreshContext(base) + c._outer = NoContext + c._period = InitialPeriod + c._mode = Mode.None + c._typerState = TyperState.initialState() + c._owner = NoSymbol + c._tree = untpd.EmptyTree + c._moreProperties = Map(MessageLimiter -> DefaultMessageLimiter()) + c._scope = EmptyScope + c._source = NoSource + c._store = initialStore + .updated(settingsStateLoc, settingsGroup.defaultState) + .updated(notNullInfosLoc, Nil) + .updated(compilationUnitLoc, NoCompilationUnit) + c._searchHistory = new SearchRoot + c._gadt = GadtConstraint.empty + c + end FreshContext + + given detachedCtx(using c: Context): DetachedContext = c.detach + + given ops: AnyRef with + extension (c: Context) + def addNotNullInfo(info: NotNullInfo): Context = + c.withNotNullInfos(c.notNullInfos.extendWith(info)) + + def addNotNullRefs(refs: Set[TermRef]): Context = + c.addNotNullInfo(NotNullInfo(refs, Set())) + + def withNotNullInfos(infos: List[NotNullInfo]): Context = + if c.notNullInfos eq infos then c else c.fresh.setNotNullInfos(infos) + + def relaxedOverrideContext: Context = + c.withModeBits(c.mode &~ Mode.SafeNulls | Mode.RelaxedOverriding) + end ops + + // TODO: Fix issue when converting ModeChanges and FreshModeChanges to extension givens + extension (c: Context) { + final def withModeBits(mode: Mode): Context = + if (mode != c.mode) c.fresh.setMode(mode) else c + + final def addMode(mode: Mode): Context = withModeBits(c.mode | mode) + final def retractMode(mode: Mode): Context = withModeBits(c.mode &~ mode) + } + + extension (c: FreshContext) { + final def addMode(mode: Mode): c.type = c.setMode(c.mode | mode) + final def retractMode(mode: Mode): c.type = c.setMode(c.mode &~ mode) + } + + private def exploreCtx(using Context): FreshContext = + util.Stats.record("explore") + val base = ctx.base + import base._ + val nestedCtx = + if exploresInUse < exploreContexts.size then + 
exploreContexts(exploresInUse).reuseIn(ctx) + else + val ts = TyperState() + .setReporter(ExploringReporter()) + .setCommittable(false) + val c = FreshContext(ctx.base).init(ctx, ctx).setTyperState(ts) + exploreContexts += c + c + exploresInUse += 1 + val nestedTS = nestedCtx.typerState + nestedTS.init(ctx.typerState, ctx.typerState.constraint) + nestedCtx + + private def wrapUpExplore(ectx: Context) = + ectx.reporter.asInstanceOf[ExploringReporter].reset() + ectx.base.exploresInUse -= 1 + + inline def explore[T](inline op: Context ?=> T)(using Context): T = + val ectx = exploreCtx + try op(using ectx) finally wrapUpExplore(ectx) + + inline def exploreInFreshCtx[T](inline op: FreshContext ?=> T)(using Context): T = + val ectx = exploreCtx + try op(using ectx) finally wrapUpExplore(ectx) + + private def changeOwnerCtx(owner: Symbol)(using Context): Context = + val base = ctx.base + import base._ + val nestedCtx = + if changeOwnersInUse < changeOwnerContexts.size then + changeOwnerContexts(changeOwnersInUse).reuseIn(ctx) + else + val c = FreshContext(ctx.base).init(ctx, ctx) + changeOwnerContexts += c + c + changeOwnersInUse += 1 + nestedCtx.setOwner(owner).setTyperState(ctx.typerState) + + /** Run `op` in current context, with a mode is temporarily set as specified. + */ + inline def runWithOwner[T](owner: Symbol)(inline op: Context ?=> T)(using Context): T = + if Config.reuseOwnerContexts then + try op(using changeOwnerCtx(owner)) + finally ctx.base.changeOwnersInUse -= 1 + else + op(using ctx.fresh.setOwner(owner)) + + /** The type comparer of the kind created by `maker` to be used. + * This is the currently active type comparer CMP if + * - CMP is associated with the current context, and + * - CMP is of the kind created by maker or maker creates a plain type comparer. + * Note: plain TypeComparers always take on the kind of the outer comparer if they are in the same context. + * In other words: tracking or explaining is a sticky property in the same context. 
+ */ + private def comparer(using Context): TypeComparer = + util.Stats.record("comparing") + val base = ctx.base + if base.comparersInUse > 0 + && (base.comparers(base.comparersInUse - 1).comparerContext eq ctx) + then + base.comparers(base.comparersInUse - 1).currentInstance + else + val result = + if base.comparersInUse < base.comparers.size then + base.comparers(base.comparersInUse) + else + val result = TypeComparer(ctx) + base.comparers += result + result + base.comparersInUse += 1 + result.init(ctx) + result + + inline def comparing[T](inline op: TypeComparer => T)(using Context): T = + util.Stats.record("comparing") + val saved = ctx.base.comparersInUse + try op(comparer) + finally ctx.base.comparersInUse = saved + end comparing + + @sharable val NoContext: DetachedContext = detached( + new FreshContext((null: ContextBase | Null).uncheckedNN) { + override val implicits: ContextualImplicits = new ContextualImplicits(Nil, null, false)(detached(this: @unchecked)) + setSource(NoSource) + } + ) + + /** A context base defines state and associated methods that exist once per + * compiler run. + */ + class ContextBase extends ContextState + with Phases.PhasesBase + with Plugins { + + /** The applicable settings */ + val settings: ScalaSettings = new ScalaSettings + + /** The initial context */ + val initialCtx: Context = FreshContext.initial(this: @unchecked, settings) + + /** The platform, initialized by `initPlatform()`. 
*/ + private var _platform: Platform | Null = uninitialized + + /** The platform */ + def platform: Platform = { + val p = _platform + if p == null then + throw new IllegalStateException( + "initialize() must be called before accessing platform") + p + } + + protected def newPlatform(using Context): Platform = + if (settings.scalajs.value) new SJSPlatform + else new JavaPlatform + + /** The loader that loads the members of _root_ */ + def rootLoader(root: TermSymbol)(using Context): SymbolLoader = platform.rootLoader(root) + + /** The standard definitions */ + val definitions: Definitions = new Definitions + + // Set up some phases to get started */ + usePhases(List(SomePhase)) + + /** Initializes the `ContextBase` with a starting context. + * This initializes the `platform` and the `definitions`. + */ + def initialize()(using Context): Unit = { + _platform = newPlatform + definitions.init() + } + + def fusedContaining(p: Phase): Phase = + allPhases.find(_.period.containsPhaseId(p.id)).getOrElse(NoPhase) + } + + /** The essential mutable state of a context base, collected into a common class */ + class ContextState { + // Symbols state + + /** Counter for unique symbol ids */ + private var _nextSymId: Int = 0 + def nextSymId: Int = { _nextSymId += 1; _nextSymId } + + /** Sources and Files that were loaded */ + val sources: util.HashMap[AbstractFile, SourceFile] = util.HashMap[AbstractFile, SourceFile]() + val files: util.HashMap[TermName, AbstractFile] = util.HashMap() + + // Types state + /** A table for hash consing unique types */ + private[core] val uniques: Uniques = Uniques() + + /** A table for hash consing unique applied types */ + private[dotc] val uniqueAppliedTypes: AppliedUniques = AppliedUniques() + + /** A table for hash consing unique named types */ + private[core] val uniqueNamedTypes: NamedTypeUniques = NamedTypeUniques() + + var emptyTypeBounds: TypeBounds | Null = null + var emptyWildcardBounds: WildcardType | Null = null + + /** Number of 
findMember calls on stack */ + private[core] var findMemberCount: Int = 0 + + /** List of names which have a findMemberCall on stack, + * after Config.LogPendingFindMemberThreshold is reached. + */ + private[core] var pendingMemberSearches: List[Name] = Nil + + /** The number of recursive invocation of underlying on a NamedType + * during a controlled operation. + */ + private[core] var underlyingRecursions: Int = 0 + + /** The set of named types on which a currently active invocation + * of underlying during a controlled operation exists. */ + private[core] val pendingUnderlying: util.HashSet[Type] = util.HashSet[Type]() + + /** A map from ErrorType to associated message. We use this map + * instead of storing messages directly in ErrorTypes in order + * to avoid space leaks - the message usually captures a context. + */ + private[core] val errorTypeMsg: mutable.Map[Types.ErrorType, Message] = mutable.Map() + + // Phases state + + private[core] var phasesPlan: List[List[Phase]] = uninitialized + + /** Phases by id */ + private[dotc] var phases: Array[Phase] = uninitialized + + /** Phases with consecutive Transforms grouped into a single phase, Empty array if fusion is disabled */ + private[core] var fusedPhases: Array[Phase] = Array.empty[Phase] + + /** Next denotation transformer id */ + private[core] var nextDenotTransformerId: Array[Int] = uninitialized + + private[core] var denotTransformers: Array[DenotTransformer] = uninitialized + + /** Flag to suppress inlining, set after overflow */ + private[dotc] var stopInlining: Boolean = false + + /** A variable that records that some error was reported in a globally committable context. + * The error will not necessarlily be emitted, since it could still be that + * the enclosing context will be aborted. The variable is used as a smoke test + * to turn off assertions that might be wrong if the program is erroneous. 
To + * just test for `ctx.reporter.errorsReported` is not always enough, since it + * could be that the context in which the assertion is tested is a completer context + * that's different from the context where the error was reported. See i13218.scala + * for a test. + */ + private[dotc] var errorsToBeReported = false + + // Reporters state + private[dotc] var indent: Int = 0 + + protected[dotc] val indentTab: String = " " + + private[Contexts] val exploreContexts = new mutable.ArrayBuffer[FreshContext] + private[Contexts] var exploresInUse: Int = 0 + + private[Contexts] val changeOwnerContexts = new mutable.ArrayBuffer[FreshContext] + private[Contexts] var changeOwnersInUse: Int = 0 + + private[Contexts] val comparers = new mutable.ArrayBuffer[TypeComparer] + private[Contexts] var comparersInUse: Int = 0 + + private var charArray = new Array[Char](256) + + private[core] val reusableDataReader = ReusableInstance(new ReusableDataReader()) + + private[dotc] var wConfCache: (List[String], WConf) = uninitialized + + def sharedCharArray(len: Int): Array[Char] = + while len > charArray.length do + charArray = new Array[Char](charArray.length * 2) + charArray + + def reset(): Unit = + uniques.clear() + uniqueAppliedTypes.clear() + uniqueNamedTypes.clear() + emptyTypeBounds = null + emptyWildcardBounds = null + errorsToBeReported = false + errorTypeMsg.clear() + sources.clear() + files.clear() + comparers.clear() // forces re-evaluation of top and bottom classes in TypeComparer + + // Test that access is single threaded + + /** The thread on which `checkSingleThreaded was invoked last */ + @sharable private var thread: Thread | Null = null + + /** Check that we are on the same thread as before */ + def checkSingleThreaded(): Unit = + if (thread == null) thread = Thread.currentThread() + else assert(thread == Thread.currentThread(), "illegal multithreaded access to ContextBase") + } +} diff --git a/tests/pos-with-compiler-cc/dotc/core/Decorators.scala 
b/tests/pos-with-compiler-cc/dotc/core/Decorators.scala new file mode 100644 index 000000000000..f9844c6eaab6 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/core/Decorators.scala @@ -0,0 +1,322 @@ +package dotty.tools +package dotc +package core + +import scala.annotation.tailrec +import scala.collection.mutable.ListBuffer +import scala.util.control.NonFatal + +import Contexts._, Names._, Phases._, Symbols._ +import printing.{ Printer, Showable }, printing.Formatting._, printing.Texts._ +import transform.MegaPhase +import reporting.{Message, NoExplanation} +import language.experimental.pureFunctions +import annotation.retains + +/** This object provides useful extension methods for types defined elsewhere */ +object Decorators { + + /** Extension methods for toType/TermName methods on PreNames. + */ + extension (pn: PreName) + def toTermName: TermName = pn match + case s: String => termName(s) + case n: Name => n.toTermName + def toTypeName: TypeName = pn match + case s: String => typeName(s) + case n: Name => n.toTypeName + + extension (s: String) + def splitWhere(f: Char => Boolean, doDropIndex: Boolean): Option[(String, String)] = + def splitAt(idx: Int, doDropIndex: Boolean): Option[(String, String)] = + if (idx == -1) None + else Some((s.take(idx), s.drop(if (doDropIndex) idx + 1 else idx))) + splitAt(s.indexWhere(f), doDropIndex) + + /** Create a term name from a string slice, using a common buffer. 
+ * This avoids some allocation relative to `termName(s)` + */ + def sliceToTermName(start: Int, end: Int)(using Context): SimpleName = + val len = end - start + val chars = ctx.base.sharedCharArray(len) + s.getChars(start, end, chars, 0) + termName(chars, 0, len) + + def sliceToTypeName(start: Int, end: Int)(using Context): TypeName = + sliceToTermName(start, end).toTypeName + + def concat(name: Name)(using Context): SimpleName = name match + case name: SimpleName => + val len = s.length + name.length + var chars = ctx.base.sharedCharArray(len) + s.getChars(0, s.length, chars, 0) + if name.length != 0 then name.getChars(0, name.length, chars, s.length) + termName(chars, 0, len) + case name: TypeName => s.concat(name.toTermName) + case _ => termName(s.concat(name.toString).nn) + + def indented(width: Int): String = + val padding = " " * width + padding + s.replace("\n", "\n" + padding) + end extension + + /** Convert lazy string to message. To be used with caution, since no message-defined + * formatting will be done on the string. + */ + extension (str: -> String) + def toMessage: Message = NoExplanation(str)(using NoContext) + + /** Implements a findSymbol method on iterators of Symbols that + * works like find but avoids Option, replacing None with NoSymbol. + */ + extension (it: Iterator[Symbol]) + final def findSymbol(p: Symbol => Boolean): Symbol = { + while (it.hasNext) { + val sym = it.next() + if (p(sym)) return sym + } + NoSymbol + } + + inline val MaxFilterRecursions = 10 + + /** Implements filterConserve, zipWithConserve methods + * on lists that avoid duplication of list nodes where feasible. 
+ */ + extension [T](xs: List[T]) + final def collectCC[U](pf: PartialFunction[T, U] @retains(caps.cap)): List[U] = + xs.collect(pf.asInstanceOf) + + final def mapconserve[U](f: T => U): List[U] = { + @tailrec + def loop(mapped: ListBuffer[U] | Null, unchanged: List[U], pending: List[T]): List[U] = + if (pending.isEmpty) + if (mapped == null) unchanged + else mapped.prependToList(unchanged) + else { + val head0 = pending.head + val head1 = f(head0) + + if (head1.asInstanceOf[AnyRef] eq head0.asInstanceOf[AnyRef]) + loop(mapped, unchanged, pending.tail) + else { + val b = if (mapped == null) new ListBuffer[U] else mapped + var xc = unchanged + while (xc ne pending) { + b += xc.head + xc = xc.tail + } + b += head1 + val tail0 = pending.tail + loop(b, tail0.asInstanceOf[List[U]], tail0) + } + } + loop(null, xs.asInstanceOf[List[U]], xs) + } + + /** Like `xs filter p` but returns list `xs` itself - instead of a copy - + * if `p` is true for all elements. + */ + def filterConserve(p: T => Boolean): List[T] = + + def addAll(buf: ListBuffer[T], from: List[T], until: List[T]): ListBuffer[T] = + if from eq until then buf else addAll(buf += from.head, from.tail, until) + + def loopWithBuffer(buf: ListBuffer[T], xs: List[T]): List[T] = xs match + case x :: xs1 => + if p(x) then buf += x + loopWithBuffer(buf, xs1) + case nil => buf.toList + + def loop(keep: List[T], explore: List[T], keepCount: Int, recCount: Int): List[T] = + explore match + case x :: rest => + if p(x) then + loop(keep, rest, keepCount + 1, recCount) + else if keepCount <= 3 && recCount <= MaxFilterRecursions then + val rest1 = loop(rest, rest, 0, recCount + 1) + keepCount match + case 0 => rest1 + case 1 => keep.head :: rest1 + case 2 => keep.head :: keep.tail.head :: rest1 + case 3 => val tl = keep.tail; keep.head :: tl.head :: tl.tail.head :: rest1 + else + loopWithBuffer(addAll(new ListBuffer[T], keep, explore), rest) + case nil => + keep + + loop(xs, xs, 0, 0) + end filterConserve + + /** Like 
`xs.lazyZip(ys).map(f)`, but returns list `xs` itself + * - instead of a copy - if function `f` maps all elements of + * `xs` to themselves. Also, it is required that `ys` is at least + * as long as `xs`. + */ + def zipWithConserve[U, V <: T](ys: List[U])(f: (T, U) => V): List[V] = + if (xs.isEmpty || ys.isEmpty) Nil + else { + val x1 = f(xs.head, ys.head) + val xs1 = xs.tail.zipWithConserve(ys.tail)(f) + if (x1.asInstanceOf[AnyRef] eq xs.head.asInstanceOf[AnyRef]) && (xs1 eq xs.tail) + then xs.asInstanceOf[List[V]] + else x1 :: xs1 + } + + /** Like `xs.lazyZip(xs.indices).map(f)`, but returns list `xs` itself + * - instead of a copy - if function `f` maps all elements of + * `xs` to themselves. + */ + def mapWithIndexConserve[U <: T](f: (T, Int) => U): List[U] = + + @tailrec + def addAll(buf: ListBuffer[T], from: List[T], until: List[T]): ListBuffer[T] = + if from eq until then buf else addAll(buf += from.head, from.tail, until) + + @tailrec + def loopWithBuffer(buf: ListBuffer[U], explore: List[T], idx: Int): List[U] = explore match + case Nil => buf.toList + case t :: rest => loopWithBuffer(buf += f(t, idx), rest, idx + 1) + + @tailrec + def loop(keep: List[T], explore: List[T], idx: Int): List[U] = explore match + case Nil => keep.asInstanceOf[List[U]] + case t :: rest => + val u = f(t, idx) + if u.asInstanceOf[AnyRef] eq t.asInstanceOf[AnyRef] then + loop(keep, rest, idx + 1) + else + val buf = addAll(new ListBuffer[T], keep, explore).asInstanceOf[ListBuffer[U]] + loopWithBuffer(buf += u, rest, idx + 1) + + loop(xs, xs, 0) + end mapWithIndexConserve + + /** True if two lists have the same length. Since calling length on linear sequences + * is Θ(n), it is an inadvisable way to test length equality. This method is Θ(n min m). 
+ */ + final def hasSameLengthAs[U](ys: List[U]): Boolean = { + @tailrec def loop(xs: List[T], ys: List[U]): Boolean = + if (xs.isEmpty) ys.isEmpty + else ys.nonEmpty && loop(xs.tail, ys.tail) + loop(xs, ys) + } + + @tailrec final def eqElements(ys: List[AnyRef]): Boolean = xs match { + case x :: _ => + ys match { + case y :: _ => + x.asInstanceOf[AnyRef].eq(y) && + xs.tail.eqElements(ys.tail) + case _ => false + } + case nil => ys.isEmpty + } + + /** Union on lists seen as sets */ + def setUnion (ys: List[T]): List[T] = xs ::: ys.filterNot(xs contains _) + + extension [T, U](xss: List[List[T]]) + def nestedMap(f: T => U): List[List[U]] = xss match + case xs :: xss1 => xs.map(f) :: xss1.nestedMap(f) + case nil => Nil + def nestedMapConserve(f: T => U): List[List[U]] = + xss.mapconserve(_.mapconserve(f)) + def nestedZipWithConserve(yss: List[List[U]])(f: (T, U) => T): List[List[T]] = + xss.zipWithConserve(yss)((xs, ys) => xs.zipWithConserve(ys)(f)) + def nestedExists(p: T => Boolean): Boolean = xss match + case xs :: xss1 => xs.exists(p) || xss1.nestedExists(p) + case nil => false + end extension + + extension [T](xs: Seq[T]) + final def collectCC[U](pf: PartialFunction[T, U] @retains(caps.cap)): Seq[U] = + xs.collect(pf.asInstanceOf) + + extension [A, B](f: PartialFunction[A, B] @retains(caps.cap)) + def orElseCC(g: PartialFunction[A, B] @retains(caps.cap)): PartialFunction[A, B] @retains(f, g) = + f.orElse(g.asInstanceOf).asInstanceOf + + extension (text: Text) + def show(using Context): String = text.mkString(ctx.settings.pageWidth.value, ctx.settings.printLines.value) + + /** Test whether a list of strings representing phases contains + * a given phase. See [[config.CompilerCommand#explainAdvanced]] for the + * exact meaning of "contains" here. 
+ */ + extension (names: List[String]) + def containsPhase(phase: Phase): Boolean = + names.nonEmpty && { + phase match { + case phase: MegaPhase => phase.miniPhases.exists(x => names.containsPhase(x)) + case _ => + names exists { name => + name == "all" || { + val strippedName = name.stripSuffix("+") + val logNextPhase = name != strippedName + phase.phaseName.startsWith(strippedName) || + (logNextPhase && phase.prev.phaseName.startsWith(strippedName)) + } + } + } + } + + extension [T](x: T) + def showing[U]( + op: WrappedResult[U] ?=> String, + printer: config.Printers.Printer = config.Printers.default)(using c: Conversion[T, U] | Null = null): T = { + // either the use of `$result` was driven by the expected type of `Shown` + // which led to the summoning of `Conversion[T, Shown]` (which we'll invoke) + // or no such conversion was found so we'll consume the result as it is instead + val obj = if c == null then x.asInstanceOf[U] else c(x) + printer.println(op(using WrappedResult(obj))) + x + } + + /** Instead of `toString` call `show` on `Showable` values, falling back to `toString` if an exception is raised. */ + def tryToShow(using Context): String = x match + case x: Showable => + try x.show + catch + case ex: CyclicReference => "... (caught cyclic reference) ..." + case NonFatal(ex) + if !ctx.mode.is(Mode.PrintShowExceptions) && !ctx.settings.YshowPrintErrors.value => + val msg = ex match + case te: TypeError => te.toMessage.message + case _ => ex.getMessage + s"[cannot display due to $msg, raw string = $x]" + case _ => String.valueOf(x).nn + + /** Returns the simple class name of `x`. 
*/ + def className: String = getClass.getSimpleName.nn + + extension [T](x: T) + def assertingErrorsReported(using Context): T = { + assert(ctx.reporter.errorsReported) + x + } + def assertingErrorsReported(msg: Message)(using Context): T = { + assert(ctx.reporter.errorsReported, msg) + x + } + + extension [T <: AnyRef](xs: ::[T]) + def derivedCons(x1: T, xs1: List[T]) = + if (xs.head eq x1) && (xs.tail eq xs1) then xs else x1 :: xs1 + + extension (sc: StringContext) + + /** General purpose string formatting */ + def i(args: Shown*)(using Context): String = + new StringFormatter(sc).assemble(args) + + /** Interpolator yielding an error message, which undergoes + * the formatting defined in Message. + */ + def em(args: Shown*)(using Context): NoExplanation = + NoExplanation(i(args*)) + + extension [T <: AnyRef](arr: Array[T]) + def binarySearch(x: T | Null): Int = java.util.Arrays.binarySearch(arr.asInstanceOf[Array[Object | Null]], x) + +} diff --git a/tests/pos-with-compiler-cc/dotc/core/Definitions.scala b/tests/pos-with-compiler-cc/dotc/core/Definitions.scala new file mode 100644 index 000000000000..603088dd8f26 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/core/Definitions.scala @@ -0,0 +1,2434 @@ +package dotty.tools +package dotc +package core + +import scala.annotation.{threadUnsafe => tu} +import Types._, Contexts._, Symbols._, SymDenotations._, StdNames._, Names._, Phases._ +import Flags._, Scopes._, Decorators._, NameOps._, Periods._, NullOpsDecorator._ +import unpickleScala2.Scala2Unpickler.ensureConstructor +import scala.collection.mutable +import collection.mutable +import Denotations.{SingleDenotation, staticRef} +import util.{SimpleIdentityMap, SourceFile, NoSource} +import typer.ImportInfo.RootRef +import Comments.CommentsContext +import Comments.Comment +import util.Spans.NoSpan +import config.Feature +import Symbols.requiredModuleRef +import cc.{CapturingType, CaptureSet, EventuallyCapturingType} + +import scala.annotation.tailrec +import 
language.experimental.pureFunctions + +object Definitions { + + /** The maximum number of elements in a tuple or product. + * This should be removed once we go to hlists. + */ + val MaxTupleArity: Int = 22 + + /** The maximum arity N of a function type that's implemented + * as a trait `scala.FunctionN`. Functions of higher arity are possible, + * but are mapped in erasure to functions taking a single parameter of type + * Object[]. + * The limit 22 is chosen for Scala2x interop. It could be something + * else without affecting the set of programs that can be compiled. + */ + val MaxImplementedFunctionArity: Int = MaxTupleArity +} + +/** A class defining symbols and types of standard definitions + * + */ +class Definitions { + import Definitions._ + + private var initCtx: DetachedContext = _ + private given currentContext[Dummy_so_its_a_def]: DetachedContext = initCtx + + private def newPermanentSymbol[N <: Name](owner: Symbol, name: N, flags: FlagSet, info: Type) = + newSymbol(owner, name, flags | Permanent, info) + + private def newPermanentClassSymbol(owner: Symbol, name: TypeName, flags: FlagSet, infoFn: ClassSymbol => Type) = + newClassSymbol(owner, name, flags | Permanent | NoInits | Open, infoFn) + + private def enterCompleteClassSymbol(owner: Symbol, name: TypeName, flags: FlagSet, parents: List[TypeRef]): ClassSymbol = + enterCompleteClassSymbol(owner, name, flags, parents, newScope(owner.nestingLevel + 1)) + + private def enterCompleteClassSymbol(owner: Symbol, name: TypeName, flags: FlagSet, parents: List[TypeRef], decls: Scope) = + newCompleteClassSymbol(owner, name, flags | Permanent | NoInits | Open, parents, decls).entered + + private def enterTypeField(cls: ClassSymbol, name: TypeName, flags: FlagSet, scope: MutableScope) = + scope.enter(newPermanentSymbol(cls, name, flags, TypeBounds.empty)) + + private def enterTypeParam(cls: ClassSymbol, name: TypeName, flags: FlagSet, scope: MutableScope) = + enterTypeField(cls, name, flags | 
ClassTypeParamCreationFlags, scope) + + private def enterSyntheticTypeParam(cls: ClassSymbol, paramFlags: FlagSet, scope: MutableScope, suffix: String = "T0") = + enterTypeParam(cls, suffix.toTypeName, paramFlags, scope) + + // NOTE: Ideally we would write `parentConstrs: => Type*` but SIP-24 is only + // implemented in Dotty and not in Scala 2. + // See . + private def enterSpecialPolyClass(name: TypeName, paramFlags: FlagSet, parentConstrs: -> Seq[Type]): ClassSymbol = { + val completer = new LazyType { + def complete(denot: SymDenotation)(using Context): Unit = { + val cls = denot.asClass.classSymbol + val paramDecls = newScope + val typeParam = enterSyntheticTypeParam(cls, paramFlags, paramDecls) + def instantiate(tpe: Type) = + if (tpe.typeParams.nonEmpty) tpe.appliedTo(typeParam.typeRef) + else tpe + val parents = parentConstrs.toList map instantiate + denot.info = ClassInfo(ScalaPackageClass.thisType, cls, parents, paramDecls) + } + } + newPermanentClassSymbol(ScalaPackageClass, name, Artifact, completer).entered + } + + /** The trait FunctionN, ContextFunctionN, ErasedFunctionN or ErasedContextFunction, for some N + * @param name The name of the trait to be created + * + * FunctionN traits follow this template: + * + * trait FunctionN[-T0,...-T{N-1}, +R] extends Object { + * def apply($x0: T0, ..., $x{N_1}: T{N-1}): R + * } + * + * That is, they follow the template given for Function2..Function22 in the + * standard library, but without `tupled` and `curried` methods and without + * a `toString`. 
+ * + * ContextFunctionN traits follow this template: + * + * trait ContextFunctionN[-T0,...,-T{N-1}, +R] extends Object { + * def apply(using $x0: T0, ..., $x{N_1}: T{N-1}): R + * } + * + * ErasedFunctionN traits follow this template: + * + * trait ErasedFunctionN[-T0,...,-T{N-1}, +R] extends Object { + * def apply(erased $x0: T0, ..., $x{N_1}: T{N-1}): R + * } + * + * ErasedContextFunctionN traits follow this template: + * + * trait ErasedContextFunctionN[-T0,...,-T{N-1}, +R] extends Object { + * def apply(using erased $x0: T0, ..., $x{N_1}: T{N-1}): R + * } + * + * ErasedFunctionN and ErasedContextFunctionN erase to Function0. + * + * ImpureXYZFunctionN follow this template: + * + * type ImpureXYZFunctionN[-T0,...,-T{N-1}, +R] = {*} XYZFunctionN[T0,...,T{N-1}, R] + */ + private def newFunctionNType(name: TypeName): Symbol = { + val impure = name.startsWith("Impure") + val completer = new LazyType { + def complete(denot: SymDenotation)(using Context): Unit = { + val arity = name.functionArity + if impure then + val argParamNames = List.tabulate(arity)(tpnme.syntheticTypeParamName) + val argVariances = List.fill(arity)(Contravariant) + val underlyingName = name.asSimpleName.drop(6) + val underlyingClass = ScalaPackageVal.requiredClass(underlyingName) + denot.info = TypeAlias( + HKTypeLambda(argParamNames :+ "R".toTypeName, argVariances :+ Covariant)( + tl => List.fill(arity + 1)(TypeBounds.empty), + tl => CapturingType(underlyingClass.typeRef.appliedTo(tl.paramRefs), + CaptureSet.universal) + )) + else + val cls = denot.asClass.classSymbol + val decls = newScope + val paramNamePrefix = tpnme.scala ++ str.NAME_JOIN ++ name ++ str.EXPAND_SEPARATOR + val argParamRefs = List.tabulate(arity) { i => + enterTypeParam(cls, paramNamePrefix ++ "T" ++ (i + 1).toString, Contravariant, decls).typeRef + } + val resParamRef = enterTypeParam(cls, paramNamePrefix ++ "R", Covariant, decls).typeRef + val methodType = MethodType.companion( + isContextual = name.isContextFunction, + 
isImplicit = false, + isErased = name.isErasedFunction) + decls.enter(newMethod(cls, nme.apply, methodType(argParamRefs, resParamRef), Deferred)) + denot.info = + ClassInfo(ScalaPackageClass.thisType, cls, ObjectType :: Nil, decls) + } + } + if impure then + newPermanentSymbol(ScalaPackageClass, name, EmptyFlags, completer) + else + newPermanentClassSymbol(ScalaPackageClass, name, Trait | NoInits, completer) + } + + private def newMethod(cls: ClassSymbol, name: TermName, info: Type, flags: FlagSet = EmptyFlags): TermSymbol = + newPermanentSymbol(cls, name, flags | Method, info).asTerm + + private def enterMethod(cls: ClassSymbol, name: TermName, info: Type, flags: FlagSet = EmptyFlags): TermSymbol = + newMethod(cls, name, info, flags).entered + + private def enterPermanentSymbol(name: Name, info: Type, flags: FlagSet = EmptyFlags): Symbol = + val sym = newPermanentSymbol(ScalaPackageClass, name, flags, info) + ScalaPackageClass.currentPackageDecls.enter(sym) + sym + + private def enterAliasType(name: TypeName, tpe: Type, flags: FlagSet = EmptyFlags): TypeSymbol = + enterPermanentSymbol(name, TypeAlias(tpe), flags).asType + + private def enterBinaryAlias(name: TypeName, op: (Type, Type) => Type): TypeSymbol = + enterAliasType(name, + HKTypeLambda(TypeBounds.empty :: TypeBounds.empty :: Nil)( + tl => op(tl.paramRefs(0), tl.paramRefs(1)))) + + private def enterPolyMethod(cls: ClassSymbol, name: TermName, typeParamCount: Int, + resultTypeFn: PolyType -> Type, + flags: FlagSet = EmptyFlags, + bounds: TypeBounds = TypeBounds.empty, + useCompleter: Boolean = false) = { + val tparamNames = PolyType.syntheticParamNames(typeParamCount) + val tparamInfos = tparamNames map (_ => bounds) + def ptype = PolyType(tparamNames)(_ => tparamInfos, resultTypeFn) + val info = + if (useCompleter) + new LazyType { + def complete(denot: SymDenotation)(using Context): Unit = + denot.info = ptype + } + else ptype + enterMethod(cls, name, info, flags) + } + + private def 
enterT1ParameterlessMethod(cls: ClassSymbol, name: TermName, resultTypeFn: PolyType -> Type, flags: FlagSet) = + enterPolyMethod(cls, name, 1, resultTypeFn, flags) + + private def mkArityArray(name: String, arity: Int, countFrom: Int): Array[TypeRef | Null] = { + val arr = new Array[TypeRef | Null](arity + 1) + for (i <- countFrom to arity) arr(i) = requiredClassRef(name + i) + arr + } + + private def completeClass(cls: ClassSymbol, ensureCtor: Boolean = true): ClassSymbol = { + if (ensureCtor) ensureConstructor(cls, cls.denot.asClass, EmptyScope) + if (cls.linkedClass.exists) cls.linkedClass.markAbsent() + cls + } + + @tu lazy val RootClass: ClassSymbol = newPackageSymbol( + NoSymbol, nme.ROOT, (root, rootcls) => ctx.base.rootLoader(root)).moduleClass.asClass + @tu lazy val RootPackage: TermSymbol = newSymbol( + NoSymbol, nme.ROOTPKG, PackageCreationFlags, TypeRef(NoPrefix, RootClass)) + + @tu lazy val EmptyPackageVal: TermSymbol = newPackageSymbol( + RootClass, nme.EMPTY_PACKAGE, (emptypkg, emptycls) => ctx.base.rootLoader(emptypkg)).entered + @tu lazy val EmptyPackageClass: ClassSymbol = EmptyPackageVal.moduleClass.asClass + + /** A package in which we can place all methods and types that are interpreted specially by the compiler */ + @tu lazy val OpsPackageVal: TermSymbol = newCompletePackageSymbol(RootClass, nme.OPS_PACKAGE).entered + @tu lazy val OpsPackageClass: ClassSymbol = OpsPackageVal.moduleClass.asClass + + @tu lazy val ScalaPackageVal: TermSymbol = requiredPackage(nme.scala) + @tu lazy val ScalaMathPackageVal: TermSymbol = requiredPackage("scala.math") + @tu lazy val ScalaPackageClass: ClassSymbol = { + val cls = ScalaPackageVal.moduleClass.asClass + cls.info.decls.openForMutations.useSynthesizer( + name => + if (name.isTypeName && name.isSyntheticFunction) newFunctionNType(name.asTypeName) + else NoSymbol) + cls + } + @tu lazy val ScalaPackageObject: Symbol = requiredModule("scala.package") + @tu lazy val ScalaRuntimePackageVal: TermSymbol = 
requiredPackage("scala.runtime") + @tu lazy val ScalaRuntimePackageClass: ClassSymbol = ScalaRuntimePackageVal.moduleClass.asClass + @tu lazy val JavaPackageVal: TermSymbol = requiredPackage(nme.java) + @tu lazy val JavaPackageClass: ClassSymbol = JavaPackageVal.moduleClass.asClass + @tu lazy val JavaLangPackageVal: TermSymbol = requiredPackage(jnme.JavaLang) + @tu lazy val JavaLangPackageClass: ClassSymbol = JavaLangPackageVal.moduleClass.asClass + + // fundamental modules + @tu lazy val SysPackage : Symbol = requiredModule("scala.sys.package") + @tu lazy val Sys_error: Symbol = SysPackage.moduleClass.requiredMethod(nme.error) + + @tu lazy val ScalaXmlPackageClass: Symbol = getPackageClassIfDefined("scala.xml") + + @tu lazy val CompiletimePackageClass: Symbol = requiredPackage("scala.compiletime").moduleClass + @tu lazy val Compiletime_codeOf: Symbol = CompiletimePackageClass.requiredMethod("codeOf") + @tu lazy val Compiletime_erasedValue : Symbol = CompiletimePackageClass.requiredMethod("erasedValue") + @tu lazy val Compiletime_uninitialized: Symbol = CompiletimePackageClass.requiredMethod("uninitialized") + @tu lazy val Compiletime_error : Symbol = CompiletimePackageClass.requiredMethod(nme.error) + @tu lazy val Compiletime_requireConst : Symbol = CompiletimePackageClass.requiredMethod("requireConst") + @tu lazy val Compiletime_constValue : Symbol = CompiletimePackageClass.requiredMethod("constValue") + @tu lazy val Compiletime_constValueOpt: Symbol = CompiletimePackageClass.requiredMethod("constValueOpt") + @tu lazy val Compiletime_summonFrom : Symbol = CompiletimePackageClass.requiredMethod("summonFrom") + @tu lazy val Compiletime_summonInline : Symbol = CompiletimePackageClass.requiredMethod("summonInline") + @tu lazy val CompiletimeTestingPackage: Symbol = requiredPackage("scala.compiletime.testing") + @tu lazy val CompiletimeTesting_typeChecks: Symbol = CompiletimeTestingPackage.requiredMethod("typeChecks") + @tu lazy val CompiletimeTesting_typeCheckErrors: 
Symbol = CompiletimeTestingPackage.requiredMethod("typeCheckErrors") + @tu lazy val CompiletimeTesting_ErrorClass: ClassSymbol = requiredClass("scala.compiletime.testing.Error") + @tu lazy val CompiletimeTesting_Error: Symbol = requiredModule("scala.compiletime.testing.Error") + @tu lazy val CompiletimeTesting_Error_apply = CompiletimeTesting_Error.requiredMethod(nme.apply) + @tu lazy val CompiletimeTesting_ErrorKind: Symbol = requiredModule("scala.compiletime.testing.ErrorKind") + @tu lazy val CompiletimeTesting_ErrorKind_Parser: Symbol = CompiletimeTesting_ErrorKind.requiredMethod("Parser") + @tu lazy val CompiletimeTesting_ErrorKind_Typer: Symbol = CompiletimeTesting_ErrorKind.requiredMethod("Typer") + @tu lazy val CompiletimeOpsPackage: Symbol = requiredPackage("scala.compiletime.ops") + @tu lazy val CompiletimeOpsAnyModuleClass: Symbol = requiredModule("scala.compiletime.ops.any").moduleClass + @tu lazy val CompiletimeOpsIntModuleClass: Symbol = requiredModule("scala.compiletime.ops.int").moduleClass + @tu lazy val CompiletimeOpsLongModuleClass: Symbol = requiredModule("scala.compiletime.ops.long").moduleClass + @tu lazy val CompiletimeOpsFloatModuleClass: Symbol = requiredModule("scala.compiletime.ops.float").moduleClass + @tu lazy val CompiletimeOpsDoubleModuleClass: Symbol = requiredModule("scala.compiletime.ops.double").moduleClass + @tu lazy val CompiletimeOpsStringModuleClass: Symbol = requiredModule("scala.compiletime.ops.string").moduleClass + @tu lazy val CompiletimeOpsBooleanModuleClass: Symbol = requiredModule("scala.compiletime.ops.boolean").moduleClass + + /** Note: We cannot have same named methods defined in Object and Any (and AnyVal, for that matter) + * because after erasure the Any and AnyVal references get remapped to the Object methods + * which would result in a double binding assertion failure. 
+ * Instead we do the following: + * + * - Have some methods exist only in Any, and remap them with the Erasure denotation + * transformer to be owned by Object. + * - Have other methods exist only in Object. + * To achieve this, we synthesize all Any and Object methods; Object methods no longer get + * loaded from a classfile. + */ + @tu lazy val AnyClass: ClassSymbol = completeClass(enterCompleteClassSymbol(ScalaPackageClass, tpnme.Any, Abstract, Nil), ensureCtor = false) + def AnyType: TypeRef = AnyClass.typeRef + @tu lazy val MatchableClass: ClassSymbol = completeClass(enterCompleteClassSymbol(ScalaPackageClass, tpnme.Matchable, Trait, AnyType :: Nil), ensureCtor = false) + def MatchableType: TypeRef = MatchableClass.typeRef + @tu lazy val AnyValClass: ClassSymbol = + val res = completeClass(enterCompleteClassSymbol(ScalaPackageClass, tpnme.AnyVal, Abstract, List(AnyType, MatchableType))) + // Mark companion as absent, so that class does not get re-completed + val companion = ScalaPackageVal.info.decl(nme.AnyVal).symbol + companion.moduleClass.markAbsent() + companion.markAbsent() + res + + def AnyValType: TypeRef = AnyValClass.typeRef + + @tu lazy val Any_== : TermSymbol = enterMethod(AnyClass, nme.EQ, methOfAny(BooleanType), Final) + @tu lazy val Any_!= : TermSymbol = enterMethod(AnyClass, nme.NE, methOfAny(BooleanType), Final) + @tu lazy val Any_equals: TermSymbol = enterMethod(AnyClass, nme.equals_, methOfAny(BooleanType)) + @tu lazy val Any_hashCode: TermSymbol = enterMethod(AnyClass, nme.hashCode_, MethodType(Nil, IntType)) + @tu lazy val Any_toString: TermSymbol = enterMethod(AnyClass, nme.toString_, MethodType(Nil, StringType)) + @tu lazy val Any_## : TermSymbol = enterMethod(AnyClass, nme.HASHHASH, ExprType(IntType), Final) + @tu lazy val Any_isInstanceOf: TermSymbol = enterT1ParameterlessMethod(AnyClass, nme.isInstanceOf_, _ => BooleanType, Final) + @tu lazy val Any_asInstanceOf: TermSymbol = enterT1ParameterlessMethod(AnyClass, nme.asInstanceOf_, 
_.paramRefs(0), Final) + @tu lazy val Any_typeTest: TermSymbol = enterT1ParameterlessMethod(AnyClass, nme.isInstanceOfPM, _ => BooleanType, Final | SyntheticArtifact) + @tu lazy val Any_typeCast: TermSymbol = enterT1ParameterlessMethod(AnyClass, nme.asInstanceOfPM, _.paramRefs(0), Final | SyntheticArtifact | StableRealizable) + // generated by pattern matcher and explicit nulls, eliminated by erasure + + /** def getClass[A >: this.type](): Class[? <: A] */ + @tu lazy val Any_getClass: TermSymbol = + enterPolyMethod( + AnyClass, nme.getClass_, 1, + pt => MethodType(Nil, ClassClass.typeRef.appliedTo(TypeBounds.upper(pt.paramRefs(0)))), + Final, + bounds = TypeBounds.lower(AnyClass.thisType)) + + def AnyMethods: List[TermSymbol] = List(Any_==, Any_!=, Any_equals, Any_hashCode, + Any_toString, Any_##, Any_getClass, Any_isInstanceOf, Any_asInstanceOf, Any_typeTest, Any_typeCast) + + @tu lazy val ObjectClass: ClassSymbol = { + val cls = requiredClass("java.lang.Object") + assert(!cls.isCompleted, "race for completing java.lang.Object") + cls.info = ClassInfo(cls.owner.thisType, cls, List(AnyType, MatchableType), newScope) + cls.setFlag(NoInits | JavaDefined) + + ensureConstructor(cls, cls.denot.asClass, EmptyScope) + val companion = JavaLangPackageVal.info.decl(nme.Object).symbol.asTerm + NamerOps.makeConstructorCompanion(companion, cls) + cls + } + def ObjectType: TypeRef = ObjectClass.typeRef + + /** A type alias of Object used to represent any reference to Object in a Java + * signature, the secret sauce is that subtype checking treats it specially: + * + * tp <:< FromJavaObject + * + * is equivalent to: + * + * tp <:< Any + * + * This is useful to avoid usability problems when interacting with Java + * code where Object is the top type. This is safe because this type will + * only appear in signatures of Java definitions in positions where `Object` + * might appear, let's enumerate all possible cases this gives us: + * + * 1. 
At the top level: + * + * // A.java + * void meth1(Object arg) {} + * void meth2(T arg) {} // T implicitly extends Object + * + * // B.scala + * meth1(1) // OK + * meth2(1) // OK + * + * This is safe even though Int is not a subtype of Object, because Erasure + * will detect the mismatch and box the value type. + * + * 2. In a class type parameter: + * + * // A.java + * void meth3(scala.List arg) {} + * void meth4(scala.List arg) {} + * + * // B.scala + * meth3(List[Int](1)) // OK + * meth4(List[Int](1)) // OK + * + * At erasure, type parameters are removed and value types are boxed. + * + * 3. As the type parameter of an array: + * + * // A.java + * void meth5(Object[] arg) {} + * void meth6(T[] arg) {} + * + * // B.scala + * meth5(Array[Int](1)) // error: Array[Int] is not a subtype of Array[Object] + * meth6(Array[Int](1)) // error: Array[Int] is not a subtype of Array[T & Object] + * + * + * This is a bit more subtle: at erasure, Arrays keep their type parameter, + * and primitive Arrays are not subtypes of reference Arrays on the JVM, + * so we can't pass an Array of Int where a reference Array is expected. + * Array is invariant in Scala, so `meth5` is safe even if we use `FromJavaObject`, + * but generic Arrays are treated specially: we always add `& Object` (and here + * we mean the normal java.lang.Object type) to these types when they come from + * Java signatures (see `translateJavaArrayElementType`), this ensure that `meth6` + * is safe to use. + * + * 4. As the repeated argument of a varargs method: + * + * // A.java + * void meth7(Object... args) {} + * void meth8(T... 
args) {} + * + * // B.scala + * meth7(1) // OK (creates a reference array) + * meth8(1) // OK (creates a primitive array and copies it into a reference array at Erasure) + * val ai = Array[Int](1) + * meth7(ai: _*) // OK (will copy the array at Erasure) + * meth8(ai: _*) // OK (will copy the array at Erasure) + * + * Java repeated arguments are erased to arrays, so it would be safe to treat + * them in the same way: add an `& Object` to the parameter type to disallow + * passing primitives, but that would be very inconvenient as it is common to + * want to pass a primitive to an Object repeated argument (e.g. + * `String.format("foo: %d", 1)`). So instead we type them _without_ adding the + * `& Object` and let `ElimRepeated` and `Erasure` take care of doing any necessary adaptation + * (note that adapting a primitive array to a reference array requires + * copying the whole array, so this transformation only preserves semantics + * if the callee does not try to mutate the varargs array which is a reasonable + * assumption to make). + * + * + * This mechanism is similar to `ObjectTpeJavaRef` in Scala 2, except that we + * create a new symbol with its own name, this is needed because this type + * can show up in inferred types and therefore needs to be preserved when + * pickling so that unpickled trees pass `-Ycheck`. + * + * Note that by default we pretty-print `FromJavaObject` as `Object` or simply omit it + * if it's the sole upper-bound of a type parameter, use `-Yprint-debug` to explicitly + * display it. 
+ */ + @tu lazy val FromJavaObjectSymbol: TypeSymbol = + newPermanentSymbol(OpsPackageClass, tpnme.FromJavaObject, JavaDefined, TypeAlias(ObjectType)).entered + def FromJavaObjectType: TypeRef = FromJavaObjectSymbol.typeRef + + @tu lazy val AnyRefAlias: TypeSymbol = enterAliasType(tpnme.AnyRef, ObjectType) + def AnyRefType: TypeRef = AnyRefAlias.typeRef + + @tu lazy val Object_eq: TermSymbol = enterMethod(ObjectClass, nme.eq, methOfAnyRef(BooleanType), Final) + @tu lazy val Object_ne: TermSymbol = enterMethod(ObjectClass, nme.ne, methOfAnyRef(BooleanType), Final) + @tu lazy val Object_synchronized: TermSymbol = enterPolyMethod(ObjectClass, nme.synchronized_, 1, + pt => MethodType(List(pt.paramRefs(0)), pt.paramRefs(0)), Final) + @tu lazy val Object_clone: TermSymbol = enterMethod(ObjectClass, nme.clone_, MethodType(Nil, ObjectType), Protected) + @tu lazy val Object_finalize: TermSymbol = enterMethod(ObjectClass, nme.finalize_, MethodType(Nil, UnitType), Protected) + @tu lazy val Object_notify: TermSymbol = enterMethod(ObjectClass, nme.notify_, MethodType(Nil, UnitType), Final) + @tu lazy val Object_notifyAll: TermSymbol = enterMethod(ObjectClass, nme.notifyAll_, MethodType(Nil, UnitType), Final) + @tu lazy val Object_wait: TermSymbol = enterMethod(ObjectClass, nme.wait_, MethodType(Nil, UnitType), Final) + @tu lazy val Object_waitL: TermSymbol = enterMethod(ObjectClass, nme.wait_, MethodType(LongType :: Nil, UnitType), Final) + @tu lazy val Object_waitLI: TermSymbol = enterMethod(ObjectClass, nme.wait_, MethodType(LongType :: IntType :: Nil, UnitType), Final) + + def ObjectMethods: List[TermSymbol] = List(Object_eq, Object_ne, Object_synchronized, Object_clone, + Object_finalize, Object_notify, Object_notifyAll, Object_wait, Object_waitL, Object_waitLI) + + /** Methods in Object and Any that do not have a side effect */ + @tu lazy val pureMethods: List[TermSymbol] = List(Any_==, Any_!=, Any_equals, Any_hashCode, + Any_toString, Any_##, Any_getClass, 
Any_isInstanceOf, Any_typeTest, Object_eq, Object_ne) + + @tu lazy val AnyKindClass: ClassSymbol = { + val cls = newCompleteClassSymbol(ScalaPackageClass, tpnme.AnyKind, AbstractFinal | Permanent, Nil, newScope(0)) + if (!ctx.settings.YnoKindPolymorphism.value) + // Enable kind-polymorphism by exposing scala.AnyKind + cls.entered + cls + } + def AnyKindType: TypeRef = AnyKindClass.typeRef + + @tu lazy val andType: TypeSymbol = enterBinaryAlias(tpnme.AND, AndType(_, _)) + @tu lazy val orType: TypeSymbol = enterBinaryAlias(tpnme.OR, OrType(_, _, soft = false)) + + /** Method representing a throw */ + @tu lazy val throwMethod: TermSymbol = enterMethod(OpsPackageClass, nme.THROWkw, + MethodType(List(ThrowableType), NothingType)) + + @tu lazy val NothingClass: ClassSymbol = enterCompleteClassSymbol( + ScalaPackageClass, tpnme.Nothing, AbstractFinal, List(AnyType)) + def NothingType: TypeRef = NothingClass.typeRef + @tu lazy val NullClass: ClassSymbol = { + // When explicit-nulls is enabled, Null becomes a direct subtype of Any and Matchable + val parents = if ctx.explicitNulls then AnyType :: MatchableType :: Nil else ObjectType :: Nil + enterCompleteClassSymbol(ScalaPackageClass, tpnme.Null, AbstractFinal, parents) + } + def NullType: TypeRef = NullClass.typeRef + + @tu lazy val InvokerModule = requiredModule("scala.runtime.coverage.Invoker") + @tu lazy val InvokedMethodRef = InvokerModule.requiredMethodRef("invoked") + + @tu lazy val ImplicitScrutineeTypeSym = + newPermanentSymbol(ScalaPackageClass, tpnme.IMPLICITkw, EmptyFlags, TypeBounds.empty).entered + def ImplicitScrutineeTypeRef: TypeRef = ImplicitScrutineeTypeSym.typeRef + + @tu lazy val ScalaPredefModule: Symbol = requiredModule("scala.Predef") + @tu lazy val Predef_conforms : Symbol = ScalaPredefModule.requiredMethod(nme.conforms_) + @tu lazy val Predef_classOf : Symbol = ScalaPredefModule.requiredMethod(nme.classOf) + @tu lazy val Predef_identity : Symbol = ScalaPredefModule.requiredMethod(nme.identity) + 
@tu lazy val Predef_undefined: Symbol = ScalaPredefModule.requiredMethod(nme.???) + @tu lazy val ScalaPredefModuleClass: ClassSymbol = ScalaPredefModule.moduleClass.asClass + + @tu lazy val SubTypeClass: ClassSymbol = requiredClass("scala.<:<") + @tu lazy val SubType_refl: Symbol = SubTypeClass.companionModule.requiredMethod(nme.refl) + + @tu lazy val DummyImplicitClass: ClassSymbol = requiredClass("scala.DummyImplicit") + + @tu lazy val ScalaRuntimeModule: Symbol = requiredModule("scala.runtime.ScalaRunTime") + def runtimeMethodRef(name: PreName): TermRef = ScalaRuntimeModule.requiredMethodRef(name) + def ScalaRuntime_drop: Symbol = runtimeMethodRef(nme.drop).symbol + @tu lazy val ScalaRuntime__hashCode: Symbol = ScalaRuntimeModule.requiredMethod(nme._hashCode_) + @tu lazy val ScalaRuntime_toArray: Symbol = ScalaRuntimeModule.requiredMethod(nme.toArray) + @tu lazy val ScalaRuntime_toObjectArray: Symbol = ScalaRuntimeModule.requiredMethod(nme.toObjectArray) + + @tu lazy val BoxesRunTimeModule: Symbol = requiredModule("scala.runtime.BoxesRunTime") + @tu lazy val BoxesRunTimeModule_externalEquals: Symbol = BoxesRunTimeModule.info.decl(nme.equals_).suchThat(toDenot(_).info.firstParamTypes.size == 2).symbol + @tu lazy val ScalaStaticsModule: Symbol = requiredModule("scala.runtime.Statics") + def staticsMethodRef(name: PreName): TermRef = ScalaStaticsModule.requiredMethodRef(name) + def staticsMethod(name: PreName): TermSymbol = ScalaStaticsModule.requiredMethod(name) + + @tu lazy val DottyArraysModule: Symbol = requiredModule("scala.runtime.Arrays") + def newGenericArrayMethod(using Context): TermSymbol = DottyArraysModule.requiredMethod("newGenericArray") + def newArrayMethod(using Context): TermSymbol = DottyArraysModule.requiredMethod("newArray") + + def getWrapVarargsArrayModule: Symbol = ScalaRuntimeModule + + // The set of all wrap{X, Ref}Array methods, where X is a value type + val WrapArrayMethods: PerRun[collection.Set[Symbol]] = new PerRun({ + val methodNames 
= ScalaValueTypes.map(ast.tpd.wrapArrayMethodName) `union` Set(nme.wrapRefArray) + methodNames.map(getWrapVarargsArrayModule.requiredMethod(_)) + }) + + @tu lazy val ListClass: Symbol = requiredClass("scala.collection.immutable.List") + @tu lazy val ListModule: Symbol = requiredModule("scala.collection.immutable.List") + @tu lazy val NilModule: Symbol = requiredModule("scala.collection.immutable.Nil") + @tu lazy val ConsClass: Symbol = requiredClass("scala.collection.immutable.::") + @tu lazy val SeqFactoryClass: Symbol = requiredClass("scala.collection.SeqFactory") + + @tu lazy val SingletonClass: ClassSymbol = + // needed as a synthetic class because Scala 2.x refers to it in classfiles + // but does not define it as an explicit class. + enterCompleteClassSymbol( + ScalaPackageClass, tpnme.Singleton, PureInterfaceCreationFlags | Final, + List(AnyType), EmptyScope) + @tu lazy val SingletonType: TypeRef = SingletonClass.typeRef + + @tu lazy val CollectionSeqType: TypeRef = requiredClassRef("scala.collection.Seq") + @tu lazy val SeqType: TypeRef = requiredClassRef("scala.collection.immutable.Seq") + def SeqClass(using Context): ClassSymbol = SeqType.symbol.asClass + @tu lazy val Seq_apply : Symbol = SeqClass.requiredMethod(nme.apply) + @tu lazy val Seq_head : Symbol = SeqClass.requiredMethod(nme.head) + @tu lazy val Seq_drop : Symbol = SeqClass.requiredMethod(nme.drop) + @tu lazy val Seq_lengthCompare: Symbol = SeqClass.requiredMethod(nme.lengthCompare, List(IntType)) + @tu lazy val Seq_length : Symbol = SeqClass.requiredMethod(nme.length) + @tu lazy val Seq_toSeq : Symbol = SeqClass.requiredMethod(nme.toSeq) + @tu lazy val SeqModule: Symbol = requiredModule("scala.collection.immutable.Seq") + + + @tu lazy val StringOps: Symbol = requiredClass("scala.collection.StringOps") + @tu lazy val StringOps_format: Symbol = StringOps.requiredMethod(nme.format) + + @tu lazy val ArrayType: TypeRef = requiredClassRef("scala.Array") + def ArrayClass(using Context): ClassSymbol = 
ArrayType.symbol.asClass + @tu lazy val Array_apply : Symbol = ArrayClass.requiredMethod(nme.apply) + @tu lazy val Array_update : Symbol = ArrayClass.requiredMethod(nme.update) + @tu lazy val Array_length : Symbol = ArrayClass.requiredMethod(nme.length) + @tu lazy val Array_clone : Symbol = ArrayClass.requiredMethod(nme.clone_) + @tu lazy val ArrayConstructor: Symbol = ArrayClass.requiredMethod(nme.CONSTRUCTOR) + + @tu lazy val ArrayModule: Symbol = requiredModule("scala.Array") + def ArrayModuleClass: Symbol = ArrayModule.moduleClass + + @tu lazy val IArrayModule: Symbol = requiredModule("scala.IArray") + def IArrayModuleClass: Symbol = IArrayModule.moduleClass + + @tu lazy val UnitType: TypeRef = valueTypeRef("scala.Unit", java.lang.Void.TYPE, UnitEnc, nme.specializedTypeNames.Void) + def UnitClass(using Context): ClassSymbol = UnitType.symbol.asClass + def UnitModuleClass(using Context): Symbol = UnitType.symbol.asClass.linkedClass + @tu lazy val BooleanType: TypeRef = valueTypeRef("scala.Boolean", java.lang.Boolean.TYPE, BooleanEnc, nme.specializedTypeNames.Boolean) + def BooleanClass(using Context): ClassSymbol = BooleanType.symbol.asClass + @tu lazy val Boolean_! : Symbol = BooleanClass.requiredMethod(nme.UNARY_!) + @tu lazy val Boolean_&& : Symbol = BooleanClass.requiredMethod(nme.ZAND) // ### harmonize required... 
calls + @tu lazy val Boolean_|| : Symbol = BooleanClass.requiredMethod(nme.ZOR) + @tu lazy val Boolean_== : Symbol = + BooleanClass.info.member(nme.EQ).suchThat(_.info.firstParamTypes match { + case List(pt) => pt.isRef(BooleanClass) + case _ => false + }).symbol + @tu lazy val Boolean_!= : Symbol = + BooleanClass.info.member(nme.NE).suchThat(_.info.firstParamTypes match { + case List(pt) => pt.isRef(BooleanClass) + case _ => false + }).symbol + + @tu lazy val ByteType: TypeRef = valueTypeRef("scala.Byte", java.lang.Byte.TYPE, ByteEnc, nme.specializedTypeNames.Byte) + def ByteClass(using Context): ClassSymbol = ByteType.symbol.asClass + @tu lazy val ShortType: TypeRef = valueTypeRef("scala.Short", java.lang.Short.TYPE, ShortEnc, nme.specializedTypeNames.Short) + def ShortClass(using Context): ClassSymbol = ShortType.symbol.asClass + @tu lazy val CharType: TypeRef = valueTypeRef("scala.Char", java.lang.Character.TYPE, CharEnc, nme.specializedTypeNames.Char) + def CharClass(using Context): ClassSymbol = CharType.symbol.asClass + @tu lazy val IntType: TypeRef = valueTypeRef("scala.Int", java.lang.Integer.TYPE, IntEnc, nme.specializedTypeNames.Int) + def IntClass(using Context): ClassSymbol = IntType.symbol.asClass + @tu lazy val Int_- : Symbol = IntClass.requiredMethod(nme.MINUS, List(IntType)) + @tu lazy val Int_+ : Symbol = IntClass.requiredMethod(nme.PLUS, List(IntType)) + @tu lazy val Int_/ : Symbol = IntClass.requiredMethod(nme.DIV, List(IntType)) + @tu lazy val Int_* : Symbol = IntClass.requiredMethod(nme.MUL, List(IntType)) + @tu lazy val Int_== : Symbol = IntClass.requiredMethod(nme.EQ, List(IntType)) + @tu lazy val Int_>= : Symbol = IntClass.requiredMethod(nme.GE, List(IntType)) + @tu lazy val Int_<= : Symbol = IntClass.requiredMethod(nme.LE, List(IntType)) + @tu lazy val LongType: TypeRef = valueTypeRef("scala.Long", java.lang.Long.TYPE, LongEnc, nme.specializedTypeNames.Long) + def LongClass(using Context): ClassSymbol = LongType.symbol.asClass + @tu lazy 
val Long_+ : Symbol = LongClass.requiredMethod(nme.PLUS, List(LongType)) + @tu lazy val Long_* : Symbol = LongClass.requiredMethod(nme.MUL, List(LongType)) + @tu lazy val Long_/ : Symbol = LongClass.requiredMethod(nme.DIV, List(LongType)) + + @tu lazy val FloatType: TypeRef = valueTypeRef("scala.Float", java.lang.Float.TYPE, FloatEnc, nme.specializedTypeNames.Float) + def FloatClass(using Context): ClassSymbol = FloatType.symbol.asClass + @tu lazy val DoubleType: TypeRef = valueTypeRef("scala.Double", java.lang.Double.TYPE, DoubleEnc, nme.specializedTypeNames.Double) + def DoubleClass(using Context): ClassSymbol = DoubleType.symbol.asClass + + @tu lazy val BoxedUnitClass: ClassSymbol = requiredClass("scala.runtime.BoxedUnit") + def BoxedUnit_UNIT(using Context): TermSymbol = BoxedUnitClass.linkedClass.requiredValue("UNIT") + def BoxedUnit_TYPE(using Context): TermSymbol = BoxedUnitClass.linkedClass.requiredValue("TYPE") + + @tu lazy val BoxedBooleanClass: ClassSymbol = requiredClass("java.lang.Boolean") + @tu lazy val BoxedByteClass : ClassSymbol = requiredClass("java.lang.Byte") + @tu lazy val BoxedShortClass : ClassSymbol = requiredClass("java.lang.Short") + @tu lazy val BoxedCharClass : ClassSymbol = requiredClass("java.lang.Character") + @tu lazy val BoxedIntClass : ClassSymbol = requiredClass("java.lang.Integer") + @tu lazy val BoxedLongClass : ClassSymbol = requiredClass("java.lang.Long") + @tu lazy val BoxedFloatClass : ClassSymbol = requiredClass("java.lang.Float") + @tu lazy val BoxedDoubleClass : ClassSymbol = requiredClass("java.lang.Double") + + @tu lazy val BoxedBooleanModule: TermSymbol = requiredModule("java.lang.Boolean") + @tu lazy val BoxedByteModule : TermSymbol = requiredModule("java.lang.Byte") + @tu lazy val BoxedShortModule : TermSymbol = requiredModule("java.lang.Short") + @tu lazy val BoxedCharModule : TermSymbol = requiredModule("java.lang.Character") + @tu lazy val BoxedIntModule : TermSymbol = requiredModule("java.lang.Integer") + @tu 
lazy val BoxedLongModule : TermSymbol = requiredModule("java.lang.Long") + @tu lazy val BoxedFloatModule : TermSymbol = requiredModule("java.lang.Float") + @tu lazy val BoxedDoubleModule : TermSymbol = requiredModule("java.lang.Double") + @tu lazy val BoxedUnitModule : TermSymbol = requiredModule("java.lang.Void") + + @tu lazy val ByNameParamClass2x: ClassSymbol = enterSpecialPolyClass(tpnme.BYNAME_PARAM_CLASS, Covariant, Seq(AnyType)) + + @tu lazy val RepeatedParamClass: ClassSymbol = enterSpecialPolyClass(tpnme.REPEATED_PARAM_CLASS, Covariant, Seq(ObjectType, SeqType)) + + @tu lazy val IntoType: TypeSymbol = enterAliasType(tpnme.INTO, HKTypeLambda(TypeBounds.empty :: Nil)(_.paramRefs(0))) + + // fundamental classes + @tu lazy val StringClass: ClassSymbol = requiredClass("java.lang.String") + def StringType: Type = StringClass.typeRef + @tu lazy val StringModule: Symbol = StringClass.linkedClass + @tu lazy val String_+ : TermSymbol = enterMethod(StringClass, nme.raw.PLUS, methOfAny(StringType), Final) + @tu lazy val String_valueOf_Object: Symbol = StringModule.info.member(nme.valueOf).suchThat(_.info.firstParamTypes match { + case List(pt) => pt.isAny || pt.stripNull.isAnyRef + case _ => false + }).symbol + + @tu lazy val JavaCloneableClass: ClassSymbol = requiredClass("java.lang.Cloneable") + @tu lazy val NullPointerExceptionClass: ClassSymbol = requiredClass("java.lang.NullPointerException") + @tu lazy val IndexOutOfBoundsException: ClassSymbol = requiredClass("java.lang.IndexOutOfBoundsException") + @tu lazy val ClassClass: ClassSymbol = requiredClass("java.lang.Class") + @tu lazy val BoxedNumberClass: ClassSymbol = requiredClass("java.lang.Number") + @tu lazy val ClassCastExceptionClass: ClassSymbol = requiredClass("java.lang.ClassCastException") + @tu lazy val ClassCastExceptionClass_stringConstructor: TermSymbol = ClassCastExceptionClass.info.member(nme.CONSTRUCTOR).suchThat(_.info.firstParamTypes match { + case List(pt) => + pt.stripNull.isRef(StringClass) 
+ case _ => false + }).symbol.asTerm + @tu lazy val ArithmeticExceptionClass: ClassSymbol = requiredClass("java.lang.ArithmeticException") + @tu lazy val ArithmeticExceptionClass_stringConstructor: TermSymbol = ArithmeticExceptionClass.info.member(nme.CONSTRUCTOR).suchThat(_.info.firstParamTypes match { + case List(pt) => + pt.stripNull.isRef(StringClass) + case _ => false + }).symbol.asTerm + + @tu lazy val JavaSerializableClass: ClassSymbol = requiredClass("java.io.Serializable") + + @tu lazy val ComparableClass: ClassSymbol = requiredClass("java.lang.Comparable") + + @tu lazy val SystemClass: ClassSymbol = requiredClass("java.lang.System") + @tu lazy val SystemModule: Symbol = SystemClass.linkedClass + + @tu lazy val NoSuchElementExceptionClass = requiredClass("java.util.NoSuchElementException") + def NoSuchElementExceptionType = NoSuchElementExceptionClass.typeRef + @tu lazy val IllegalArgumentExceptionClass = requiredClass("java.lang.IllegalArgumentException") + def IllegalArgumentExceptionType = IllegalArgumentExceptionClass.typeRef + + // in scalac modified to have Any as parent + + @tu lazy val ThrowableType: TypeRef = requiredClassRef("java.lang.Throwable") + def ThrowableClass(using Context): ClassSymbol = ThrowableType.symbol.asClass + @tu lazy val ExceptionClass: ClassSymbol = requiredClass("java.lang.Exception") + @tu lazy val RuntimeExceptionClass: ClassSymbol = requiredClass("java.lang.RuntimeException") + + @tu lazy val SerializableType: TypeRef = JavaSerializableClass.typeRef + def SerializableClass(using Context): ClassSymbol = SerializableType.symbol.asClass + + @tu lazy val JavaBigIntegerClass: ClassSymbol = requiredClass("java.math.BigInteger") + @tu lazy val JavaBigDecimalClass: ClassSymbol = requiredClass("java.math.BigDecimal") + @tu lazy val JavaCalendarClass: ClassSymbol = requiredClass("java.util.Calendar") + @tu lazy val JavaDateClass: ClassSymbol = requiredClass("java.util.Date") + @tu lazy val JavaFormattableClass: ClassSymbol = 
requiredClass("java.util.Formattable") + + @tu lazy val JavaEnumClass: ClassSymbol = { + val cls = requiredClass("java.lang.Enum") + // jl.Enum has a single constructor protected(name: String, ordinal: Int). + // We remove the arguments from the primary constructor, and enter + // a new constructor symbol with 2 arguments, so that both + // `X extends jl.Enum[X]` and `X extends jl.Enum[X](name, ordinal)` + // pass typer and go through jl.Enum-specific checks in RefChecks. + cls.infoOrCompleter match { + case completer: ClassfileLoader => + cls.info = new ClassfileLoader(completer.classfile) { + override def complete(root: SymDenotation)(using Context): Unit = { + super.complete(root) + val constr = cls.primaryConstructor + val noArgInfo = constr.info match { + case info: PolyType => + info.resType match { + case meth: MethodType => + info.derivedLambdaType( + resType = meth.derivedLambdaType( + paramNames = Nil, paramInfos = Nil)) + } + } + val argConstr = constr.copy().entered + constr.info = noArgInfo + constr.termRef.recomputeDenot() + } + } + cls + } + } + def JavaEnumType = JavaEnumClass.typeRef + + @tu lazy val MethodHandleClass: ClassSymbol = requiredClass("java.lang.invoke.MethodHandle") + @tu lazy val MethodHandlesLookupClass: ClassSymbol = requiredClass("java.lang.invoke.MethodHandles.Lookup") + @tu lazy val VarHandleClass: ClassSymbol = requiredClass("java.lang.invoke.VarHandle") + + @tu lazy val StringBuilderClass: ClassSymbol = requiredClass("scala.collection.mutable.StringBuilder") + @tu lazy val MatchErrorClass : ClassSymbol = requiredClass("scala.MatchError") + @tu lazy val ConversionClass : ClassSymbol = requiredClass("scala.Conversion").typeRef.symbol.asClass + + @tu lazy val StringAddClass : ClassSymbol = requiredClass("scala.runtime.StringAdd") + @tu lazy val StringAdd_+ : Symbol = StringAddClass.requiredMethod(nme.raw.PLUS) + + @tu lazy val StringContextClass: ClassSymbol = requiredClass("scala.StringContext") + @tu lazy val StringContext_s : 
Symbol = StringContextClass.requiredMethod(nme.s) + @tu lazy val StringContext_raw: Symbol = StringContextClass.requiredMethod(nme.raw_) + @tu lazy val StringContext_f : Symbol = StringContextClass.requiredMethod(nme.f) + @tu lazy val StringContext_parts: Symbol = StringContextClass.requiredMethod(nme.parts) + @tu lazy val StringContextModule: Symbol = StringContextClass.companionModule + @tu lazy val StringContextModule_apply: Symbol = StringContextModule.requiredMethod(nme.apply) + @tu lazy val StringContextModule_standardInterpolator: Symbol = StringContextModule.requiredMethod(nme.standardInterpolator) + @tu lazy val StringContextModule_processEscapes: Symbol = StringContextModule.requiredMethod(nme.processEscapes) + + @tu lazy val PartialFunctionClass: ClassSymbol = requiredClass("scala.PartialFunction") + @tu lazy val PartialFunction_isDefinedAt: Symbol = PartialFunctionClass.requiredMethod(nme.isDefinedAt) + @tu lazy val PartialFunction_applyOrElse: Symbol = PartialFunctionClass.requiredMethod(nme.applyOrElse) + + @tu lazy val AbstractPartialFunctionClass: ClassSymbol = requiredClass("scala.runtime.AbstractPartialFunction") + @tu lazy val FunctionXXLClass: ClassSymbol = requiredClass("scala.runtime.FunctionXXL") + @tu lazy val ScalaSymbolClass: ClassSymbol = requiredClass("scala.Symbol") + @tu lazy val DynamicClass: ClassSymbol = requiredClass("scala.Dynamic") + @tu lazy val OptionClass: ClassSymbol = requiredClass("scala.Option") + @tu lazy val SomeClass: ClassSymbol = requiredClass("scala.Some") + @tu lazy val NoneModule: Symbol = requiredModule("scala.None") + + @tu lazy val EnumClass: ClassSymbol = requiredClass("scala.reflect.Enum") + @tu lazy val Enum_ordinal: Symbol = EnumClass.requiredMethod(nme.ordinal) + + @tu lazy val EnumValueSerializationProxyClass: ClassSymbol = requiredClass("scala.runtime.EnumValueSerializationProxy") + @tu lazy val EnumValueSerializationProxyConstructor: TermSymbol = + 
EnumValueSerializationProxyClass.requiredMethod(nme.CONSTRUCTOR, List(ClassType(TypeBounds.empty), IntType)) + + @tu lazy val ProductClass: ClassSymbol = requiredClass("scala.Product") + @tu lazy val Product_canEqual : Symbol = ProductClass.requiredMethod(nme.canEqual_) + @tu lazy val Product_productArity : Symbol = ProductClass.requiredMethod(nme.productArity) + @tu lazy val Product_productElement : Symbol = ProductClass.requiredMethod(nme.productElement) + @tu lazy val Product_productElementName: Symbol = ProductClass.requiredMethod(nme.productElementName) + @tu lazy val Product_productPrefix : Symbol = ProductClass.requiredMethod(nme.productPrefix) + + @tu lazy val IteratorClass: ClassSymbol = requiredClass("scala.collection.Iterator") + def IteratorModule(using Context): Symbol = IteratorClass.companionModule + + @tu lazy val ModuleSerializationProxyClass: ClassSymbol = requiredClass("scala.runtime.ModuleSerializationProxy") + @tu lazy val ModuleSerializationProxyConstructor: TermSymbol = + ModuleSerializationProxyClass.requiredMethod(nme.CONSTRUCTOR, List(ClassType(TypeBounds.empty))) + + @tu lazy val MirrorClass: ClassSymbol = requiredClass("scala.deriving.Mirror") + @tu lazy val Mirror_ProductClass: ClassSymbol = requiredClass("scala.deriving.Mirror.Product") + @tu lazy val Mirror_Product_fromProduct: Symbol = Mirror_ProductClass.requiredMethod(nme.fromProduct) + @tu lazy val Mirror_SumClass: ClassSymbol = requiredClass("scala.deriving.Mirror.Sum") + @tu lazy val Mirror_SingletonClass: ClassSymbol = requiredClass("scala.deriving.Mirror.Singleton") + @tu lazy val Mirror_SingletonProxyClass: ClassSymbol = requiredClass("scala.deriving.Mirror.SingletonProxy") + + @tu lazy val LanguageModule: Symbol = requiredModule("scala.language") + @tu lazy val LanguageModuleClass: Symbol = LanguageModule.moduleClass.asClass + @tu lazy val LanguageExperimentalModule: Symbol = requiredModule("scala.language.experimental") + @tu lazy val LanguageDeprecatedModule: Symbol = 
requiredModule("scala.language.deprecated") + @tu lazy val NonLocalReturnControlClass: ClassSymbol = requiredClass("scala.runtime.NonLocalReturnControl") + @tu lazy val SelectableClass: ClassSymbol = requiredClass("scala.Selectable") + @tu lazy val WithoutPreciseParameterTypesClass: Symbol = requiredClass("scala.Selectable.WithoutPreciseParameterTypes") + + @tu lazy val ManifestClass: ClassSymbol = requiredClass("scala.reflect.Manifest") + @tu lazy val ManifestFactoryModule: Symbol = requiredModule("scala.reflect.ManifestFactory") + @tu lazy val ClassManifestFactoryModule: Symbol = requiredModule("scala.reflect.ClassManifestFactory") + @tu lazy val OptManifestClass: ClassSymbol = requiredClass("scala.reflect.OptManifest") + @tu lazy val NoManifestModule: Symbol = requiredModule("scala.reflect.NoManifest") + + @tu lazy val ReflectPackageClass: Symbol = requiredPackage("scala.reflect.package").moduleClass + @tu lazy val ClassTagClass: ClassSymbol = requiredClass("scala.reflect.ClassTag") + @tu lazy val ClassTagModule: Symbol = ClassTagClass.companionModule + @tu lazy val ClassTagModule_apply: Symbol = ClassTagModule.requiredMethod(nme.apply) + + @tu lazy val TypeTestClass: ClassSymbol = requiredClass("scala.reflect.TypeTest") + @tu lazy val TypeTest_unapply: Symbol = TypeTestClass.requiredMethod(nme.unapply) + @tu lazy val TypeTestModule_identity: Symbol = TypeTestClass.companionModule.requiredMethod(nme.identity) + + @tu lazy val QuotedExprClass: ClassSymbol = requiredClass("scala.quoted.Expr") + + @tu lazy val QuotesClass: ClassSymbol = requiredClass("scala.quoted.Quotes") + @tu lazy val Quotes_reflect: Symbol = QuotesClass.requiredValue("reflect") + @tu lazy val Quotes_reflect_asTerm: Symbol = Quotes_reflect.requiredMethod("asTerm") + @tu lazy val Quotes_reflect_Apply: Symbol = Quotes_reflect.requiredValue("Apply") + @tu lazy val Quotes_reflect_Apply_apply: Symbol = Quotes_reflect_Apply.requiredMethod(nme.apply) + @tu lazy val Quotes_reflect_TypeApply: Symbol = 
Quotes_reflect.requiredValue("TypeApply") + @tu lazy val Quotes_reflect_TypeApply_apply: Symbol = Quotes_reflect_TypeApply.requiredMethod(nme.apply) + @tu lazy val Quotes_reflect_Assign: Symbol = Quotes_reflect.requiredValue("Assign") + @tu lazy val Quotes_reflect_Assign_apply: Symbol = Quotes_reflect_Assign.requiredMethod(nme.apply) + @tu lazy val Quotes_reflect_Inferred: Symbol = Quotes_reflect.requiredValue("Inferred") + @tu lazy val Quotes_reflect_Inferred_apply: Symbol = Quotes_reflect_Inferred.requiredMethod(nme.apply) + @tu lazy val Quotes_reflect_Literal: Symbol = Quotes_reflect.requiredValue("Literal") + @tu lazy val Quotes_reflect_Literal_apply: Symbol = Quotes_reflect_Literal.requiredMethod(nme.apply) + @tu lazy val Quotes_reflect_TreeMethods: Symbol = Quotes_reflect.requiredMethod("TreeMethods") + @tu lazy val Quotes_reflect_TreeMethods_asExpr: Symbol = Quotes_reflect_TreeMethods.requiredMethod("asExpr") + @tu lazy val Quotes_reflect_TypeRepr: Symbol = Quotes_reflect.requiredValue("TypeRepr") + @tu lazy val Quotes_reflect_TypeRepr_of: Symbol = Quotes_reflect_TypeRepr.requiredMethod("of") + @tu lazy val Quotes_reflect_TypeRepr_typeConstructorOf: Symbol = Quotes_reflect_TypeRepr.requiredMethod("typeConstructorOf") + @tu lazy val Quotes_reflect_TypeReprMethods: Symbol = Quotes_reflect.requiredValue("TypeReprMethods") + @tu lazy val Quotes_reflect_TypeReprMethods_asType: Symbol = Quotes_reflect_TypeReprMethods.requiredMethod("asType") + @tu lazy val Quotes_reflect_TypeTreeType: Symbol = Quotes_reflect.requiredType("TypeTree") + @tu lazy val Quotes_reflect_TermType: Symbol = Quotes_reflect.requiredType("Term") + @tu lazy val Quotes_reflect_BooleanConstant: Symbol = Quotes_reflect.requiredValue("BooleanConstant") + @tu lazy val Quotes_reflect_ByteConstant: Symbol = Quotes_reflect.requiredValue("ByteConstant") + @tu lazy val Quotes_reflect_ShortConstant: Symbol = Quotes_reflect.requiredValue("ShortConstant") + @tu lazy val Quotes_reflect_IntConstant: Symbol = 
Quotes_reflect.requiredValue("IntConstant") + @tu lazy val Quotes_reflect_LongConstant: Symbol = Quotes_reflect.requiredValue("LongConstant") + @tu lazy val Quotes_reflect_FloatConstant: Symbol = Quotes_reflect.requiredValue("FloatConstant") + @tu lazy val Quotes_reflect_DoubleConstant: Symbol = Quotes_reflect.requiredValue("DoubleConstant") + @tu lazy val Quotes_reflect_CharConstant: Symbol = Quotes_reflect.requiredValue("CharConstant") + @tu lazy val Quotes_reflect_StringConstant: Symbol = Quotes_reflect.requiredValue("StringConstant") + @tu lazy val Quotes_reflect_UnitConstant: Symbol = Quotes_reflect.requiredValue("UnitConstant") + @tu lazy val Quotes_reflect_NullConstant: Symbol = Quotes_reflect.requiredValue("NullConstant") + @tu lazy val Quotes_reflect_ClassOfConstant: Symbol = Quotes_reflect.requiredValue("ClassOfConstant") + + + @tu lazy val QuoteUnpicklerClass: ClassSymbol = requiredClass("scala.quoted.runtime.QuoteUnpickler") + @tu lazy val QuoteUnpickler_unpickleExprV2: Symbol = QuoteUnpicklerClass.requiredMethod("unpickleExprV2") + @tu lazy val QuoteUnpickler_unpickleTypeV2: Symbol = QuoteUnpicklerClass.requiredMethod("unpickleTypeV2") + + @tu lazy val QuoteMatchingClass: ClassSymbol = requiredClass("scala.quoted.runtime.QuoteMatching") + @tu lazy val QuoteMatching_ExprMatch: Symbol = QuoteMatchingClass.requiredMethod("ExprMatch") + @tu lazy val QuoteMatching_TypeMatch: Symbol = QuoteMatchingClass.requiredMethod("TypeMatch") + + @tu lazy val ToExprModule: Symbol = requiredModule("scala.quoted.ToExpr") + @tu lazy val ToExprModule_BooleanToExpr: Symbol = ToExprModule.requiredMethod("BooleanToExpr") + @tu lazy val ToExprModule_ByteToExpr: Symbol = ToExprModule.requiredMethod("ByteToExpr") + @tu lazy val ToExprModule_ShortToExpr: Symbol = ToExprModule.requiredMethod("ShortToExpr") + @tu lazy val ToExprModule_IntToExpr: Symbol = ToExprModule.requiredMethod("IntToExpr") + @tu lazy val ToExprModule_LongToExpr: Symbol = 
ToExprModule.requiredMethod("LongToExpr") + @tu lazy val ToExprModule_FloatToExpr: Symbol = ToExprModule.requiredMethod("FloatToExpr") + @tu lazy val ToExprModule_DoubleToExpr: Symbol = ToExprModule.requiredMethod("DoubleToExpr") + @tu lazy val ToExprModule_CharToExpr: Symbol = ToExprModule.requiredMethod("CharToExpr") + @tu lazy val ToExprModule_StringToExpr: Symbol = ToExprModule.requiredMethod("StringToExpr") + + @tu lazy val QuotedRuntimeModule: Symbol = requiredModule("scala.quoted.runtime.Expr") + @tu lazy val QuotedRuntime_exprQuote : Symbol = QuotedRuntimeModule.requiredMethod("quote") + @tu lazy val QuotedRuntime_exprSplice : Symbol = QuotedRuntimeModule.requiredMethod("splice") + @tu lazy val QuotedRuntime_exprNestedSplice : Symbol = QuotedRuntimeModule.requiredMethod("nestedSplice") + + @tu lazy val QuotedRuntime_SplicedTypeAnnot: ClassSymbol = requiredClass("scala.quoted.runtime.SplicedType") + + @tu lazy val QuotedRuntimePatterns: Symbol = requiredModule("scala.quoted.runtime.Patterns") + @tu lazy val QuotedRuntimePatterns_patternHole: Symbol = QuotedRuntimePatterns.requiredMethod("patternHole") + @tu lazy val QuotedRuntimePatterns_patternHigherOrderHole: Symbol = QuotedRuntimePatterns.requiredMethod("patternHigherOrderHole") + @tu lazy val QuotedRuntimePatterns_higherOrderHole: Symbol = QuotedRuntimePatterns.requiredMethod("higherOrderHole") + @tu lazy val QuotedRuntimePatterns_patternTypeAnnot: ClassSymbol = QuotedRuntimePatterns.requiredClass("patternType") + @tu lazy val QuotedRuntimePatterns_fromAboveAnnot: ClassSymbol = QuotedRuntimePatterns.requiredClass("fromAbove") + + @tu lazy val QuotedTypeClass: ClassSymbol = requiredClass("scala.quoted.Type") + @tu lazy val QuotedType_splice: Symbol = QuotedTypeClass.requiredType(tpnme.Underlying) + + @tu lazy val QuotedTypeModule: Symbol = QuotedTypeClass.companionModule + @tu lazy val QuotedTypeModule_of: Symbol = QuotedTypeModule.requiredMethod("of") + + @tu lazy val CanEqualClass: ClassSymbol = 
getClassIfDefined("scala.Eql").orElse(requiredClass("scala.CanEqual")).asClass + def CanEqual_canEqualAny(using Context): TermSymbol = + val methodName = if CanEqualClass.name == tpnme.Eql then nme.eqlAny else nme.canEqualAny + CanEqualClass.companionModule.requiredMethod(methodName) + + @tu lazy val CanThrowClass: ClassSymbol = requiredClass("scala.CanThrow") + @tu lazy val throwsAlias: Symbol = ScalaRuntimePackageVal.requiredType(tpnme.THROWS) + + @tu lazy val TypeBoxClass: ClassSymbol = requiredClass("scala.runtime.TypeBox") + @tu lazy val TypeBox_CAP: TypeSymbol = TypeBoxClass.requiredType(tpnme.CAP) + + @tu lazy val MatchCaseClass: ClassSymbol = requiredClass("scala.runtime.MatchCase") + @tu lazy val NotGivenClass: ClassSymbol = requiredClass("scala.util.NotGiven") + @tu lazy val NotGiven_value: Symbol = NotGivenClass.companionModule.requiredMethod(nme.value) + + @tu lazy val ValueOfClass: ClassSymbol = requiredClass("scala.ValueOf") + + @tu lazy val FromDigitsClass: ClassSymbol = requiredClass("scala.util.FromDigits") + @tu lazy val FromDigits_WithRadixClass: ClassSymbol = requiredClass("scala.util.FromDigits.WithRadix") + @tu lazy val FromDigits_DecimalClass: ClassSymbol = requiredClass("scala.util.FromDigits.Decimal") + @tu lazy val FromDigits_FloatingClass: ClassSymbol = requiredClass("scala.util.FromDigits.Floating") + + @tu lazy val XMLTopScopeModule: Symbol = requiredModule("scala.xml.TopScope") + + @tu lazy val MainAnnotationClass: ClassSymbol = requiredClass("scala.annotation.MainAnnotation") + @tu lazy val MainAnnotationInfo: ClassSymbol = requiredClass("scala.annotation.MainAnnotation.Info") + @tu lazy val MainAnnotationParameter: ClassSymbol = requiredClass("scala.annotation.MainAnnotation.Parameter") + @tu lazy val MainAnnotationParameterAnnotation: ClassSymbol = requiredClass("scala.annotation.MainAnnotation.ParameterAnnotation") + @tu lazy val MainAnnotationCommand: ClassSymbol = requiredClass("scala.annotation.MainAnnotation.Command") + + @tu 
lazy val CommandLineParserModule: Symbol = requiredModule("scala.util.CommandLineParser") + @tu lazy val CLP_ParseError: ClassSymbol = CommandLineParserModule.requiredClass("ParseError").typeRef.symbol.asClass + @tu lazy val CLP_parseArgument: Symbol = CommandLineParserModule.requiredMethod("parseArgument") + @tu lazy val CLP_parseRemainingArguments: Symbol = CommandLineParserModule.requiredMethod("parseRemainingArguments") + @tu lazy val CLP_showError: Symbol = CommandLineParserModule.requiredMethod("showError") + + @tu lazy val TupleTypeRef: TypeRef = requiredClassRef("scala.Tuple") + def TupleClass(using Context): ClassSymbol = TupleTypeRef.symbol.asClass + @tu lazy val Tuple_cons: Symbol = TupleClass.requiredMethod("*:") + @tu lazy val EmptyTupleModule: Symbol = requiredModule("scala.EmptyTuple") + @tu lazy val NonEmptyTupleTypeRef: TypeRef = requiredClassRef("scala.NonEmptyTuple") + def NonEmptyTupleClass(using Context): ClassSymbol = NonEmptyTupleTypeRef.symbol.asClass + lazy val NonEmptyTuple_tail: Symbol = NonEmptyTupleClass.requiredMethod("tail") + @tu lazy val PairClass: ClassSymbol = requiredClass("scala.*:") + + @tu lazy val TupleXXLClass: ClassSymbol = requiredClass("scala.runtime.TupleXXL") + def TupleXXLModule(using Context): Symbol = TupleXXLClass.companionModule + + def TupleXXL_fromIterator(using Context): Symbol = TupleXXLModule.requiredMethod("fromIterator") + + @tu lazy val RuntimeTupleMirrorTypeRef: TypeRef = requiredClassRef("scala.runtime.TupleMirror") + + @tu lazy val RuntimeTuplesModule: Symbol = requiredModule("scala.runtime.Tuples") + @tu lazy val RuntimeTuplesModuleClass: Symbol = RuntimeTuplesModule.moduleClass + @tu lazy val RuntimeTuples_consIterator: Symbol = RuntimeTuplesModule.requiredMethod("consIterator") + @tu lazy val RuntimeTuples_concatIterator: Symbol = RuntimeTuplesModule.requiredMethod("concatIterator") + @tu lazy val RuntimeTuples_apply: Symbol = RuntimeTuplesModule.requiredMethod("apply") + @tu lazy val 
RuntimeTuples_cons: Symbol = RuntimeTuplesModule.requiredMethod("cons") + @tu lazy val RuntimeTuples_size: Symbol = RuntimeTuplesModule.requiredMethod("size") + @tu lazy val RuntimeTuples_tail: Symbol = RuntimeTuplesModule.requiredMethod("tail") + @tu lazy val RuntimeTuples_concat: Symbol = RuntimeTuplesModule.requiredMethod("concat") + @tu lazy val RuntimeTuples_toArray: Symbol = RuntimeTuplesModule.requiredMethod("toArray") + @tu lazy val RuntimeTuples_productToArray: Symbol = RuntimeTuplesModule.requiredMethod("productToArray") + @tu lazy val RuntimeTuples_isInstanceOfTuple: Symbol = RuntimeTuplesModule.requiredMethod("isInstanceOfTuple") + @tu lazy val RuntimeTuples_isInstanceOfEmptyTuple: Symbol = RuntimeTuplesModule.requiredMethod("isInstanceOfEmptyTuple") + @tu lazy val RuntimeTuples_isInstanceOfNonEmptyTuple: Symbol = RuntimeTuplesModule.requiredMethod("isInstanceOfNonEmptyTuple") + + @tu lazy val TupledFunctionTypeRef: TypeRef = requiredClassRef("scala.util.TupledFunction") + def TupledFunctionClass(using Context): ClassSymbol = TupledFunctionTypeRef.symbol.asClass + def RuntimeTupleFunctionsModule(using Context): Symbol = requiredModule("scala.runtime.TupledFunctions") + + @tu lazy val CapsModule: Symbol = requiredModule("scala.caps") + @tu lazy val captureRoot: TermSymbol = CapsModule.requiredValue("*") + @tu lazy val CapsUnsafeModule: Symbol = requiredModule("scala.caps.unsafe") + @tu lazy val Caps_unsafeBox: Symbol = CapsUnsafeModule.requiredMethod("unsafeBox") + @tu lazy val Caps_unsafeUnbox: Symbol = CapsUnsafeModule.requiredMethod("unsafeUnbox") + @tu lazy val Caps_unsafeBoxFunArg: Symbol = CapsUnsafeModule.requiredMethod("unsafeBoxFunArg") + + // Annotation base classes + @tu lazy val AnnotationClass: ClassSymbol = requiredClass("scala.annotation.Annotation") + @tu lazy val StaticAnnotationClass: ClassSymbol = requiredClass("scala.annotation.StaticAnnotation") + @tu lazy val RefiningAnnotationClass: ClassSymbol = 
requiredClass("scala.annotation.RefiningAnnotation") + + // Annotation classes + @tu lazy val AllowConversionsAnnot: ClassSymbol = requiredClass("scala.annotation.allowConversions") + @tu lazy val AnnotationDefaultAnnot: ClassSymbol = requiredClass("scala.annotation.internal.AnnotationDefault") + @tu lazy val BeanPropertyAnnot: ClassSymbol = requiredClass("scala.beans.BeanProperty") + @tu lazy val BooleanBeanPropertyAnnot: ClassSymbol = requiredClass("scala.beans.BooleanBeanProperty") + @tu lazy val BodyAnnot: ClassSymbol = requiredClass("scala.annotation.internal.Body") + @tu lazy val CapabilityAnnot: ClassSymbol = requiredClass("scala.annotation.capability") + @tu lazy val ChildAnnot: ClassSymbol = requiredClass("scala.annotation.internal.Child") + @tu lazy val ContextResultCountAnnot: ClassSymbol = requiredClass("scala.annotation.internal.ContextResultCount") + @tu lazy val ProvisionalSuperClassAnnot: ClassSymbol = requiredClass("scala.annotation.internal.ProvisionalSuperClass") + @tu lazy val DeprecatedAnnot: ClassSymbol = requiredClass("scala.deprecated") + @tu lazy val DeprecatedOverridingAnnot: ClassSymbol = requiredClass("scala.deprecatedOverriding") + @tu lazy val ImplicitAmbiguousAnnot: ClassSymbol = requiredClass("scala.annotation.implicitAmbiguous") + @tu lazy val ImplicitNotFoundAnnot: ClassSymbol = requiredClass("scala.annotation.implicitNotFound") + @tu lazy val InlineParamAnnot: ClassSymbol = requiredClass("scala.annotation.internal.InlineParam") + @tu lazy val ErasedParamAnnot: ClassSymbol = requiredClass("scala.annotation.internal.ErasedParam") + @tu lazy val InvariantBetweenAnnot: ClassSymbol = requiredClass("scala.annotation.internal.InvariantBetween") + @tu lazy val MainAnnot: ClassSymbol = requiredClass("scala.main") + @tu lazy val MappedAlternativeAnnot: ClassSymbol = requiredClass("scala.annotation.internal.MappedAlternative") + @tu lazy val MigrationAnnot: ClassSymbol = requiredClass("scala.annotation.migration") + @tu lazy val NowarnAnnot: 
ClassSymbol = requiredClass("scala.annotation.nowarn") + @tu lazy val TransparentTraitAnnot: ClassSymbol = requiredClass("scala.annotation.transparentTrait") + @tu lazy val NativeAnnot: ClassSymbol = requiredClass("scala.native") + @tu lazy val RepeatedAnnot: ClassSymbol = requiredClass("scala.annotation.internal.Repeated") + @tu lazy val SourceFileAnnot: ClassSymbol = requiredClass("scala.annotation.internal.SourceFile") + @tu lazy val ScalaSignatureAnnot: ClassSymbol = requiredClass("scala.reflect.ScalaSignature") + @tu lazy val ScalaLongSignatureAnnot: ClassSymbol = requiredClass("scala.reflect.ScalaLongSignature") + @tu lazy val ScalaStrictFPAnnot: ClassSymbol = requiredClass("scala.annotation.strictfp") + @tu lazy val ScalaStaticAnnot: ClassSymbol = requiredClass("scala.annotation.static") + @tu lazy val SerialVersionUIDAnnot: ClassSymbol = requiredClass("scala.SerialVersionUID") + @tu lazy val TailrecAnnot: ClassSymbol = requiredClass("scala.annotation.tailrec") + @tu lazy val ThreadUnsafeAnnot: ClassSymbol = requiredClass("scala.annotation.threadUnsafe") + @tu lazy val ConstructorOnlyAnnot: ClassSymbol = requiredClass("scala.annotation.constructorOnly") + @tu lazy val CompileTimeOnlyAnnot: ClassSymbol = requiredClass("scala.annotation.compileTimeOnly") + @tu lazy val SwitchAnnot: ClassSymbol = requiredClass("scala.annotation.switch") + @tu lazy val ExperimentalAnnot: ClassSymbol = requiredClass("scala.annotation.experimental") + @tu lazy val ThrowsAnnot: ClassSymbol = requiredClass("scala.throws") + @tu lazy val TransientAnnot: ClassSymbol = requiredClass("scala.transient") + @tu lazy val UncheckedAnnot: ClassSymbol = requiredClass("scala.unchecked") + @tu lazy val UncheckedStableAnnot: ClassSymbol = requiredClass("scala.annotation.unchecked.uncheckedStable") + @tu lazy val UncheckedVarianceAnnot: ClassSymbol = requiredClass("scala.annotation.unchecked.uncheckedVariance") + @tu lazy val VolatileAnnot: ClassSymbol = requiredClass("scala.volatile") + @tu lazy 
val WithPureFunsAnnot: ClassSymbol = requiredClass("scala.annotation.internal.WithPureFuns") + @tu lazy val FieldMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.field") + @tu lazy val GetterMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.getter") + @tu lazy val ParamMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.param") + @tu lazy val SetterMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.setter") + @tu lazy val ShowAsInfixAnnot: ClassSymbol = requiredClass("scala.annotation.showAsInfix") + @tu lazy val FunctionalInterfaceAnnot: ClassSymbol = requiredClass("java.lang.FunctionalInterface") + @tu lazy val TargetNameAnnot: ClassSymbol = requiredClass("scala.annotation.targetName") + @tu lazy val VarargsAnnot: ClassSymbol = requiredClass("scala.annotation.varargs") + @tu lazy val SinceAnnot: ClassSymbol = requiredClass("scala.annotation.since") + @tu lazy val RequiresCapabilityAnnot: ClassSymbol = requiredClass("scala.annotation.internal.requiresCapability") + @tu lazy val RetainsAnnot: ClassSymbol = requiredClass("scala.annotation.retains") + @tu lazy val RetainsByNameAnnot: ClassSymbol = requiredClass("scala.annotation.retainsByName") + + @tu lazy val JavaRepeatableAnnot: ClassSymbol = requiredClass("java.lang.annotation.Repeatable") + + // A list of meta-annotations that are relevant for fields and accessors + @tu lazy val FieldAccessorMetaAnnots: Set[Symbol] = + Set(FieldMetaAnnot, GetterMetaAnnot, ParamMetaAnnot, SetterMetaAnnot) + + // A list of annotations that are commonly used to indicate that a field/method argument or return + // type is not null. These annotations are used by the nullification logic in JavaNullInterop to + // improve the precision of type nullification. + // We don't require that any of these annotations be present in the class path, but we want to + // create Symbols for the ones that are present, so they can be checked during nullification. 
+ @tu lazy val NotNullAnnots: List[ClassSymbol] = getClassesIfDefined( + "javax.annotation.Nonnull" :: + "javax.validation.constraints.NotNull" :: + "androidx.annotation.NonNull" :: + "android.support.annotation.NonNull" :: + "android.annotation.NonNull" :: + "com.android.annotations.NonNull" :: + "org.eclipse.jdt.annotation.NonNull" :: + "edu.umd.cs.findbugs.annotations.NonNull" :: + "org.checkerframework.checker.nullness.qual.NonNull" :: + "org.checkerframework.checker.nullness.compatqual.NonNullDecl" :: + "org.jetbrains.annotations.NotNull" :: + "org.springframework.lang.NonNull" :: + "org.springframework.lang.NonNullApi" :: + "org.springframework.lang.NonNullFields" :: + "lombok.NonNull" :: + "reactor.util.annotation.NonNull" :: + "reactor.util.annotation.NonNullApi" :: + "io.reactivex.annotations.NonNull" :: Nil) + + // convenient one-parameter method types + def methOfAny(tp: Type): MethodType = MethodType(List(AnyType), tp) + def methOfAnyVal(tp: Type): MethodType = MethodType(List(AnyValType), tp) + def methOfAnyRef(tp: Type): MethodType = MethodType(List(ObjectType), tp) + + // Derived types + + def RepeatedParamType: TypeRef = RepeatedParamClass.typeRef + + def ClassType(arg: Type)(using Context): Type = { + val ctype = ClassClass.typeRef + if (ctx.phase.erasedTypes) ctype else ctype.appliedTo(arg) + } + + /** The enumeration type, goven a value of the enumeration */ + def EnumType(sym: Symbol)(using Context): TypeRef = + // given (in java): "class A { enum E { VAL1 } }" + // - sym: the symbol of the actual enumeration value (VAL1) + // - .owner: the ModuleClassSymbol of the enumeration (object E) + // - .linkedClass: the ClassSymbol of the enumeration (class E) + sym.owner.linkedClass.typeRef + + object FunctionOf { + def apply(args: List[Type], resultType: Type, isContextual: Boolean = false, isErased: Boolean = false)(using Context): Type = + FunctionType(args.length, isContextual, isErased).appliedTo(args ::: resultType :: Nil) + def unapply(ft: 
Type)(using Context): Option[(List[Type], Type, Boolean, Boolean)] = { + val tsym = ft.typeSymbol + if isFunctionClass(tsym) && ft.isRef(tsym) then + val targs = ft.dealias.argInfos + if (targs.isEmpty) None + else Some(targs.init, targs.last, tsym.name.isContextFunction, tsym.name.isErasedFunction) + else None + } + } + + object PartialFunctionOf { + def apply(arg: Type, result: Type)(using Context): Type = + PartialFunctionClass.typeRef.appliedTo(arg :: result :: Nil) + def unapply(pft: Type)(using Context): Option[(Type, List[Type])] = + if (pft.isRef(PartialFunctionClass)) { + val targs = pft.dealias.argInfos + if (targs.length == 2) Some((targs.head, targs.tail)) else None + } + else None + } + + object ArrayOf { + def apply(elem: Type)(using Context): Type = + if (ctx.erasedTypes) JavaArrayType(elem) + else ArrayType.appliedTo(elem :: Nil) + def unapply(tp: Type)(using Context): Option[Type] = tp.dealias match { + case AppliedType(at, arg :: Nil) if at.isRef(ArrayType.symbol) => Some(arg) + case JavaArrayType(tp) if ctx.erasedTypes => Some(tp) + case _ => None + } + } + + object MatchCase { + def apply(pat: Type, body: Type)(using Context): Type = + MatchCaseClass.typeRef.appliedTo(pat, body) + def unapply(tp: Type)(using Context): Option[(Type, Type)] = tp match { + case AppliedType(tycon, pat :: body :: Nil) if tycon.isRef(MatchCaseClass) => + Some((pat, body)) + case _ => + None + } + def isInstance(tp: Type)(using Context): Boolean = tp match { + case AppliedType(tycon: TypeRef, _) => + tycon.name == tpnme.MatchCase && // necessary pre-filter to avoid forcing symbols + tycon.isRef(MatchCaseClass) + case _ => false + } + } + + /** An extractor for multi-dimensional arrays. + * Note that this will also extract the high bound if an + * element type is a wildcard upper-bounded by an array. E.g. + * + * Array[? <: Array[? 
<: Number]] + * + * would match + * + * MultiArrayOf(, 2) + */ + object MultiArrayOf { + def apply(elem: Type, ndims: Int)(using Context): Type = + if (ndims == 0) elem else ArrayOf(apply(elem, ndims - 1)) + def unapply(tp: Type)(using Context): Option[(Type, Int)] = tp match { + case ArrayOf(elemtp) => + def recur(elemtp: Type): Option[(Type, Int)] = elemtp.dealias match { + case tp @ TypeBounds(lo, hi @ MultiArrayOf(finalElemTp, n)) => + Some(finalElemTp, n) + case MultiArrayOf(finalElemTp, n) => Some(finalElemTp, n + 1) + case _ => Some(elemtp, 1) + } + recur(elemtp) + case _ => + None + } + } + + /** Extractor for context function types representing by-name parameters, of the form + * `() ?=> T`. + * Under purefunctions, this becomes `() ?-> T` or `{r1, ..., rN} () ?-> T`. + */ + object ByNameFunction: + def apply(tp: Type)(using Context): Type = tp match + case tp @ EventuallyCapturingType(tp1, refs) if tp.annot.symbol == RetainsByNameAnnot => + CapturingType(apply(tp1), refs) + case _ => + defn.ContextFunction0.typeRef.appliedTo(tp :: Nil) + def unapply(tp: Type)(using Context): Option[Type] = tp match + case tp @ AppliedType(tycon, arg :: Nil) if defn.isByNameFunctionClass(tycon.typeSymbol) => + Some(arg) + case tp @ AnnotatedType(parent, _) => + unapply(parent) + case _ => + None + + final def isByNameFunctionClass(sym: Symbol): Boolean = + sym eq ContextFunction0 + + def isByNameFunction(tp: Type)(using Context): Boolean = tp match + case ByNameFunction(_) => true + case _ => false + + final def isCompiletime_S(sym: Symbol)(using Context): Boolean = + sym.name == tpnme.S && sym.owner == CompiletimeOpsIntModuleClass + + private val compiletimePackageAnyTypes: Set[Name] = Set( + tpnme.Equals, tpnme.NotEquals, tpnme.IsConst, tpnme.ToString + ) + private val compiletimePackageNumericTypes: Set[Name] = Set( + tpnme.Plus, tpnme.Minus, tpnme.Times, tpnme.Div, tpnme.Mod, + tpnme.Lt, tpnme.Gt, tpnme.Ge, tpnme.Le, + tpnme.Abs, tpnme.Negate, tpnme.Min, tpnme.Max + ) 
+ private val compiletimePackageIntTypes: Set[Name] = compiletimePackageNumericTypes ++ Set[Name]( + tpnme.ToString, // ToString is moved to ops.any and deprecated for ops.int + tpnme.NumberOfLeadingZeros, tpnme.ToLong, tpnme.ToFloat, tpnme.ToDouble, + tpnme.Xor, tpnme.BitwiseAnd, tpnme.BitwiseOr, tpnme.ASR, tpnme.LSL, tpnme.LSR + ) + private val compiletimePackageLongTypes: Set[Name] = compiletimePackageNumericTypes ++ Set[Name]( + tpnme.NumberOfLeadingZeros, tpnme.ToInt, tpnme.ToFloat, tpnme.ToDouble, + tpnme.Xor, tpnme.BitwiseAnd, tpnme.BitwiseOr, tpnme.ASR, tpnme.LSL, tpnme.LSR + ) + private val compiletimePackageFloatTypes: Set[Name] = compiletimePackageNumericTypes ++ Set[Name]( + tpnme.ToInt, tpnme.ToLong, tpnme.ToDouble + ) + private val compiletimePackageDoubleTypes: Set[Name] = compiletimePackageNumericTypes ++ Set[Name]( + tpnme.ToInt, tpnme.ToLong, tpnme.ToFloat + ) + private val compiletimePackageBooleanTypes: Set[Name] = Set(tpnme.Not, tpnme.Xor, tpnme.And, tpnme.Or) + private val compiletimePackageStringTypes: Set[Name] = Set( + tpnme.Plus, tpnme.Length, tpnme.Substring, tpnme.Matches, tpnme.CharAt + ) + private val compiletimePackageOpTypes: Set[Name] = + Set(tpnme.S) + ++ compiletimePackageAnyTypes + ++ compiletimePackageIntTypes + ++ compiletimePackageLongTypes + ++ compiletimePackageFloatTypes + ++ compiletimePackageDoubleTypes + ++ compiletimePackageBooleanTypes + ++ compiletimePackageStringTypes + + final def isCompiletimeAppliedType(sym: Symbol)(using Context): Boolean = + compiletimePackageOpTypes.contains(sym.name) + && ( + isCompiletime_S(sym) + || sym.owner == CompiletimeOpsAnyModuleClass && compiletimePackageAnyTypes.contains(sym.name) + || sym.owner == CompiletimeOpsIntModuleClass && compiletimePackageIntTypes.contains(sym.name) + || sym.owner == CompiletimeOpsLongModuleClass && compiletimePackageLongTypes.contains(sym.name) + || sym.owner == CompiletimeOpsFloatModuleClass && compiletimePackageFloatTypes.contains(sym.name) + || sym.owner 
== CompiletimeOpsDoubleModuleClass && compiletimePackageDoubleTypes.contains(sym.name) + || sym.owner == CompiletimeOpsBooleanModuleClass && compiletimePackageBooleanTypes.contains(sym.name) + || sym.owner == CompiletimeOpsStringModuleClass && compiletimePackageStringTypes.contains(sym.name) + ) + + // ----- Scala-2 library patches -------------------------------------- + + /** The `scala.runtime.stdLibPacthes` package contains objects + * that contain defnitions that get added as members to standard library + * objects with the same name. + */ + @tu lazy val StdLibPatchesPackage: TermSymbol = requiredPackage("scala.runtime.stdLibPatches") + @tu private lazy val ScalaPredefModuleClassPatch: Symbol = getModuleIfDefined("scala.runtime.stdLibPatches.Predef").moduleClass + @tu private lazy val LanguageModuleClassPatch: Symbol = getModuleIfDefined("scala.runtime.stdLibPatches.language").moduleClass + + /** If `sym` is a patched library class, the source file of its patch class, + * otherwise `NoSource` + */ + def patchSource(sym: Symbol)(using Context): SourceFile = + if sym == ScalaPredefModuleClass then ScalaPredefModuleClassPatch.source + else if sym == LanguageModuleClass then LanguageModuleClassPatch.source + else NoSource + + /** A finalizer that patches standard library classes. + * It copies all non-private, non-synthetic definitions from `patchCls` + * to `denot` while changing their owners to `denot`. Before that it deletes + * any definitions of `denot` that have the same name as one of the copied + * definitions. + * + * If an object is present in both the original class and the patch class, + * it is not overwritten. Instead its members are copied recursively. + * + * To avpid running into cycles on bootstrap, patching happens only if `patchCls` + * is read from a classfile. 
+ */ + def patchStdLibClass(denot: ClassDenotation)(using Context): Unit = + def patch2(denot: ClassDenotation, patchCls: Symbol): Unit = + val scope = denot.info.decls.openForMutations + + def recurse(patch: Symbol) = patch.is(Module) && scope.lookup(patch.name).exists + + def makeClassSymbol(patch: Symbol, parents: List[Type], selfInfo: TypeOrSymbol) = + newClassSymbol( + owner = denot.symbol, + name = patch.name.asTypeName, + flags = patch.flags, + // need to rebuild a fresh ClassInfo + infoFn = cls => ClassInfo( + prefix = denot.symbol.thisType, + cls = cls, + declaredParents = parents, // assume parents in patch don't refer to symbols in the patch + decls = newScope, + selfInfo = + if patch.is(Module) + then TermRef(denot.symbol.thisType, patch.name.sourceModuleName) + else selfInfo // assume patch self type annotation does not refer to symbols in the patch + ), + privateWithin = patch.privateWithin, + coord = denot.symbol.coord, + assocFile = denot.symbol.associatedFile + ) + + def makeNonClassSymbol(patch: Symbol) = + if patch.is(Inline) then + // Inline symbols contain trees in annotations, which is coupled + // with the underlying symbol. + // Changing owner for inline symbols is a simple workaround. 
+ patch.denot = patch.denot.copySymDenotation(owner = denot.symbol) + patch + else + // change `info` which might contain reference to the patch + patch.copy( + owner = denot.symbol, + info = + if patch.is(Module) + then TypeRef(denot.symbol.thisType, patch.name.moduleClassName) + else patch.info // assume non-object info does not refer to symbols in the patch + ) + + if patchCls.exists then + val patches = patchCls.info.decls.filter(patch => + !patch.isConstructor && !patch.isOneOf(PrivateOrSynthetic)) + for patch <- patches if !recurse(patch) do + val e = scope.lookupEntry(patch.name) + if e != null then scope.unlink(e) + for patch <- patches do + patch.ensureCompleted() + if !recurse(patch) then + val sym = + patch.info match + case ClassInfo(_, _, parents, _, selfInfo) => + makeClassSymbol(patch, parents, selfInfo) + case _ => + makeNonClassSymbol(patch) + end match + sym.annotations = patch.annotations + scope.enter(sym) + if patch.isClass then + patch2(scope.lookup(patch.name).asClass, patch) + + def patchWith(patchCls: Symbol) = + denot.sourceModule.info = denot.typeRef // we run into a cyclic reference when patching if this line is omitted + patch2(denot, patchCls) + + if denot.name == tpnme.Predef.moduleClassName && denot.symbol == ScalaPredefModuleClass then + patchWith(ScalaPredefModuleClassPatch) + else if denot.name == tpnme.language.moduleClassName && denot.symbol == LanguageModuleClass then + patchWith(LanguageModuleClassPatch) + end patchStdLibClass + + // ----- Symbol sets --------------------------------------------------- + + @tu lazy val topClasses: Set[Symbol] = Set(AnyClass, MatchableClass, ObjectClass, AnyValClass) + + @tu lazy val untestableClasses: Set[Symbol] = Set(NothingClass, NullClass, SingletonClass) + + /** Base classes that are assumed to be pure for the purposes of capture checking. + * Every class inheriting from a pure baseclass is pure. 
+ */ + @tu lazy val pureBaseClasses = Set(defn.AnyValClass, defn.ThrowableClass) + + /** Non-inheritable lasses that are assumed to be pure for the purposes of capture checking, + */ + @tu lazy val pureSimpleClasses = Set(StringClass, NothingClass, NullClass) + + @tu lazy val AbstractFunctionType: Array[TypeRef] = mkArityArray("scala.runtime.AbstractFunction", MaxImplementedFunctionArity, 0).asInstanceOf[Array[TypeRef]] + val AbstractFunctionClassPerRun: PerRun[Array[Symbol]] = new PerRun(AbstractFunctionType.map(_.symbol.asClass)) + def AbstractFunctionClass(n: Int)(using Context): Symbol = AbstractFunctionClassPerRun()(using ctx)(n) + + @tu lazy val caseClassSynthesized: List[Symbol] = List( + Any_hashCode, Any_equals, Any_toString, Product_canEqual, Product_productArity, + Product_productPrefix, Product_productElement, Product_productElementName) + + val LazyHolder: PerRun[Map[Symbol, Symbol]] = new PerRun({ + def holderImpl(holderType: String) = requiredClass("scala.runtime." + holderType) + Map[Symbol, Symbol]( + IntClass -> holderImpl("LazyInt"), + LongClass -> holderImpl("LazyLong"), + BooleanClass -> holderImpl("LazyBoolean"), + FloatClass -> holderImpl("LazyFloat"), + DoubleClass -> holderImpl("LazyDouble"), + ByteClass -> holderImpl("LazyByte"), + CharClass -> holderImpl("LazyChar"), + ShortClass -> holderImpl("LazyShort") + ) + .withDefaultValue(holderImpl("LazyRef")) + }) + + @tu lazy val TupleType: Array[TypeRef | Null] = mkArityArray("scala.Tuple", MaxTupleArity, 1) + + def isSpecializedTuple(cls: Symbol)(using Context): Boolean = + cls.isClass && TupleSpecializedClasses.exists(tupleCls => cls.name.isSpecializedNameOf(tupleCls.name)) + + def SpecializedTuple(base: Symbol, args: List[Type])(using Context): Symbol = + base.owner.requiredClass(base.name.specializedName(args)) + + /** Cached function types of arbitary arities. 
+ * Function types are created on demand with newFunctionNTrait, which is + * called from a synthesizer installed in ScalaPackageClass. + */ + private class FunType(prefix: String): + private var classRefs: Array[TypeRef | Null] = new Array(22) + def apply(n: Int): TypeRef = + while n >= classRefs.length do + val classRefs1 = new Array[TypeRef | Null](classRefs.length * 2) + Array.copy(classRefs, 0, classRefs1, 0, classRefs.length) + classRefs = classRefs1 + val funName = s"scala.$prefix$n" + if classRefs(n) == null then + classRefs(n) = + if prefix.startsWith("Impure") + then staticRef(funName.toTypeName).symbol.typeRef + else requiredClassRef(funName) + classRefs(n).nn + end FunType + + private def funTypeIdx(isContextual: Boolean, isErased: Boolean, isImpure: Boolean): Int = + (if isContextual then 1 else 0) + + (if isErased then 2 else 0) + + (if isImpure then 4 else 0) + + private val funTypeArray: IArray[FunType] = + val arr = Array.ofDim[FunType](8) + val choices = List(false, true) + for contxt <- choices; erasd <- choices; impure <- choices do + var str = "Function" + if contxt then str = "Context" + str + if erasd then str = "Erased" + str + if impure then str = "Impure" + str + arr(funTypeIdx(contxt, erasd, impure)) = FunType(str) + IArray.unsafeFromArray(arr) + + def FunctionSymbol(n: Int, isContextual: Boolean = false, isErased: Boolean = false, isImpure: Boolean = false)(using Context): Symbol = + funTypeArray(funTypeIdx(isContextual, isErased, isImpure))(n).symbol + + @tu lazy val Function0_apply: Symbol = Function0.requiredMethod(nme.apply) + @tu lazy val ContextFunction0_apply: Symbol = ContextFunction0.requiredMethod(nme.apply) + + @tu lazy val Function0: Symbol = FunctionSymbol(0) + @tu lazy val Function1: Symbol = FunctionSymbol(1) + @tu lazy val Function2: Symbol = FunctionSymbol(2) + @tu lazy val ContextFunction0: Symbol = FunctionSymbol(0, isContextual = true) + + def FunctionType(n: Int, isContextual: Boolean = false, isErased: Boolean = 
false, isImpure: Boolean = false)(using Context): TypeRef = + FunctionSymbol(n, isContextual && !ctx.erasedTypes, isErased, isImpure).typeRef + + lazy val PolyFunctionClass = requiredClass("scala.PolyFunction") + def PolyFunctionType = PolyFunctionClass.typeRef + + /** If `cls` is a class in the scala package, its name, otherwise EmptyTypeName */ + def scalaClassName(cls: Symbol)(using Context): TypeName = cls.denot match + case clsd: ClassDenotation if clsd.owner eq ScalaPackageClass => + clsd.name.asInstanceOf[TypeName] + case _ => + EmptyTypeName + + /** If type `ref` refers to a class in the scala package, its name, otherwise EmptyTypeName */ + def scalaClassName(ref: Type)(using Context): TypeName = scalaClassName(ref.classSymbol) + + private def isVarArityClass(cls: Symbol, prefix: String) = + cls.isClass + && cls.owner.eq(ScalaPackageClass) + && cls.name.testSimple(name => + name.startsWith(prefix) + && name.length > prefix.length + && digitsOnlyAfter(name, prefix.length)) + + private def digitsOnlyAfter(name: SimpleName, idx: Int): Boolean = + idx == name.length || name(idx).isDigit && digitsOnlyAfter(name, idx + 1) + + def isBottomClass(cls: Symbol): Boolean = + if ctx.mode.is(Mode.SafeNulls) && !ctx.phase.erasedTypes + then cls == NothingClass + else isBottomClassAfterErasure(cls) + + def isBottomClassAfterErasure(cls: Symbol): Boolean = cls == NothingClass || cls == NullClass + + /** Is any function class where + * - FunctionXXL + * - FunctionN for N >= 0 + * - ContextFunctionN for N >= 0 + * - ErasedFunctionN for N > 0 + * - ErasedContextFunctionN for N > 0 + */ + def isFunctionClass(cls: Symbol): Boolean = scalaClassName(cls).isFunction + + /** Is a function class, or an impure function type alias */ + def isFunctionSymbol(sym: Symbol): Boolean = + sym.isType && (sym.owner eq ScalaPackageClass) && sym.name.isFunction + + /** Is a function class where + * - FunctionN for N >= 0 and N != XXL + */ + def isPlainFunctionClass(cls: Symbol) = 
isVarArityClass(cls, str.Function) + + /** Is a context function class.
22 then FunctionXXLClass.typeRef + else if arity >= 0 then FunctionType(arity) + else NoType + + private val JavaImportFns: List[RootRef] = List( + RootRef(() => JavaLangPackageVal.termRef) + ) + + private val ScalaImportFns: List[RootRef] = + JavaImportFns :+ + RootRef(() => ScalaPackageVal.termRef) + + private val PredefImportFns: RootRef = + RootRef(() => ScalaPredefModule.termRef, isPredef=true) + + @tu private lazy val JavaRootImportFns: List[RootRef] = + if ctx.settings.YnoImports.value then Nil + else JavaImportFns + + @tu private lazy val ScalaRootImportFns: List[RootRef] = + if ctx.settings.YnoImports.value then Nil + else if ctx.settings.YnoPredef.value then ScalaImportFns + else ScalaImportFns :+ PredefImportFns + + @tu private lazy val JavaRootImportTypes: List[TermRef] = JavaRootImportFns.map(_.refFn()) + @tu private lazy val ScalaRootImportTypes: List[TermRef] = ScalaRootImportFns.map(_.refFn()) + @tu private lazy val JavaUnqualifiedOwnerTypes: Set[NamedType] = unqualifiedTypes(JavaRootImportTypes) + @tu private lazy val ScalaUnqualifiedOwnerTypes: Set[NamedType] = unqualifiedTypes(ScalaRootImportTypes) + + /** Are we compiling a java source file? 
*/ + private def isJavaContext(using Context): Boolean = + ctx.compilationUnit.isJava + + private def unqualifiedTypes(refs: List[TermRef]) = + val types = refs.toSet[NamedType] + types ++ types.map(_.symbol.moduleClass.typeRef) + + /** Lazy references to the root imports */ + def rootImportFns(using Context): List[RootRef] = + if isJavaContext then JavaRootImportFns + else ScalaRootImportFns + + /** Root types imported by default */ + def rootImportTypes(using Context): List[TermRef] = + if isJavaContext then JavaRootImportTypes + else ScalaRootImportTypes + + /** Modules whose members are in the default namespace and their module classes */ + def unqualifiedOwnerTypes(using Context): Set[NamedType] = + if isJavaContext then JavaUnqualifiedOwnerTypes + else ScalaUnqualifiedOwnerTypes + + /** Names of the root import symbols that can be hidden by other imports */ + @tu lazy val ShadowableImportNames: Set[TermName] = Set("Predef".toTermName) + + /** Class symbols for which no class exist at runtime */ + @tu lazy val NotRuntimeClasses: Set[Symbol] = Set(AnyClass, MatchableClass, AnyValClass, NullClass, NothingClass) + + @tu lazy val SpecialClassTagClasses: Set[Symbol] = Set(UnitClass, AnyClass, AnyValClass) + + @tu lazy val SpecialManifestClasses: Set[Symbol] = Set(AnyClass, AnyValClass, ObjectClass, NullClass, NothingClass) + + /** Classes that are known not to have an initializer irrespective of + * whether NoInits is set. Note: FunctionXXLClass is in this set + * because if it is compiled by Scala2, it does not get a NoInit flag. + * But since it is introduced only at erasure, there's no chance + * for augmentScala2Traits to do anything on a class that inherits it. So + * it also misses an implementation class, which means that the usual scheme + * of calling a superclass init in the implementation class of a Scala2 + * trait gets screwed up. Therefore, it is mandatory that FunctionXXL + * is treated as a NoInit trait. 
+ */ + @tu lazy val NoInitClasses: Set[Symbol] = NotRuntimeClasses + FunctionXXLClass + + def isPolymorphicAfterErasure(sym: Symbol): Boolean = + (sym eq Any_isInstanceOf) || (sym eq Any_asInstanceOf) || (sym eq Object_synchronized) + + /** Is this type a `TupleN` type? + * + * @return true if the dealiased type of `tp` is `TupleN[T1, T2, ..., Tn]` + */ + def isTupleNType(tp: Type)(using Context): Boolean = { + val tp1 = tp.dealias + val arity = tp1.argInfos.length + arity <= MaxTupleArity && { + val tupletp = TupleType(arity) + tupletp != null && tp1.isRef(tupletp.symbol) + } + } + + def tupleType(elems: List[Type]): Type = { + val arity = elems.length + if 0 < arity && arity <= MaxTupleArity then + val tupletp = TupleType(arity) + if tupletp != null then tupletp.appliedTo(elems) + else TypeOps.nestedPairs(elems) + else TypeOps.nestedPairs(elems) + } + + def tupleTypes(tp: Type, bound: Int = Int.MaxValue)(using Context): Option[List[Type]] = { + @tailrec def rec(tp: Type, acc: List[Type], bound: Int): Option[List[Type]] = tp.normalized.dealias match { + case _ if bound < 0 => Some(acc.reverse) + case tp: AppliedType if PairClass == tp.classSymbol => rec(tp.args(1), tp.args.head :: acc, bound - 1) + case tp: AppliedType if isTupleNType(tp) => Some(acc.reverse ::: tp.args) + case tp: TermRef if tp.symbol == defn.EmptyTupleModule => Some(acc.reverse) + case _ => None + } + rec(tp.stripTypeVar, Nil, bound) + } + + def isProductSubType(tp: Type)(using Context): Boolean = tp.derivesFrom(ProductClass) + + /** Is `tp` (an alias) of either a scala.FunctionN or a scala.ContextFunctionN + * instance? 
+ */ + def isNonRefinedFunction(tp: Type)(using Context): Boolean = + val arity = functionArity(tp) + val sym = tp.dealias.typeSymbol + + arity >= 0 + && isFunctionClass(sym) + && tp.isRef( + FunctionType(arity, sym.name.isContextFunction, sym.name.isErasedFunction).typeSymbol, + skipRefined = false) + end isNonRefinedFunction + + /** Is `tp` a representation of a (possibly dependent) function type or an alias of such? */ + def isFunctionType(tp: Type)(using Context): Boolean = + isNonRefinedFunction(tp.dropDependentRefinement) + + def isFunctionOrPolyType(tp: Type)(using Context): Boolean = + isFunctionType(tp) || (tp.typeSymbol eq defn.PolyFunctionClass) + + private def withSpecMethods(cls: ClassSymbol, bases: List[Name], paramTypes: Set[TypeRef]) = + for base <- bases; tp <- paramTypes do + cls.enter(newSymbol(cls, base.specializedName(List(tp)), Method, ExprType(tp))) + cls + + @tu lazy val Tuple1: ClassSymbol = withSpecMethods(requiredClass("scala.Tuple1"), List(nme._1), Tuple1SpecializedParamTypes) + @tu lazy val Tuple2: ClassSymbol = withSpecMethods(requiredClass("scala.Tuple2"), List(nme._1, nme._2), Tuple2SpecializedParamTypes) + + @tu lazy val TupleSpecializedClasses: Set[Symbol] = Set(Tuple1, Tuple2) + @tu lazy val Tuple1SpecializedParamTypes: Set[TypeRef] = Set(IntType, LongType, DoubleType) + @tu lazy val Tuple2SpecializedParamTypes: Set[TypeRef] = Set(IntType, LongType, DoubleType, CharType, BooleanType) + @tu lazy val Tuple1SpecializedParamClasses: PerRun[Set[Symbol]] = new PerRun(Tuple1SpecializedParamTypes.map(_.symbol)) + @tu lazy val Tuple2SpecializedParamClasses: PerRun[Set[Symbol]] = new PerRun(Tuple2SpecializedParamTypes.map(_.symbol)) + + // Specialized type parameters defined for scala.Function{0,1,2}. 
+ @tu lazy val Function1SpecializedParamTypes: collection.Set[TypeRef] = + Set(IntType, LongType, FloatType, DoubleType) + @tu lazy val Function2SpecializedParamTypes: collection.Set[TypeRef] = + Set(IntType, LongType, DoubleType) + @tu lazy val Function0SpecializedReturnTypes: collection.Set[TypeRef] = + ScalaNumericValueTypeList.toSet + UnitType + BooleanType + @tu lazy val Function1SpecializedReturnTypes: collection.Set[TypeRef] = + Set(UnitType, BooleanType, IntType, FloatType, LongType, DoubleType) + @tu lazy val Function2SpecializedReturnTypes: collection.Set[TypeRef] = + Function1SpecializedReturnTypes + + @tu lazy val Function1SpecializedParamClasses: PerRun[collection.Set[Symbol]] = + new PerRun(Function1SpecializedParamTypes.map(_.symbol)) + @tu lazy val Function2SpecializedParamClasses: PerRun[collection.Set[Symbol]] = + new PerRun(Function2SpecializedParamTypes.map(_.symbol)) + @tu lazy val Function0SpecializedReturnClasses: PerRun[collection.Set[Symbol]] = + new PerRun(Function0SpecializedReturnTypes.map(_.symbol)) + @tu lazy val Function1SpecializedReturnClasses: PerRun[collection.Set[Symbol]] = + new PerRun(Function1SpecializedReturnTypes.map(_.symbol)) + @tu lazy val Function2SpecializedReturnClasses: PerRun[collection.Set[Symbol]] = + new PerRun(Function2SpecializedReturnTypes.map(_.symbol)) + + def isSpecializableTuple(base: Symbol, args: List[Type])(using Context): Boolean = + args.length <= 2 && base.isClass && TupleSpecializedClasses.exists(base.asClass.derivesFrom) && args.match + case List(x) => Tuple1SpecializedParamClasses().contains(x.classSymbol) + case List(x, y) => Tuple2SpecializedParamClasses().contains(x.classSymbol) && Tuple2SpecializedParamClasses().contains(y.classSymbol) + case _ => false + && base.owner.denot.info.member(base.name.specializedName(args)).exists // when dotc compiles the stdlib there are no specialised classes + + def isSpecializableFunction(cls: ClassSymbol, paramTypes: List[Type], retType: Type)(using Context): 
Boolean = + paramTypes.length <= 2 + && (cls.derivesFrom(FunctionSymbol(paramTypes.length)) || isByNameFunctionClass(cls)) + && isSpecializableFunctionSAM(paramTypes, retType) + + /** If the Single Abstract Method of a Function class has this type, is it specializable? */ + def isSpecializableFunctionSAM(paramTypes: List[Type], retType: Type)(using Context): Boolean = + paramTypes.length <= 2 && (paramTypes match { + case Nil => + Function0SpecializedReturnClasses().contains(retType.typeSymbol) + case List(paramType0) => + Function1SpecializedParamClasses().contains(paramType0.typeSymbol) && + Function1SpecializedReturnClasses().contains(retType.typeSymbol) + case List(paramType0, paramType1) => + Function2SpecializedParamClasses().contains(paramType0.typeSymbol) && + Function2SpecializedParamClasses().contains(paramType1.typeSymbol) && + Function2SpecializedReturnClasses().contains(retType.typeSymbol) + case _ => + false + }) + + @tu lazy val Function0SpecializedApplyNames: collection.Set[TermName] = + for r <- Function0SpecializedReturnTypes + yield nme.apply.specializedFunction(r, Nil).asTermName + + @tu lazy val Function1SpecializedApplyNames: collection.Set[TermName] = + for + r <- Function1SpecializedReturnTypes + t1 <- Function1SpecializedParamTypes + yield + nme.apply.specializedFunction(r, List(t1)).asTermName + + @tu lazy val Function2SpecializedApplyNames: collection.Set[TermName] = + for + r <- Function2SpecializedReturnTypes + t1 <- Function2SpecializedParamTypes + t2 <- Function2SpecializedParamTypes + yield + nme.apply.specializedFunction(r, List(t1, t2)).asTermName + + @tu lazy val FunctionSpecializedApplyNames: collection.Set[Name] = + Function0SpecializedApplyNames ++ Function1SpecializedApplyNames ++ Function2SpecializedApplyNames + + def functionArity(tp: Type)(using Context): Int = tp.dropDependentRefinement.dealias.argInfos.length - 1 + + /** Return underlying context function type (i.e. 
instance of an ContextFunctionN class) + * or NoType if none exists. The following types are considered as underlying types: + * - the alias of an alias type + * - the instance or origin of a TypeVar (i.e. the result of a stripTypeVar) + * - the upper bound of a TypeParamRef in the current constraint + */ + def asContextFunctionType(tp: Type)(using Context): Type = + tp.stripTypeVar.dealias match + case tp1: TypeParamRef if ctx.typerState.constraint.contains(tp1) => + asContextFunctionType(TypeComparer.bounds(tp1).hiBound) + case tp1 => + if tp1.typeSymbol.name.isContextFunction && isFunctionType(tp1) then tp1 + else NoType + + /** Is `tp` an context function type? */ + def isContextFunctionType(tp: Type)(using Context): Boolean = + asContextFunctionType(tp).exists + + /** An extractor for context function types `As ?=> B`, possibly with + * dependent refinements. Optionally returns a triple consisting of the argument + * types `As`, the result type `B` and a whether the type is an erased context function. 
+ */ + object ContextFunctionType: + def unapply(tp: Type)(using Context): Option[(List[Type], Type, Boolean)] = + if ctx.erasedTypes then + atPhase(erasurePhase)(unapply(tp)) + else + val tp1 = asContextFunctionType(tp) + if tp1.exists then + val args = tp1.dropDependentRefinement.argInfos + Some((args.init, args.last, tp1.typeSymbol.name.isErasedFunction)) + else None + + def isErasedFunctionType(tp: Type)(using Context): Boolean = + tp.dealias.typeSymbol.name.isErasedFunction && isFunctionType(tp) + + /** A whitelist of Scala-2 classes that are known to be pure */ + def isAssuredNoInits(sym: Symbol): Boolean = + (sym `eq` SomeClass) || isTupleClass(sym) + + /** If `cls` is Tuple1..Tuple22, add the corresponding *: type as last parent to `parents` */ + def adjustForTuple(cls: ClassSymbol, tparams: List[TypeSymbol], parents: List[Type]): List[Type] = { + if !isTupleClass(cls) then parents + else if tparams.isEmpty then parents :+ TupleTypeRef + else + assert(parents.head.typeSymbol == ObjectClass) + TypeOps.nestedPairs(tparams.map(_.typeRef)) :: parents.tail + } + + /** If it is BoxedUnit, remove `java.io.Serializable` from `parents`. */ + def adjustForBoxedUnit(cls: ClassSymbol, parents: List[Type]): List[Type] = + if (isBoxedUnitClass(cls)) parents.filter(_.typeSymbol != JavaSerializableClass) + else parents + + private val HasProblematicGetClass: Set[Name] = Set( + tpnme.AnyVal, tpnme.Byte, tpnme.Short, tpnme.Char, tpnme.Int, tpnme.Long, tpnme.Float, tpnme.Double, + tpnme.Unit, tpnme.Boolean) + + /** When typing a primitive value class or AnyVal, we ignore the `getClass` + * member: it's supposed to be an override of the `getClass` defined on `Any`, + * but in dotty `Any#getClass` is polymorphic so it ends up being an overload. 
+ * This is especially problematic because it means that when writing: + * + * 1.asInstanceOf[Int & AnyRef].getClass + * + * the `getClass` that returns `Class[Int]` defined in Int can be selected, + * but this call is specified to return `classOf[Integer]`, see + * tests/run/t5568.scala. + * + * FIXME: remove all the `getClass` methods defined in the standard library + * so we don't have to hot-patch it like this. + */ + def hasProblematicGetClass(className: Name): Boolean = + HasProblematicGetClass.contains(className) + + /** Is synthesized symbol with alphanumeric name allowed to be used as an infix operator? */ + def isInfix(sym: Symbol)(using Context): Boolean = + (sym eq Object_eq) || (sym eq Object_ne) + + @tu lazy val assumedTransparentNames: Map[Name, Set[Symbol]] = + // add these for now, until we had a chance to retrofit 2.13 stdlib + // we should do a more through sweep through it then. + val strs = Map( + "Any" -> Set("scala"), + "AnyVal" -> Set("scala"), + "Matchable" -> Set("scala"), + "Product" -> Set("scala"), + "Object" -> Set("java.lang"), + "Comparable" -> Set("java.lang"), + "Serializable" -> Set("java.io"), + "BitSetOps" -> Set("scala.collection"), + "IndexedSeqOps" -> Set("scala.collection", "scala.collection.mutable", "scala.collection.immutable"), + "IterableOnceOps" -> Set("scala.collection"), + "IterableOps" -> Set("scala.collection"), + "LinearSeqOps" -> Set("scala.collection", "scala.collection.immutable"), + "MapOps" -> Set("scala.collection", "scala.collection.mutable", "scala.collection.immutable"), + "SeqOps" -> Set("scala.collection", "scala.collection.mutable", "scala.collection.immutable"), + "SetOps" -> Set("scala.collection", "scala.collection.mutable", "scala.collection.immutable"), + "SortedMapOps" -> Set("scala.collection", "scala.collection.mutable", "scala.collection.immutable"), + "SortedOps" -> Set("scala.collection"), + "SortedSetOps" -> Set("scala.collection", "scala.collection.mutable", "scala.collection.immutable"), 
+ "StrictOptimizedIterableOps" -> Set("scala.collection"), + "StrictOptimizedLinearSeqOps" -> Set("scala.collection"), + "StrictOptimizedMapOps" -> Set("scala.collection", "scala.collection.immutable"), + "StrictOptimizedSeqOps" -> Set("scala.collection", "scala.collection.immutable"), + "StrictOptimizedSetOps" -> Set("scala.collection", "scala.collection.immutable"), + "StrictOptimizedSortedMapOps" -> Set("scala.collection", "scala.collection.immutable"), + "StrictOptimizedSortedSetOps" -> Set("scala.collection", "scala.collection.immutable"), + "ArrayDequeOps" -> Set("scala.collection.mutable"), + "DefaultSerializable" -> Set("scala.collection.generic"), + "IsIterable" -> Set("scala.collection.generic"), + "IsIterableLowPriority" -> Set("scala.collection.generic"), + "IsIterableOnce" -> Set("scala.collection.generic"), + "IsIterableOnceLowPriority" -> Set("scala.collection.generic"), + "IsMap" -> Set("scala.collection.generic"), + "IsSeq" -> Set("scala.collection.generic")) + strs.map { case (simple, pkgs) => ( + simple.toTypeName, + pkgs.map(pkg => staticRef(pkg.toTermName, isPackage = true).symbol.moduleClass) + ) + } + + def isAssumedTransparent(sym: Symbol): Boolean = + assumedTransparentNames.get(sym.name) match + case Some(pkgs) => pkgs.contains(sym.owner) + case none => false + + // ----- primitive value class machinery ------------------------------------------ + + class PerRun[T](generate: Context ?=> T) { + private var current: RunId = NoRunId + private var cached: T = _ + def apply()(using Context): T = { + if (current != ctx.runId) { + cached = generate + current = ctx.runId + } + cached + } + } + + @tu lazy val ScalaNumericValueTypeList: List[TypeRef] = List( + ByteType, ShortType, CharType, IntType, LongType, FloatType, DoubleType) + + @tu private lazy val ScalaNumericValueTypes: collection.Set[TypeRef] = ScalaNumericValueTypeList.toSet + @tu private lazy val ScalaValueTypes: collection.Set[TypeRef] = ScalaNumericValueTypes `union` Set(UnitType, 
BooleanType) + + val ScalaNumericValueClasses: PerRun[collection.Set[Symbol]] = new PerRun(ScalaNumericValueTypes.map(_.symbol)) + val ScalaValueClasses: PerRun[collection.Set[Symbol]] = new PerRun(ScalaValueTypes.map(_.symbol)) + + val ScalaBoxedClasses: PerRun[collection.Set[Symbol]] = new PerRun( + Set(BoxedByteClass, BoxedShortClass, BoxedCharClass, BoxedIntClass, BoxedLongClass, BoxedFloatClass, BoxedDoubleClass, BoxedUnitClass, BoxedBooleanClass) + ) + + private val valueTypeEnc = mutable.Map[TypeName, PrimitiveClassEnc]() + private val typeTags = mutable.Map[TypeName, Name]().withDefaultValue(nme.specializedTypeNames.Object) + +// private val unboxedTypeRef = mutable.Map[TypeName, TypeRef]() +// private val javaTypeToValueTypeRef = mutable.Map[Class[?], TypeRef]() +// private val valueTypeNamesToJavaType = mutable.Map[TypeName, Class[?]]() + + private def valueTypeRef(name: String, jtype: Class[?], enc: Int, tag: Name): TypeRef = { + val vcls = requiredClassRef(name) + valueTypeEnc(vcls.name) = enc + typeTags(vcls.name) = tag +// unboxedTypeRef(boxed.name) = vcls +// javaTypeToValueTypeRef(jtype) = vcls +// valueTypeNamesToJavaType(vcls.name) = jtype + vcls + } + + /** The type of the boxed class corresponding to primitive value type `tp`. 
*/ + def boxedType(tp: Type)(using Context): TypeRef = { + val cls = tp.classSymbol + if (cls eq ByteClass) BoxedByteClass + else if (cls eq ShortClass) BoxedShortClass + else if (cls eq CharClass) BoxedCharClass + else if (cls eq IntClass) BoxedIntClass + else if (cls eq LongClass) BoxedLongClass + else if (cls eq FloatClass) BoxedFloatClass + else if (cls eq DoubleClass) BoxedDoubleClass + else if (cls eq UnitClass) BoxedUnitClass + else if (cls eq BooleanClass) BoxedBooleanClass + else sys.error(s"Not a primitive value type: $tp") + }.typeRef + + def unboxedType(tp: Type)(using Context): TypeRef = { + val cls = tp.classSymbol + if (cls eq BoxedByteClass) ByteType + else if (cls eq BoxedShortClass) ShortType + else if (cls eq BoxedCharClass) CharType + else if (cls eq BoxedIntClass) IntType + else if (cls eq BoxedLongClass) LongType + else if (cls eq BoxedFloatClass) FloatType + else if (cls eq BoxedDoubleClass) DoubleType + else if (cls eq BoxedUnitClass) UnitType + else if (cls eq BoxedBooleanClass) BooleanType + else sys.error(s"Not a boxed primitive value type: $tp") + } + + /** The JVM tag for `tp` if it's a primitive, `java.lang.Object` otherwise. 
*/ + def typeTag(tp: Type)(using Context): Name = typeTags(scalaClassName(tp)) + +// /** The `Class[?]` of a primitive value type name */ +// def valueTypeNameToJavaType(name: TypeName)(using Context): Option[Class[?]] = +// valueTypeNamesToJavaType.get(if (name.firstPart eq nme.scala) name.lastPart.toTypeName else name) + + type PrimitiveClassEnc = Int + + val ByteEnc: Int = 2 + val ShortEnc: Int = ByteEnc * 3 + val CharEnc: Int = 5 + val IntEnc: Int = ShortEnc * CharEnc + val LongEnc: Int = IntEnc * 7 + val FloatEnc: Int = LongEnc * 11 + val DoubleEnc: Int = FloatEnc * 13 + val BooleanEnc: Int = 17 + val UnitEnc: Int = 19 + + def isValueSubType(tref1: TypeRef, tref2: TypeRef)(using Context): Boolean = + valueTypeEnc(tref2.name) % valueTypeEnc(tref1.name) == 0 + def isValueSubClass(sym1: Symbol, sym2: Symbol): Boolean = + valueTypeEnc(sym2.asClass.name) % valueTypeEnc(sym1.asClass.name) == 0 + + @tu lazy val specialErasure: SimpleIdentityMap[Symbol, ClassSymbol] = + SimpleIdentityMap.empty[Symbol] + .updated(AnyClass, ObjectClass) + .updated(MatchableClass, ObjectClass) + .updated(AnyValClass, ObjectClass) + .updated(SingletonClass, ObjectClass) + .updated(TupleClass, ProductClass) + .updated(NonEmptyTupleClass, ProductClass) + .updated(PairClass, ObjectClass) + + // ----- Initialization --------------------------------------------------- + + /** Lists core classes that don't have underlying bytecode, but are synthesized on-the-fly in every reflection universe */ + @tu lazy val syntheticScalaClasses: List[TypeSymbol] = + List( + AnyClass, + MatchableClass, + AnyRefAlias, + AnyKindClass, + andType, + orType, + RepeatedParamClass, + ByNameParamClass2x, + IntoType, + AnyValClass, + NullClass, + NothingClass, + SingletonClass) + + @tu lazy val syntheticCoreClasses: List[Symbol] = syntheticScalaClasses ++ List( + EmptyPackageVal, + OpsPackageClass) + + /** Lists core methods that don't have underlying bytecode, but are synthesized on-the-fly in every reflection 
universe */ + @tu lazy val syntheticCoreMethods: List[TermSymbol] = + AnyMethods ++ ObjectMethods ++ List(String_+, throwMethod) + + @tu lazy val reservedScalaClassNames: Set[Name] = syntheticScalaClasses.map(_.name).toSet + + private var isInitialized = false + + def init()(using ctx: DetachedContext): Unit = { + this.initCtx = ctx + if (!isInitialized) { + // force initialization of every symbol that is synthesized or hijacked by the compiler + val forced = + syntheticCoreClasses ++ syntheticCoreMethods ++ ScalaValueClasses() :+ JavaEnumClass + isInitialized = true + } + addSyntheticSymbolsComments + } + + /** Definitions used in Lazy Vals implementation */ + val LazyValsModuleName = "scala.runtime.LazyVals" + @tu lazy val LazyValsModule = requiredModule(LazyValsModuleName) + @tu lazy val LazyValsWaitingState = requiredClass(s"$LazyValsModuleName.Waiting") + @tu lazy val LazyValsControlState = requiredClass(s"$LazyValsModuleName.LazyValControlState") + + def addSyntheticSymbolsComments(using Context): Unit = + def add(sym: Symbol, doc: String) = ctx.docCtx.foreach(_.addDocstring(sym, Some(Comment(NoSpan, doc)))) + + add(AnyClass, + """/** Class `Any` is the root of the Scala class hierarchy. Every class in a Scala + | * execution environment inherits directly or indirectly from this class. + | * + | * Starting with Scala 2.10 it is possible to directly extend `Any` using ''universal traits''. + | * A ''universal trait'' is a trait that extends `Any`, only has `def`s as members, and does no initialization. + | * + | * The main use case for universal traits is to allow basic inheritance of methods for [[scala.AnyVal value classes]]. 
+ | * For example, + | * + | * {{{ + | * trait Printable extends Any { + | * def print(): Unit = println(this) + | * } + | * class Wrapper(val underlying: Int) extends AnyVal with Printable + | * + | * val w = new Wrapper(3) + | * w.print() + | * }}} + | * + | * See the [[https://docs.scala-lang.org/overviews/core/value-classes.html Value Classes and Universal Traits]] for more + | * details on the interplay of universal traits and value classes. + | */ + """.stripMargin) + + add(Any_==, + """/** Test two objects for equality. + | * The expression `x == that` is equivalent to `if (x eq null) that eq null else x.equals(that)`. + | * + | * @param that the object to compare against this object for equality. + | * @return `true` if the receiver object is equivalent to the argument; `false` otherwise. + | */ + """.stripMargin) + + add(Any_!=, + """/** Test two objects for inequality. + | * + | * @param that the object to compare against this object for equality. + | * @return `true` if !(this == that), `false` otherwise. + | */ + """.stripMargin) + + add(Any_equals, + """/** Compares the receiver object (`this`) with the argument object (`that`) for equivalence. + | * + | * Any implementation of this method should be an [[https://en.wikipedia.org/wiki/Equivalence_relation equivalence relation]]: + | * + | * - It is reflexive: for any instance `x` of type `Any`, `x.equals(x)` should return `true`. + | * - It is symmetric: for any instances `x` and `y` of type `Any`, `x.equals(y)` should return `true` if and + | * only if `y.equals(x)` returns `true`. + | * - It is transitive: for any instances `x`, `y`, and `z` of type `Any` if `x.equals(y)` returns `true` and + | * `y.equals(z)` returns `true`, then `x.equals(z)` should return `true`. + | * + | * If you override this method, you should verify that your implementation remains an equivalence relation. 
+ | * Additionally, when overriding this method it is usually necessary to override `hashCode` to ensure that + | * objects which are "equal" (`o1.equals(o2)` returns `true`) hash to the same [[scala.Int]]. + | * (`o1.hashCode.equals(o2.hashCode)`). + | * + | * @param that the object to compare against this object for equality. + | * @return `true` if the receiver object is equivalent to the argument; `false` otherwise. + | */ + """.stripMargin) + + add(Any_hashCode, + """/** Calculate a hash code value for the object. + | * + | * The default hashing algorithm is platform dependent. + | * + | * Note that it is allowed for two objects to have identical hash codes (`o1.hashCode.equals(o2.hashCode)`) yet + | * not be equal (`o1.equals(o2)` returns `false`). A degenerate implementation could always return `0`. + | * However, it is required that if two objects are equal (`o1.equals(o2)` returns `true`) that they have + | * identical hash codes (`o1.hashCode.equals(o2.hashCode)`). Therefore, when overriding this method, be sure + | * to verify that the behavior is consistent with the `equals` method. + | * + | * @return the hash code value for this object. + | */ + """.stripMargin) + + add(Any_toString, + """/** Returns a string representation of the object. + | * + | * The default representation is platform dependent. + | * + | * @return a string representation of the object. + | */ + """.stripMargin) + + add(Any_##, + """/** Equivalent to `x.hashCode` except for boxed numeric types and `null`. + | * For numerics, it returns a hash value which is consistent + | * with value equality: if two value type instances compare + | * as true, then ## will produce the same hash value for each + | * of them. + | * For `null` returns a hashcode where `null.hashCode` throws a + | * `NullPointerException`. + | * + | * @return a hash value consistent with == + | */ + """.stripMargin) + + add(Any_isInstanceOf, + """/** Test whether the dynamic type of the receiver object is `T0`. 
+ | * + | * Note that the result of the test is modulo Scala's erasure semantics. + | * Therefore the expression `1.isInstanceOf[String]` will return `false`, while the + | * expression `List(1).isInstanceOf[List[String]]` will return `true`. + | * In the latter example, because the type argument is erased as part of compilation it is + | * not possible to check whether the contents of the list are of the specified type. + | * + | * @return `true` if the receiver object is an instance of erasure of type `T0`; `false` otherwise. + | */ + """.stripMargin) + + add(Any_asInstanceOf, + """/** Cast the receiver object to be of type `T0`. + | * + | * Note that the success of a cast at runtime is modulo Scala's erasure semantics. + | * Therefore the expression `1.asInstanceOf[String]` will throw a `ClassCastException` at + | * runtime, while the expression `List(1).asInstanceOf[List[String]]` will not. + | * In the latter example, because the type argument is erased as part of compilation it is + | * not possible to check whether the contents of the list are of the requested type. + | * + | * @throws ClassCastException if the receiver object is not an instance of the erasure of type `T0`. + | * @return the receiver object. + | */ + """.stripMargin) + + add(Any_getClass, + """/** Returns the runtime class representation of the object. + | * + | * @return a class object corresponding to the runtime type of the receiver. + | */ + """.stripMargin) + + add(MatchableClass, + """/** The base trait of types that can be safely pattern matched against. + | * + | * See [[https://docs.scala-lang.org/scala3/reference/other-new-features/matchable.html]]. + | */ + """.stripMargin) + + add(AnyRefAlias, + """/** Class `AnyRef` is the root class of all ''reference types''. + | * All types except the value types descend from this class. + | */ + """.stripMargin) + + add(Object_eq, + """/** Tests whether the argument (`that`) is a reference to the receiver object (`this`). 
+ | * + | * The `eq` method implements an [[https://en.wikipedia.org/wiki/Equivalence_relation equivalence relation]] on + | * non-null instances of `AnyRef`, and has three additional properties: + | * + | * - It is consistent: for any non-null instances `x` and `y` of type `AnyRef`, multiple invocations of + | * `x.eq(y)` consistently returns `true` or consistently returns `false`. + | * - For any non-null instance `x` of type `AnyRef`, `x.eq(null)` and `null.eq(x)` returns `false`. + | * - `null.eq(null)` returns `true`. + | * + | * When overriding the `equals` or `hashCode` methods, it is important to ensure that their behavior is + | * consistent with reference equality. Therefore, if two objects are references to each other (`o1 eq o2`), they + | * should be equal to each other (`o1 == o2`) and they should hash to the same value (`o1.hashCode == o2.hashCode`). + | * + | * @param that the object to compare against this object for reference equality. + | * @return `true` if the argument is a reference to the receiver object; `false` otherwise. + | */ + """.stripMargin) + + add(Object_ne, + """/** Equivalent to `!(this eq that)`. + | * + | * @param that the object to compare against this object for reference equality. + | * @return `true` if the argument is not a reference to the receiver object; `false` otherwise. + | */ + """.stripMargin) + + add(Object_synchronized, + """/** Executes the code in `body` with an exclusive lock on `this`. + | * + | * @param body the code to execute + | * @return the result of `body` + | */ + """.stripMargin) + + add(Object_clone, + """/** Create a copy of the receiver object. + | * + | * The default implementation of the `clone` method is platform dependent. + | * + | * @note not specified by SLS as a member of AnyRef + | * @return a copy of the receiver object. + | */ + """.stripMargin) + + add(Object_finalize, + """/** Called by the garbage collector on the receiver object when there + | * are no more references to the object. 
+ | * + | * The details of when and if the `finalize` method is invoked, as + | * well as the interaction between `finalize` and non-local returns + | * and exceptions, are all platform dependent. + | * + | * @note not specified by SLS as a member of AnyRef + | */ + """.stripMargin) + + add(Object_notify, + """/** Wakes up a single thread that is waiting on the receiver object's monitor. + | * + | * @note not specified by SLS as a member of AnyRef + | */ + """.stripMargin) + + add(Object_notifyAll, + """/** Wakes up all threads that are waiting on the receiver object's monitor. + | * + | * @note not specified by SLS as a member of AnyRef + | */ + """.stripMargin) + + add(Object_wait, + """/** See [[https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html#wait--]]. + | * + | * @note not specified by SLS as a member of AnyRef + | */ + """.stripMargin) + + add(Object_waitL, + """/** See [[https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html#wait-long-]]. + | * + | * @param timeout the maximum time to wait in milliseconds. + | * @note not specified by SLS as a member of AnyRef + | */ + """.stripMargin) + + add(Object_waitLI, + """/** See [[https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html#wait-long-int-]] + | * + | * @param timeout the maximum time to wait in milliseconds. + | * @param nanos additional time, in nanoseconds range 0-999999. + | * @note not specified by SLS as a member of AnyRef + | */ + """.stripMargin) + + add(AnyKindClass, + """/** The super-type of all types. + | * + | * See [[https://docs.scala-lang.org/scala3/reference/other-new-features/kind-polymorphism.html]]. + | */ + """.stripMargin) + + add(andType, + """/** The intersection of two types. + | * + | * See [[https://docs.scala-lang.org/scala3/reference/new-types/intersection-types.html]]. + | */ + """.stripMargin) + + add(orType, + """/** The union of two types. + | * + | * See [[https://docs.scala-lang.org/scala3/reference/new-types/union-types.html]]. 
+ | */ + """.stripMargin) + + add(AnyValClass, + """/** `AnyVal` is the root class of all ''value types'', which describe values + | * not implemented as objects in the underlying host system. Value classes + | * are specified in Scala Language Specification, section 12.2. + | * + | * The standard implementation includes nine `AnyVal` subtypes: + | * + | * [[scala.Double]], [[scala.Float]], [[scala.Long]], [[scala.Int]], [[scala.Char]], + | * [[scala.Short]], and [[scala.Byte]] are the ''numeric value types''. + | * + | * [[scala.Unit]] and [[scala.Boolean]] are the ''non-numeric value types''. + | * + | * Other groupings: + | * + | * - The ''subrange types'' are [[scala.Byte]], [[scala.Short]], and [[scala.Char]]. + | * - The ''integer types'' include the subrange types as well as [[scala.Int]] and [[scala.Long]]. + | * - The ''floating point types'' are [[scala.Float]] and [[scala.Double]]. + | * + | * Prior to Scala 2.10, `AnyVal` was a sealed trait. Beginning with Scala 2.10, + | * however, it is possible to define a subclass of `AnyVal` called a ''user-defined value class'' + | * which is treated specially by the compiler. Properly-defined user value classes provide a way + | * to improve performance on user-defined types by avoiding object allocation at runtime, and by + | * replacing virtual method invocations with static method invocations. + | * + | * User-defined value classes which avoid object allocation... + | * + | * - must have a single `val` parameter that is the underlying runtime representation. + | * - can define `def`s, but no `val`s, `var`s, or nested `traits`s, `class`es or `object`s. + | * - typically extend no other trait apart from `AnyVal`. + | * - cannot be used in type tests or pattern matching. + | * - may not override `equals` or `hashCode` methods. 
+ | * + | * A minimal example: + | * {{{ + | * class Wrapper(val underlying: Int) extends AnyVal { + | * def foo: Wrapper = new Wrapper(underlying * 19) + | * } + | * }}} + | * + | * It's important to note that user-defined value classes are limited, and in some circumstances, + | * still must allocate a value class instance at runtime. These limitations and circumstances are + | * explained in greater detail in the [[https://docs.scala-lang.org/overviews/core/value-classes.html Value Classes and Universal Traits]]. + | */ + """.stripMargin) + + add(NullClass, + """/** `Null` is - together with [[scala.Nothing]] - at the bottom of the Scala type hierarchy. + | * + | * `Null` is the type of the `null` literal. It is a subtype of every type + | * except those of value classes. Value classes are subclasses of [[AnyVal]], which includes + | * primitive types such as [[Int]], [[Boolean]], and user-defined value classes. + | * + | * Since `Null` is not a subtype of value types, `null` is not a member of any such type. + | * For instance, it is not possible to assign `null` to a variable of type [[scala.Int]]. + | */ + """.stripMargin) + + add(NothingClass, + """/** `Nothing` is - together with [[scala.Null]] - at the bottom of Scala's type hierarchy. + | * + | * `Nothing` is a subtype of every other type (including [[scala.Null]]); there exist + | * ''no instances'' of this type. Although type `Nothing` is uninhabited, it is + | * nevertheless useful in several ways. For instance, the Scala library defines a value + | * [[scala.collection.immutable.Nil]] of type `List[Nothing]`. Because lists are covariant in Scala, + | * this makes [[scala.collection.immutable.Nil]] an instance of `List[T]`, for any element of type `T`. + | * + | * Another usage for Nothing is the return type for methods which never return normally. + | * One example is method error in [[scala.sys]], which always throws an exception. 
+ | */ + """.stripMargin) + + add(SingletonClass, + """/** `Singleton` is used by the compiler as a supertype for singleton types. This includes literal types, + | * as they are also singleton types. + | * + | * {{{ + | * scala> object A { val x = 42 } + | * defined object A + | * + | * scala> implicitly[A.type <:< Singleton] + | * res12: A.type <:< Singleton = generalized constraint + | * + | * scala> implicitly[A.x.type <:< Singleton] + | * res13: A.x.type <:< Singleton = generalized constraint + | * + | * scala> implicitly[42 <:< Singleton] + | * res14: 42 <:< Singleton = generalized constraint + | * + | * scala> implicitly[Int <:< Singleton] + | * ^ + | * error: Cannot prove that Int <:< Singleton. + | * }}} + | * + | * `Singleton` has a special meaning when it appears as an upper bound on a formal type + | * parameter. Normally, type inference in Scala widens singleton types to the underlying + | * non-singleton type. When a type parameter has an explicit upper bound of `Singleton`, + | * the compiler infers a singleton type. + | * + | * {{{ + | * scala> def check42[T](x: T)(implicit ev: T =:= 42): T = x + | * check42: [T](x: T)(implicit ev: T =:= 42)T + | * + | * scala> val x1 = check42(42) + | * ^ + | * error: Cannot prove that Int =:= 42. + | * + | * scala> def singleCheck42[T <: Singleton](x: T)(implicit ev: T =:= 42): T = x + | * singleCheck42: [T <: Singleton](x: T)(implicit ev: T =:= 42)T + | * + | * scala> val x2 = singleCheck42(42) + | * x2: Int = 42 + | * }}} + | * + | * See also [[https://docs.scala-lang.org/sips/42.type.html SIP-23 about Literal-based Singleton Types]]. 
+ | */ + """.stripMargin) +} diff --git a/tests/pos-with-compiler-cc/dotc/core/DenotTransformers.scala b/tests/pos-with-compiler-cc/dotc/core/DenotTransformers.scala new file mode 100644 index 000000000000..6690cae3a142 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/core/DenotTransformers.scala @@ -0,0 +1,82 @@ +package dotty.tools.dotc +package core + +import Periods._ +import SymDenotations._ +import Contexts._ +import Types._ +import Symbols._ +import Denotations._ +import Phases._ + +object DenotTransformers { + + /** A transformer group contains a sequence of transformers, + * ordered by the phase where they apply. Transformers are added + * to a group via `install`. + */ + + /** A transformer transforms denotations at a given phase */ + trait DenotTransformer extends Phase { + + /** The last phase during which the transformed denotations are valid */ + def lastPhaseId(using Context): Int = ctx.base.nextDenotTransformerId(id + 1) + + /** The validity period of the transformed denotations in the given context */ + def validFor(using Context): Period = + Period(ctx.runId, id + 1, lastPhaseId) + + /** The transformation method */ + def transform(ref: SingleDenotation)(using Context): SingleDenotation + } + + /** A transformer that only transforms the info field of denotations */ + trait InfoTransformer extends DenotTransformer { + + def transformInfo(tp: Type, sym: Symbol)(using Context): Type + + def transform(ref: SingleDenotation)(using Context): SingleDenotation = { + val sym = ref.symbol + if (sym.exists && !infoMayChange(sym)) ref + else { + val info1 = transformInfo(ref.info, ref.symbol) + if (info1 eq ref.info) ref + else ref match { + case ref: SymDenotation => + ref.copySymDenotation(info = info1).copyCaches(ref, ctx.phase.next) + case _ => + ref.derivedSingleDenotation(ref.symbol, info1) + } + } + } + + /** Denotations with a symbol where `infoMayChange` is false are guaranteed to be + * unaffected by this transform, so `transformInfo` need not be 
run. This + * can save time, and more importantly, can help avoid forcing symbol completers. + */ + protected def infoMayChange(sym: Symbol)(using Context): Boolean = true + } + + /** A transformer that only transforms SymDenotations. + * Note: Infos of non-sym denotations are left as is. So the transformer should + * be used before erasure only if this is not a problem. After erasure, all + * denotations are SymDenotations, so SymTransformers can be used freely. + */ + trait SymTransformer extends DenotTransformer { + + def transformSym(sym: SymDenotation)(using Context): SymDenotation + + def transform(ref: SingleDenotation)(using Context): SingleDenotation = ref match { + case ref: SymDenotation => transformSym(ref) + case _ => ref + } + } + + /** A `DenotTransformer` trait that has the identity as its `transform` method. + * You might want to inherit from this trait so that new denotations can be + * installed using `installAfter` and `enteredAfter` at the end of the phase. + */ + trait IdentityDenotTransformer extends DenotTransformer { + def transform(ref: SingleDenotation)(using Context): SingleDenotation = ref + } +} diff --git a/tests/pos-with-compiler-cc/dotc/core/Denotations.scala b/tests/pos-with-compiler-cc/dotc/core/Denotations.scala new file mode 100644 index 000000000000..246e359f0597 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/core/Denotations.scala @@ -0,0 +1,1376 @@ +package dotty.tools +package dotc +package core + +import SymDenotations.{ SymDenotation, ClassDenotation, NoDenotation, LazyType, stillValid, acceptStale, traceInvalid } +import Contexts._ +import Names._ +import NameKinds._ +import StdNames._ +import Symbols.NoSymbol +import Symbols._ +import Types._ +import Periods._ +import Flags._ +import DenotTransformers._ +import Decorators._ +import Signature.MatchDegree._ +import printing.Texts._ +import printing.Printer +import io.AbstractFile +import config.Config +import config.Printers.overload +import util.common._ +import 
typer.ProtoTypes.NoViewsAllowed +import collection.mutable.ListBuffer +import language.experimental.pureFunctions + +/** Denotations represent the meaning of symbols and named types. + * The following diagram shows how the principal types of denotations + * and their denoting entities relate to each other. Lines ending in + * a down-arrow `v` are member methods. The two methods shown in the diagram are + * "symbol" and "deref". Both methods are parameterized by the current context, + * and are effectively indexed by current period. + * + * Lines ending in a horizontal line mean subtyping (right is a subtype of left). + * + * NamedType + * | Symbol---------ClassSymbol + * | | | + * | denot | denot | denot + * v v v + * Denotation-+-----SingleDenotation-+------SymDenotation-+----ClassDenotation + * | | + * +-----MultiDenotation | + * | + * +--UniqueRefDenotation + * +--JointRefDenotation + * + * Here's a short summary of the classes in this diagram. + * + * NamedType A type consisting of a prefix type and a name, with fields + * prefix: Type + * name: Name + * It has two subtypes: TermRef and TypeRef + * Symbol A label for a definition or declaration in one compiler run + * ClassSymbol A symbol representing a class + * Denotation The meaning of a named type or symbol during a period + * MultiDenotation A denotation representing several overloaded members + * SingleDenotation A denotation representing a non-overloaded member or definition, with main fields + * symbol: Symbol + * info: Type + * UniqueRefDenotation A denotation referring to a single definition with some member type + * JointRefDenotation A denotation referring to a member that could resolve to several definitions + * SymDenotation A denotation representing a single definition with its original type, with main fields + * name: Name + * owner: Symbol + * flags: Flags + * privateWithin: Symbol + * annotations: List[Annotation] + * ClassDenotation A denotation representing a single class definition. 
+ */ +object Denotations { + + implicit def eqDenotation: CanEqual[Denotation, Denotation] = CanEqual.derived + + /** A PreDenotation represents a group of single denotations or a single multi-denotation + * It is used as an optimization to avoid forming MultiDenotations too eagerly. + */ + abstract class PreDenotation extends caps.Pure { + + /** A denotation in the group exists */ + def exists: Boolean + + /** First/last denotation in the group */ + def first: Denotation + def last: Denotation + + /** Convert to full denotation by &-ing all elements */ + def toDenot(pre: Type)(using Context): Denotation + + /** Group contains a denotation that refers to given symbol */ + def containsSym(sym: Symbol): Boolean + + /** Group contains a denotation with the same signature as `other` */ + def matches(other: SingleDenotation)(using Context): Boolean + + /** Keep only those denotations in this group which satisfy predicate `p`. */ + def filterWithPredicate(p: SingleDenotation => Boolean): PreDenotation + + /** Keep only those denotations in this group which have a signature + * that's not already defined by `denots`. + */ + def filterDisjoint(denots: PreDenotation)(using Context): PreDenotation + + /** Keep only those inherited members M of this predenotation for which the following is true + * - M is not marked Private + * - If M has a unique symbol, it does not appear in `prevDenots`. + * - M's signature as seen from prefix `pre` does not appear in `ownDenots` + * Return the denotation as seen from `pre`. + * Called from SymDenotations.computeMember. There, `ownDenots` are the denotations found in + * the base class, which shadow any inherited denotations with the same signature. + * `prevDenots` are the denotations that are defined in the class or inherited from + * a base type which comes earlier in the linearization. 
+ */ + def mapInherited(ownDenots: PreDenotation, prevDenots: PreDenotation, pre: Type)(using Context): PreDenotation + + /** Keep only those denotations in this group that have all of the flags in `required`, + * but none of the flags in `excluded`. + */ + def filterWithFlags(required: FlagSet, excluded: FlagSet)(using Context): PreDenotation + + /** Map `f` over all single denotations and aggregate the results with `g`. */ + def aggregate[T](f: SingleDenotation => T, g: (T, T) => T): T + + private var cachedPrefix: Type = _ + private var cachedAsSeenFrom: AsSeenFromResult = _ + private var validAsSeenFrom: Period = Nowhere + + type AsSeenFromResult <: PreDenotation + + /** The denotation with info(s) as seen from prefix type */ + def asSeenFrom(pre: Type)(using Context): AsSeenFromResult = + if (Config.cacheAsSeenFrom) { + if ((cachedPrefix ne pre) || ctx.period != validAsSeenFrom) { + cachedAsSeenFrom = computeAsSeenFrom(pre) + cachedPrefix = pre + validAsSeenFrom = if (pre.isProvisional) Nowhere else ctx.period + } + cachedAsSeenFrom + } + else computeAsSeenFrom(pre) + + protected def computeAsSeenFrom(pre: Type)(using Context): AsSeenFromResult + + /** The union of two groups. */ + def union(that: PreDenotation): PreDenotation = + if (!this.exists) that + else if (!that.exists) this + else DenotUnion(this, that) + } + + /** A denotation is the result of resolving + * a name (either simple identifier or select) during a given period. + * + * Denotations can be combined with `&` and `|`. + * & is conjunction, | is disjunction. + * + * `&` will create an overloaded denotation from two + * non-overloaded denotations if their signatures differ. + * Analogously `|` of two denotations with different signatures will give + * an empty denotation `NoDenotation`. + * + * A denotation might refer to `NoSymbol`. 
This is the case if the denotation + * was produced from a disjunction of two denotations with different symbols + * and there was no common symbol in a superclass that could substitute for + * both symbols. Here is an example: + * + * Say, we have: + * + * class A { def f: A } + * class B { def f: B } + * val x: A | B = if (test) new A else new B + * val y = x.f + * + * Then the denotation of `y` is `SingleDenotation(NoSymbol, A | B)`. + * + * @param symbol The referencing symbol, or NoSymbol is none exists + */ + abstract class Denotation(val symbol: Symbol, protected var myInfo: Type) extends PreDenotation with printing.Showable { + type AsSeenFromResult <: Denotation + + /** The type info. + * The info is an instance of TypeType iff this is a type denotation + * Uncompleted denotations set myInfo to a LazyType. + */ + final def info(using Context): Type = { + def completeInfo = { // Written this way so that `info` is small enough to be inlined + this.asInstanceOf[SymDenotation].completeFrom(myInfo.asInstanceOf[LazyType]); info + } + if (myInfo.isInstanceOf[LazyType]) completeInfo else myInfo + } + + /** The type info, or, if this is a SymDenotation where the symbol + * is not yet completed, the completer + */ + def infoOrCompleter: Type + + /** The period during which this denotation is valid. */ + def validFor: Period + + /** Is this a reference to a type symbol? */ + def isType: Boolean + + /** Is this a reference to a term symbol? */ + def isTerm: Boolean = !isType + + /** Is this denotation overloaded? */ + final def isOverloaded: Boolean = isInstanceOf[MultiDenotation] + + /** Denotation points to unique symbol; false for overloaded denotations + * and JointRef denotations. + */ + def hasUniqueSym: Boolean + + /** The name of the denotation */ + def name(using Context): Name + + /** The signature of the denotation. 
*/ + def signature(using Context): Signature + + /** Resolve overloaded denotation to pick the ones with the given signature + * when seen from prefix `site`. + * @param relaxed When true, consider only parameter signatures for a match. + */ + def atSignature(sig: Signature, targetName: Name, site: Type = NoPrefix, relaxed: Boolean = false)(using Context): Denotation + + /** The variant of this denotation that's current in the given context. + * If no such denotation exists, returns the denotation with each alternative + * at its first point of definition. + */ + def current(using Context): Denotation + + /** Is this denotation different from NoDenotation or an ErrorDenotation? */ + def exists: Boolean = true + + /** A denotation with the info of this denotation transformed using `f` */ + def mapInfo(f: Type => Type)(using Context): Denotation + + /** If this denotation does not exist, fallback to alternative */ + inline def orElse(inline that: Denotation): Denotation = if (this.exists) this else that + + /** The set of alternative single-denotations making up this denotation */ + final def alternatives: List[SingleDenotation] = altsWith(alwaysTrue) + + /** The alternatives of this denotation that satisfy the predicate `p`. */ + def altsWith(p: Symbol => Boolean): List[SingleDenotation] + + /** The unique alternative of this denotation that satisfies the predicate `p`, + * or NoDenotation if no satisfying alternative exists. + * @throws TypeError if there is at more than one alternative that satisfies `p`. + */ + def suchThat(p: Symbol => Boolean)(using Context): SingleDenotation + + override def filterWithPredicate(p: SingleDenotation => Boolean): Denotation + + /** If this is a SingleDenotation, return it, otherwise throw a TypeError */ + def checkUnique(using Context): SingleDenotation = suchThat(alwaysTrue) + + /** Does this denotation have an alternative that satisfies the predicate `p`? 
*/ + def hasAltWith(p: SingleDenotation => Boolean): Boolean + + /** The denotation made up from the alternatives of this denotation that + * are accessible from prefix `pre`, or NoDenotation if no accessible alternative exists. + */ + def accessibleFrom(pre: Type, superAccess: Boolean = false)(using Context): Denotation + + /** Find member of this denotation with given `name`, all `required` + * flags and no `excluded` flag, and produce a denotation that contains the type of the member + * as seen from given prefix `pre`. + */ + def findMember(name: Name, pre: Type, required: FlagSet, excluded: FlagSet)(using Context): Denotation = + info.findMember(name, pre, required, excluded) + + /** If this denotation is overloaded, filter with given predicate. + * If result is still overloaded throw a TypeError. + * Note: disambiguate is slightly different from suchThat in that + * single-denotations that do not satisfy the predicate are left alone + * (whereas suchThat would map them to NoDenotation). + */ + inline def disambiguate(inline p: Symbol => Boolean)(using Context): SingleDenotation = this match { + case sdenot: SingleDenotation => sdenot + case mdenot => suchThat(p) orElse NoQualifyingRef(alternatives) + } + + /** Return symbol in this denotation that satisfies the given predicate. + * if generateStubs is specified, return a stubsymbol if denotation is a missing ref. + * Throw a `TypeError` if predicate fails to disambiguate symbol or no alternative matches. 
+ */ + def requiredSymbol(kind: String, + name: Name, + site: Denotation = NoDenotation, + args: List[Type] = Nil, + source: AbstractFile | Null = null, + generateStubs: Boolean = true) + (p: Symbol => Boolean) + (using Context): Symbol = + disambiguate(p) match { + case m @ MissingRef(ownerd, name) if generateStubs => + if ctx.settings.YdebugMissingRefs.value then m.ex.printStackTrace() + newStubSymbol(ownerd.symbol, name, source) + case NoDenotation | _: NoQualifyingRef | _: MissingRef => + def argStr = if (args.isEmpty) "" else i" matching ($args%, %)" + val msg = + if site.exists then em"$site does not have a member $kind $name$argStr" + else em"missing: $kind $name$argStr" + throw TypeError(msg) + case denot => + denot.symbol + } + + def requiredMethod(pname: PreName)(using Context): TermSymbol = { + val name = pname.toTermName + info.member(name).requiredSymbol("method", name, this)(_.is(Method)).asTerm + } + def requiredMethodRef(name: PreName)(using Context): TermRef = + requiredMethod(name).termRef + + def requiredMethod(pname: PreName, argTypes: List[Type])(using Context): TermSymbol = { + val name = pname.toTermName + info.member(name).requiredSymbol("method", name, this, argTypes) { x => + x.is(Method) && { + x.info.paramInfoss match { + case paramInfos :: Nil => paramInfos.corresponds(argTypes)(_ =:= _) + case _ => false + } + } + }.asTerm + } + def requiredMethodRef(name: PreName, argTypes: List[Type])(using Context): TermRef = + requiredMethod(name, argTypes).termRef + + def requiredValue(pname: PreName)(using Context): TermSymbol = { + val name = pname.toTermName + info.member(name).requiredSymbol("field or getter", name, this)(_.info.isParameterless).asTerm + } + def requiredValueRef(name: PreName)(using Context): TermRef = + requiredValue(name).termRef + + def requiredClass(pname: PreName)(using Context): ClassSymbol = { + val name = pname.toTypeName + info.member(name).requiredSymbol("class", name, this)(_.isClass).asClass + } + + def 
requiredType(pname: PreName)(using Context): TypeSymbol = { + val name = pname.toTypeName + info.member(name).requiredSymbol("type", name, this)(_.isType).asType + } + + /** The alternative of this denotation that has a type matching `targetType` when seen + * as a member of type `site` and that has a target name matching `targetName`, or + * `NoDenotation` if none exists. + */ + def matchingDenotation(site: Type, targetType: Type, targetName: Name)(using Context): SingleDenotation = { + def qualifies(sym: Symbol) = + site.memberInfo(sym).matchesLoosely(targetType) && sym.hasTargetName(targetName) + if (isOverloaded) + atSignature(targetType.signature, targetName, site, relaxed = true) match { + case sd: SingleDenotation => sd.matchingDenotation(site, targetType, targetName) + case md => md.suchThat(qualifies(_)) + } + else if (exists && !qualifies(symbol)) NoDenotation + else asSingleDenotation + } + + /** Form a denotation by conjoining with denotation `that`. + * + * NoDenotations are dropped. MultiDenotations are handled by merging + * parts with same signatures. SingleDenotations with equal signatures + * are joined by following this sequence of steps: + * + * 1. If exactly one the denotations has an inaccessible symbol, pick the other one. + * 2. Otherwise, if one of the infos overrides the other one, and the associated + * symbol does not score strictly lower than the other one, + * pick the associated denotation. + * 3. Otherwise, if the two infos can be combined with `infoMeet`, pick that as + * result info, and pick the symbol that scores higher as result symbol, + * or pick `sym1` as a tie breaker. The picked info and symbol are combined + * in a JointDenotation. + * 4. Otherwise, if one of the two symbols scores strongly higher than the + * other one, pick the associated denotation. + * 5. Otherwise return a multi-denotation consisting of both denotations. 
+ * + * Symbol scoring is determined according to the following ranking + * where earlier criteria trump later ones. Cases marked with (*) + * give a strong score advantage, the others a weak one. + * + * 1. The symbol exists, and the other one does not. (*) + * 2. The symbol is not a bridge, but the other one is. (*) + * 3. The symbol is concrete, and the other one is deferred + * 4. The symbol appears before the other in the linearization of `pre` + * 5. The symbol's visibility is strictly greater than the other one's. + * 6. The symbol is a method, but the other one is not. + */ + def meet(that: Denotation, pre: Type, safeIntersection: Boolean = false)(using Context): Denotation = { + /** Try to merge denot1 and denot2 without adding a new signature. */ + def mergeDenot(denot1: Denotation, denot2: SingleDenotation): Denotation = denot1 match { + case denot1 @ MultiDenotation(denot11, denot12) => + val d1 = mergeDenot(denot11, denot2) + if (d1.exists) denot1.derivedUnionDenotation(d1, denot12) + else { + val d2 = mergeDenot(denot12, denot2) + if (d2.exists) denot1.derivedUnionDenotation(denot11, d2) + else NoDenotation + } + case denot1: SingleDenotation => + if (denot1 eq denot2) denot1 + else if denot1.matches(denot2) then mergeSingleDenot(denot1, denot2) + else NoDenotation + } + + /** Try to merge single-denotations. */ + def mergeSingleDenot(denot1: SingleDenotation, denot2: SingleDenotation): Denotation = + val info1 = denot1.info + val info2 = denot2.info + val sym1 = denot1.symbol + val sym2 = denot2.symbol + + /** Does `owner1` come before `owner2` in the linearization of `pre`? 
*/ + def linearScore(owner1: Symbol, owner2: Symbol): Int = + + def searchBaseClasses(bcs: List[ClassSymbol]): Int = bcs match + case bc :: bcs1 => + if bc eq owner1 then 1 + else if bc eq owner2 then -1 + else searchBaseClasses(bcs1) + case Nil => 0 + + if owner1 eq owner2 then 0 + else if owner1.derivesFrom(owner2) then 1 + else if owner2.derivesFrom(owner1) then -1 + else searchBaseClasses(pre.baseClasses) + end linearScore + + /** Similar to SymDenotation#accessBoundary, but without the special cases. */ + def accessBoundary(sym: Symbol) = + if (sym.is(Private)) sym.owner + else sym.privateWithin.orElse( + if (sym.is(Protected)) sym.owner.enclosingPackageClass + else defn.RootClass) + + def isHidden(sym: Symbol) = sym.exists && !sym.isAccessibleFrom(pre) + // In typer phase filter out denotations with symbols that are not + // accessible. After typer, this is not possible since we cannot guarantee + // that the current owner is set correctly. See pos/14660.scala. + val hidden1 = isHidden(sym1) && ctx.isTyper + val hidden2 = isHidden(sym2) && ctx.isTyper + if hidden1 && !hidden2 then denot2 + else if hidden2 && !hidden1 then denot1 + else + // The score that determines which symbol to pick for the result denotation. + // A value > 0 means pick `sym1`, < 0 means pick `sym2`. + // A value of +/- 2 means pick one of the denotations as a tie-breaker + // if a common info does not exist. 
+ val symScore: Int = + if !sym1.exists then -2 + else if !sym2.exists then 2 + else if sym1.is(Bridge) && !sym2.is(Bridge) then -2 + else if sym2.is(Bridge) && !sym1.is(Bridge) then 2 + else if !sym1.isAsConcrete(sym2) then -1 + else if !sym2.isAsConcrete(sym1) then 1 + else + val linScore = linearScore(sym1.owner, sym2.owner) + if linScore != 0 then linScore + else + val boundary1 = accessBoundary(sym1) + val boundary2 = accessBoundary(sym2) + if boundary1.isProperlyContainedIn(boundary2) then -1 + else if boundary2.isProperlyContainedIn(boundary1) then 1 + else if sym2.is(Method) && !sym1.is(Method) then -1 + else if sym1.is(Method) && !sym2.is(Method) then 1 + else 0 + + val relaxedOverriding = ctx.explicitNulls && (sym1.is(JavaDefined) || sym2.is(JavaDefined)) + val matchLoosely = sym1.matchNullaryLoosely || sym2.matchNullaryLoosely + + if symScore <= 0 && info2.overrides(info1, relaxedOverriding, matchLoosely, checkClassInfo = false) then + denot2 + else if symScore >= 0 && info1.overrides(info2, relaxedOverriding, matchLoosely, checkClassInfo = false) then + denot1 + else + val jointInfo = infoMeet(info1, info2, safeIntersection) + if jointInfo.exists then + val sym = if symScore >= 0 then sym1 else sym2 + JointRefDenotation(sym, jointInfo, denot1.validFor & denot2.validFor, pre, denot1.isRefinedMethod || denot2.isRefinedMethod) + else if symScore == 2 then denot1 + else if symScore == -2 then denot2 + else + overload.println(i"overloaded with same signature: ${sym1.showLocated}: $info1 / ${sym2.showLocated}: $info2, info = ${info1.getClass}, ${info2.getClass}, $jointInfo") + MultiDenotation(denot1, denot2) + end mergeSingleDenot + + if (this eq that) this + else if (!this.exists) that + else if (!that.exists) this + else that match { + case that: SingleDenotation => + val r = mergeDenot(this, that) + if (r.exists) r else MultiDenotation(this, that) + case that @ MultiDenotation(denot1, denot2) => + this.meet(denot1, pre).meet(denot2, pre) + } + } + + final 
def asSingleDenotation: SingleDenotation = asInstanceOf[SingleDenotation] + final def asSymDenotation: SymDenotation = asInstanceOf[SymDenotation] + + def toText(printer: Printer): Text = printer.toText(this) + + // ------ PreDenotation ops ---------------------------------------------- + + final def toDenot(pre: Type)(using Context): Denotation = this + final def containsSym(sym: Symbol): Boolean = hasUniqueSym && (symbol eq sym) + } + + // ------ Info meets ---------------------------------------------------- + + /** Merge parameter names of lambda types. If names in corresponding positions match, keep them, + * otherwise generate new synthetic names. + */ + private def mergeParamNames(tp1: LambdaType, tp2: LambdaType): List[tp1.ThisName] = + (for ((name1, name2, idx) <- tp1.paramNames.lazyZip(tp2.paramNames).lazyZip(tp1.paramNames.indices)) + yield if (name1 == name2) name1 else tp1.companion.syntheticParamName(idx)).toList + + /** Normally, `tp1 & tp2`, with extra care taken to return `tp1` or `tp2` directly if that's + * a valid answer. Special cases for matching methods and classes, with + * the possibility of returning NoType. Special handling of ExprTypes, where mixed + * intersections widen the ExprType away. 
+ */ + def infoMeet(tp1: Type, tp2: Type, safeIntersection: Boolean)(using Context): Type = + if tp1 eq tp2 then tp1 + else tp1 match + case tp1: TypeBounds => + tp2 match + case tp2: TypeBounds => if safeIntersection then tp1 safe_& tp2 else tp1 & tp2 + case tp2: ClassInfo => tp2 + case _ => NoType + case tp1: ClassInfo => + tp2 match + case tp2: ClassInfo if tp1.cls eq tp2.cls => tp1.derivedClassInfo(tp1.prefix & tp2.prefix) + case tp2: TypeBounds => tp1 + case _ => NoType + case tp1: MethodType => + tp2 match + case tp2: MethodType + if TypeComparer.matchingMethodParams(tp1, tp2) + && tp1.isImplicitMethod == tp2.isImplicitMethod + && tp1.isErasedMethod == tp2.isErasedMethod => + val resType = infoMeet(tp1.resType, tp2.resType.subst(tp2, tp1), safeIntersection) + if resType.exists then + tp1.derivedLambdaType(mergeParamNames(tp1, tp2), tp1.paramInfos, resType) + else NoType + case _ => NoType + case tp1: PolyType => + tp2 match + case tp2: PolyType if tp1.paramNames.hasSameLengthAs(tp2.paramNames) => + val resType = infoMeet(tp1.resType, tp2.resType.subst(tp2, tp1), safeIntersection) + if resType.exists then + tp1.derivedLambdaType( + mergeParamNames(tp1, tp2), + tp1.paramInfos.zipWithConserve(tp2.paramInfos)( _ & _ ), + resType) + else NoType + case _ => NoType + case ExprType(rtp1) => + tp2 match + case ExprType(rtp2) => ExprType(rtp1 & rtp2) + case _ => infoMeet(rtp1, tp2, safeIntersection) + case _ => + tp2 match + case _: MethodType | _: PolyType => NoType + case _ => tp1 & tp2.widenExpr + end infoMeet + + /** A non-overloaded denotation */ + abstract class SingleDenotation(symbol: Symbol, initInfo: Type) extends Denotation(symbol, initInfo) { + protected def newLikeThis(symbol: Symbol, info: Type, pre: Type, isRefinedMethod: Boolean): SingleDenotation + + final def name(using Context): Name = symbol.name + + /** For SymDenotation, this is NoPrefix. For other denotations this is the prefix + * under which the denotation was constructed. 
+ * + * Note that `asSeenFrom` might return a `SymDenotation` and therefore in + * general one cannot rely on `prefix` being set, see + * `Config.reuseSymDenotations` for details. + */ + def prefix: Type = NoPrefix + + /** True if the info of this denotation comes from a refinement. */ + def isRefinedMethod: Boolean = false + + /** For SymDenotations, the language-specific signature of the info, depending on + * where the symbol is defined. For non-SymDenotations, the Scala 3 + * signature. + * + * Invariants: + * - Before erasure, the signature of a denotation is always equal to the + * signature of its corresponding initial denotation. + * - Two distinct overloads will have SymDenotations with distinct + * signatures (the SELECTin tag in Tasty relies on this to refer to an + * overload unambiguously). Note that this only applies to + * SymDenotations, in general we cannot assume that distinct + * SingleDenotations will have distinct signatures (cf #9050). + */ + final def signature(using Context): Signature = + signature(sourceLanguage = if isType || !this.isInstanceOf[SymDenotation] then SourceLanguage.Scala3 else SourceLanguage(symbol)) + + /** Overload of `signature` which lets the caller pick the language used + * to compute the signature of the info. Useful to match denotations defined in + * different classes (see `matchesLoosely`). + */ + def signature(sourceLanguage: SourceLanguage)(using Context): Signature = + if (isType) Signature.NotAMethod // don't force info if this is a type denotation + else info match { + case info: MethodOrPoly => + try info.signature(sourceLanguage) + catch { // !!! 
DEBUG + case scala.util.control.NonFatal(ex) => + report.echo(s"cannot take signature of $info") + throw ex + } + case _ => Signature.NotAMethod + } + + def derivedSingleDenotation(symbol: Symbol, info: Type, pre: Type = this.prefix, isRefinedMethod: Boolean = this.isRefinedMethod)(using Context): SingleDenotation = + if ((symbol eq this.symbol) && (info eq this.info) && (pre eq this.prefix) && (isRefinedMethod == this.isRefinedMethod)) this + else newLikeThis(symbol, info, pre, isRefinedMethod) + + def mapInfo(f: Type => Type)(using Context): SingleDenotation = + derivedSingleDenotation(symbol, f(info)) + + inline def orElse(inline that: SingleDenotation): SingleDenotation = if (this.exists) this else that + + def altsWith(p: Symbol => Boolean): List[SingleDenotation] = + if (exists && p(symbol)) this :: Nil else Nil + + def suchThat(p: Symbol => Boolean)(using Context): SingleDenotation = + if (exists && p(symbol)) this else NoDenotation + + def hasAltWith(p: SingleDenotation => Boolean): Boolean = + exists && p(this) + + def accessibleFrom(pre: Type, superAccess: Boolean)(using Context): Denotation = + if (!symbol.exists || symbol.isAccessibleFrom(pre, superAccess)) this else NoDenotation + + def atSignature(sig: Signature, targetName: Name, site: Type, relaxed: Boolean)(using Context): SingleDenotation = + val situated = if site == NoPrefix then this else asSeenFrom(site) + val sigMatches = sig.matchDegree(situated.signature) match + case FullMatch => + true + case MethodNotAMethodMatch => + // See comment in `matches` + relaxed && !symbol.is(JavaDefined) + case ParamMatch => + relaxed + case noMatch => + false + if sigMatches && symbol.hasTargetName(targetName) then this else NoDenotation + + def matchesImportBound(bound: Type)(using Context): Boolean = + if bound.isRef(defn.NothingClass) then false + else if bound.isAny then true + else NoViewsAllowed.normalizedCompatible(info, bound, keepConstraint = false) + + // ------ Transformations 
----------------------------------------- + + private var myValidFor: Period = Nowhere + + def validFor: Period = myValidFor + def validFor_=(p: Period): Unit = { + myValidFor = p + symbol.invalidateDenotCache() + } + + /** The next SingleDenotation in this run, with wrap-around from last to first. + * + * There may be several `SingleDenotation`s with different validity + * representing the same underlying definition at different phases. + * These are called a "flock". Flock members are generated by + * @See current. Flock members are connected in a ring + * with their `nextInRun` fields. + * + * There are the following invariants concerning flock members + * + * 1) validity periods are non-overlapping + * 2) the union of all validity periods is a contiguous + * interval. + */ + protected var nextInRun: SingleDenotation = this + + /** The version of this SingleDenotation that was valid in the first phase + * of this run. + */ + def initial: SingleDenotation = + if (validFor.firstPhaseId <= 1) this + else { + var current = nextInRun + while (current.validFor.code > this.myValidFor.code) current = current.nextInRun + current + } + + def history: List[SingleDenotation] = { + val b = new ListBuffer[SingleDenotation] + var current = initial + while ({ + b += (current) + current = current.nextInRun + current ne initial + }) + () + b.toList + } + + /** Invalidate all caches and fields that depend on base classes and their contents */ + def invalidateInheritedInfo(): Unit = () + + private def updateValidity()(using Context): this.type = { + assert( + ctx.runId >= validFor.runId + || ctx.settings.YtestPickler.value // mixing test pickler with debug printing can travel back in time + || ctx.mode.is(Mode.Printing) // no use to be picky when printing error messages + || symbol.isOneOf(ValidForeverFlags), + s"denotation $this invalid in run ${ctx.runId}. 
ValidFor: $validFor") + var d: SingleDenotation = this + while ({ + d.validFor = Period(ctx.runId, d.validFor.firstPhaseId, d.validFor.lastPhaseId) + d.invalidateInheritedInfo() + d = d.nextInRun + d ne this + }) + () + this + } + + /** Move validity period of this denotation to a new run. Throw a StaleSymbol error + * if denotation is no longer valid. + * However, StaleSymbol error is not thrown in the following situations: + * + * - If acceptStale returns true (e.g. because we are in the IDE), + * update the symbol to the new version if it exists, or return + * the old version otherwise. + * - If the symbol did not have a denotation that was defined at the current phase + * return a NoDenotation instead. + */ + private def bringForward()(using Context): SingleDenotation = { + this match { + case symd: SymDenotation => + if (stillValid(symd)) return updateValidity() + if acceptStale(symd) && symd.initial.validFor.firstPhaseId <= ctx.lastPhaseId then + // New run might have fewer phases than old, so symbol might no longer be + // visible at all. TabCompleteTests have examples where this happens. + return symd.currentSymbol.denot.orElse(symd).updateValidity() + case _ => + } + if (!symbol.exists) return updateValidity() + if (!coveredInterval.containsPhaseId(ctx.phaseId)) return NoDenotation + if (ctx.debug) traceInvalid(this) + staleSymbolError + } + + /** The next defined denotation (following `nextInRun`) or an arbitrary + * undefined denotation, if all denotations in a `nextinRun` cycle are + * undefined. + */ + private def nextDefined: SingleDenotation = { + var p1 = this + var p2 = nextInRun + while (p1.validFor == Nowhere && (p1 ne p2)) { + p1 = p1.nextInRun + p2 = p2.nextInRun.nextInRun + } + p1 + } + + /** Skip any denotations that have been removed by an installAfter or that + * are otherwise undefined. 
+ */ + def skipRemoved(using Context): SingleDenotation = + if (myValidFor.code <= 0) nextDefined else this + + /** Produce a denotation that is valid for the given context. + * Usually called when !(validFor contains ctx.period) + * (even though this is not a precondition). + * If the runId of the context is the same as runId of this denotation, + * the right flock member is located, or, if it does not exist yet, + * created by invoking a transformer (@See Transformers). + * If the runId's differ, but this denotation is a SymDenotation + * and its toplevel owner class or module + * is still a member of its enclosing package, then the whole flock + * is brought forward to be valid in the new runId. Otherwise + * the symbol is stale, which constitutes an internal error. + */ + def current(using Context): SingleDenotation = + util.Stats.record("current") + val currentPeriod = ctx.period + val valid = myValidFor + + def assertNotPackage(d: SingleDenotation, transformer: DenotTransformer) = d match + case d: ClassDenotation => + assert(!d.is(Package), s"illegal transformation of package denotation by transformer $transformer") + case _ => + + def escapeToNext = nextDefined.ensuring(_.validFor != Nowhere) + + def toNewRun = + util.Stats.record("current.bringForward") + if exists then initial.bringForward().current else this + + def goForward = + var cur = this + // search for containing period as long as nextInRun increases. 
+ var next = nextInRun + while next.validFor.code > valid.code && !(next.validFor contains currentPeriod) do + cur = next + next = next.nextInRun + if next.validFor.code > valid.code then + // in this case, next.validFor contains currentPeriod + cur = next + cur + else + //println(s"might need new denot for $cur, valid for ${cur.validFor} at $currentPeriod") + // not found, cur points to highest existing variant + val nextTransformerId = ctx.base.nextDenotTransformerId(cur.validFor.lastPhaseId) + if currentPeriod.lastPhaseId <= nextTransformerId then + cur.validFor = Period(currentPeriod.runId, cur.validFor.firstPhaseId, nextTransformerId) + else + var startPid = nextTransformerId + 1 + val transformer = ctx.base.denotTransformers(nextTransformerId) + //println(s"transforming $this with $transformer") + val savedPeriod = ctx.period + val mutCtx = ctx.asInstanceOf[FreshContext] + try + mutCtx.setPhase(transformer) + next = transformer.transform(cur) + // We temporarily update the context with the new phase instead of creating a + // new one. This is done for performance. We cut down on about 30% of context + // creations that way, and also avoid phase caches in contexts to get large. + // To work correctly, we need to demand that the context with the new phase + // is not retained in the result. 
+ catch case ex: CyclicReference => + // println(s"error while transforming $this") + throw ex + finally + mutCtx.setPeriod(savedPeriod) + if next eq cur then + startPid = cur.validFor.firstPhaseId + else + assertNotPackage(next, transformer) + next.insertAfter(cur) + cur = next + cur.validFor = Period(currentPeriod.runId, startPid, transformer.lastPhaseId) + //printPeriods(cur) + //println(s"new denot: $cur, valid for ${cur.validFor}") + cur.current // multiple transformations could be required + end goForward + + def goBack: SingleDenotation = + // currentPeriod < end of valid; in this case a version must exist + // but to be defensive we check for infinite loop anyway + var cur = this + var cnt = 0 + while !(cur.validFor contains currentPeriod) do + //println(s"searching: $cur at $currentPeriod, valid for ${cur.validFor}") + cur = cur.nextInRun + // Note: One might be tempted to add a `prev` field to get to the new denotation + // more directly here. I tried that, but it degrades rather than improves + // performance: Test setup: Compile everything in dotc and immediate subdirectories + // 10 times. Best out of 10: 18154ms with `prev` field, 17777ms without. + cnt += 1 + if cnt > MaxPossiblePhaseId then + return atPhase(coveredInterval.firstPhaseId)(current) + cur + end goBack + + if valid.code <= 0 then + // can happen if we sit on a stale denotation which has been replaced + // wholesale by an installAfter; in this case, proceed to the next + // denotation and try again. + escapeToNext + else if valid.runId != currentPeriod.runId then + toNewRun + else if currentPeriod.code > valid.code then + goForward + else + goBack + end current + + private def demandOutsideDefinedMsg(using Context): String = + s"demanding denotation of $this at phase ${ctx.phase}(${ctx.phaseId}) outside defined interval: defined periods are${definedPeriodsString}" + + /** Install this denotation to be the result of the given denotation transformer. 
+ * This is the implementation of the same-named method in SymDenotations. + * It's placed here because it needs access to private fields of SingleDenotation. + * @pre Can only be called in `phase.next`. + */ + protected def installAfter(phase: DenotTransformer)(using Context): Unit = { + val targetId = phase.next.id + if (ctx.phaseId != targetId) atPhase(phase.next)(installAfter(phase)) + else { + val current = symbol.current + // println(s"installing $this after $phase/${phase.id}, valid = ${current.validFor}") + // printPeriods(current) + this.validFor = Period(ctx.runId, targetId, current.validFor.lastPhaseId) + if (current.validFor.firstPhaseId >= targetId) + current.replaceWith(this) + else { + current.validFor = Period(ctx.runId, current.validFor.firstPhaseId, targetId - 1) + insertAfter(current) + } + } + // printPeriods(this) + } + + /** Apply a transformation `f` to all denotations in this group that start at or after + * given phase. Denotations are replaced while keeping the same validity periods. + */ + protected def transformAfter(phase: DenotTransformer, f: SymDenotation => SymDenotation)(using Context): Unit = { + var current = symbol.current + while (current.validFor.firstPhaseId < phase.id && (current.nextInRun.validFor.code > current.validFor.code)) + current = current.nextInRun + var hasNext = true + while ((current.validFor.firstPhaseId >= phase.id) && hasNext) { + val current1: SingleDenotation = f(current.asSymDenotation) + if (current1 ne current) { + current1.validFor = current.validFor + current.replaceWith(current1) + } + hasNext = current1.nextInRun.validFor.code > current1.validFor.code + current = current1.nextInRun + } + } + + /** Insert this denotation so that it follows `prev`. */ + private def insertAfter(prev: SingleDenotation) = { + this.nextInRun = prev.nextInRun + prev.nextInRun = this + } + + /** Insert this denotation instead of `old`. 
+ * Also ensure that `old` refers with `nextInRun` to this denotation + * and set its `validFor` field to `Nowhere`. This is necessary so that + * references to the old denotation can be brought forward via `current` + * to a valid denotation. + * + * The code to achieve this is subtle in that it works correctly + * whether the replaced denotation is the only one in its cycle or not. + */ + private[dotc] def replaceWith(newd: SingleDenotation): Unit = { + var prev = this + while (prev.nextInRun ne this) prev = prev.nextInRun + // order of next two assignments is important! + prev.nextInRun = newd + newd.nextInRun = nextInRun + validFor = Nowhere + nextInRun = newd + } + + def staleSymbolError(using Context): Nothing = + inDetachedContext: + throw new StaleSymbol(staleSymbolMsg) + + def staleSymbolMsg(using Context): String = { + def ownerMsg = this match { + case denot: SymDenotation => s"in ${denot.owner}" + case _ => "" + } + s"stale symbol; $this#${symbol.id} $ownerMsg, defined in ${myValidFor}, is referred to in run ${ctx.period}" + } + + /** The period (interval of phases) for which there exists + * a valid denotation in this flock. + */ + def coveredInterval(using Context): Period = { + var cur = this + var cnt = 0 + var interval = validFor + while ({ + cur = cur.nextInRun + cnt += 1 + assert(cnt <= MaxPossiblePhaseId, demandOutsideDefinedMsg) + interval |= cur.validFor + cur ne this + }) + () + interval + } + + /** Show declaration string; useful for showing declarations + * as seen from subclasses. 
+ */ + def showDcl(using Context): String = ctx.printer.dclText(this).show + + override def toString: String = + if (symbol == NoSymbol) symbol.toString + else s"" + + def definedPeriodsString: String = { + var sb = new StringBuilder() + var cur = this + var cnt = 0 + while ({ + sb.append(" " + cur.validFor) + cur = cur.nextInRun + cnt += 1 + if (cnt > MaxPossiblePhaseId) { sb.append(" ..."); cur = this } + cur ne this + }) + () + sb.toString + } + + // ------ PreDenotation ops ---------------------------------------------- + + final def first: SingleDenotation = this + final def last: SingleDenotation = this + + def matches(other: SingleDenotation)(using Context): Boolean = + symbol.hasTargetName(other.symbol.targetName) + && matchesLoosely(other) + + /** `matches` without a target name check. + * + * For definitions coming from different languages, we pick a common + * language to compute their signatures. This allows us for example to + * override some Java definitions from Scala even if they have a different + * erasure (see i8615b, i9109b), Erasure takes care of adding any necessary + * bridge to make this work at runtime. + */ + def matchesLoosely(other: SingleDenotation, alwaysCompareTypes: Boolean = false)(using Context): Boolean = + if isType then true + else + val thisLanguage = SourceLanguage(symbol) + val otherLanguage = SourceLanguage(other.symbol) + val commonLanguage = SourceLanguage.commonLanguage(thisLanguage, otherLanguage) + val sig = signature(commonLanguage) + val otherSig = other.signature(commonLanguage) + sig.matchDegree(otherSig) match + case FullMatch => + !alwaysCompareTypes || info.matches(other.info) + case MethodNotAMethodMatch => + !ctx.erasedTypes && { + // A Scala zero-parameter method and a Scala non-method always match. + if !thisLanguage.isJava && !otherLanguage.isJava then + true + // Java allows defining both a field and a zero-parameter method with the same name, + // so they must not match. 
+ else if thisLanguage.isJava && otherLanguage.isJava then + false + // A Java field never matches a Scala method. + else if thisLanguage.isJava then + symbol.is(Method) + else // otherLanguage.isJava + other.symbol.is(Method) + } + case ParamMatch => + // The signatures do not tell us enough to be sure about matching + !ctx.erasedTypes && info.matches(other.info) + case noMatch => + false + + def mapInherited(ownDenots: PreDenotation, prevDenots: PreDenotation, pre: Type)(using Context): SingleDenotation = + if hasUniqueSym && prevDenots.containsSym(symbol) then NoDenotation + else if isType then filterDisjoint(ownDenots).asSeenFrom(pre) + else asSeenFrom(pre).filterDisjoint(ownDenots) + + def filterWithPredicate(p: SingleDenotation => Boolean): SingleDenotation = + if (p(this)) this else NoDenotation + def filterDisjoint(denots: PreDenotation)(using Context): SingleDenotation = + if (denots.exists && denots.matches(this)) NoDenotation else this + def filterWithFlags(required: FlagSet, excluded: FlagSet)(using Context): SingleDenotation = + val realExcluded = if ctx.isAfterTyper then excluded else excluded | Invisible + def symd: SymDenotation = this match + case symd: SymDenotation => symd + case _ => symbol.denot + if !required.isEmpty && !symd.isAllOf(required) + || symd.isOneOf(realExcluded) then NoDenotation + else this + def aggregate[T](f: SingleDenotation => T, g: (T, T) => T): T = f(this) + + type AsSeenFromResult = SingleDenotation + + protected def computeAsSeenFrom(pre: Type)(using Context): SingleDenotation = { + val symbol = this.symbol + val owner = this match { + case thisd: SymDenotation => thisd.owner + case _ => if (symbol.exists) symbol.owner else NoSymbol + } + + /** The derived denotation with the given `info` transformed with `asSeenFrom`. + * + * As a performance hack, we might reuse an existing SymDenotation, + * instead of creating a new denotation with a given `prefix`, + * see `Config.reuseSymDenotations`. 
+ */ + def derived(info: Type) = + /** Do we need to return a denotation with a prefix set? */ + def needsPrefix = + // For opaque types, the prefix is used in `ElimOpaques#transform`, + // without this i7159.scala would fail when compiled from tasty. + symbol.is(Opaque) + + val derivedInfo = info.asSeenFrom(pre, owner) + if Config.reuseSymDenotations && this.isInstanceOf[SymDenotation] + && (derivedInfo eq info) && !needsPrefix then + this + else + derivedSingleDenotation(symbol, derivedInfo, pre) + end derived + + // Tt could happen that we see the symbol with prefix `this` as a member a different class + // through a self type and that it then has a different info. In this case we have to go + // through the asSeenFrom to switch the type back. Test case is pos/i9352.scala. + def hasOriginalInfo: Boolean = this match + case sd: SymDenotation => true + case _ => info eq symbol.info + + def ownerIsPrefix = pre match + case pre: ThisType => pre.sameThis(owner.thisType) + case _ => false + + if !owner.membersNeedAsSeenFrom(pre) && (!ownerIsPrefix || hasOriginalInfo) + || symbol.is(NonMember) + then this + else if symbol.isAllOf(ClassTypeParam) then + val arg = symbol.typeRef.argForParam(pre, widenAbstract = true) + if arg.exists + then derivedSingleDenotation(symbol, normalizedArgBounds(arg.bounds), pre) + else derived(symbol.info) + else derived(symbol.info) + } + + /** The argument bounds, possibly intersected with the parameter's info TypeBounds, + * if the latter is not F-bounded and does not refer to other type parameters + * of the same class, and the intersection is provably nonempty. 
+ */ + private def normalizedArgBounds(argBounds: TypeBounds)(using Context): TypeBounds = + if symbol.isCompleted && !hasBoundsDependingOnParamsOf(symbol.owner) then + val combined @ TypeBounds(lo, hi) = symbol.info.bounds & argBounds + if (lo frozen_<:< hi) then combined + else argBounds + else argBounds + + private def hasBoundsDependingOnParamsOf(cls: Symbol)(using Context): Boolean = + val acc = new TypeAccumulator[Boolean]: + def apply(x: Boolean, tp: Type): Boolean = tp match + case _: LazyRef => true + case tp: TypeRef + if tp.symbol.isAllOf(ClassTypeParam) && tp.symbol.owner == cls => true + case _ => foldOver(x, tp) + acc(false, symbol.info) + } + + abstract class NonSymSingleDenotation(symbol: Symbol, initInfo: Type, override val prefix: Type) extends SingleDenotation(symbol, initInfo) { + def infoOrCompleter: Type = initInfo + def isType: Boolean = infoOrCompleter.isInstanceOf[TypeType] + } + + class UniqueRefDenotation( + symbol: Symbol, + initInfo: Type, + initValidFor: Period, + prefix: Type) extends NonSymSingleDenotation(symbol, initInfo, prefix) { + validFor = initValidFor + override def hasUniqueSym: Boolean = true + protected def newLikeThis(s: Symbol, i: Type, pre: Type, isRefinedMethod: Boolean): SingleDenotation = + if isRefinedMethod then + new JointRefDenotation(s, i, validFor, pre, isRefinedMethod) + else + new UniqueRefDenotation(s, i, validFor, pre) + } + + class JointRefDenotation( + symbol: Symbol, + initInfo: Type, + initValidFor: Period, + prefix: Type, + override val isRefinedMethod: Boolean) extends NonSymSingleDenotation(symbol, initInfo, prefix) { + validFor = initValidFor + override def hasUniqueSym: Boolean = false + protected def newLikeThis(s: Symbol, i: Type, pre: Type, isRefinedMethod: Boolean): SingleDenotation = + new JointRefDenotation(s, i, validFor, pre, isRefinedMethod) + } + + class ErrorDenotation(using DetachedContext) extends NonSymSingleDenotation(NoSymbol, NoType, NoType) { + override def exists: Boolean = false 
+ override def hasUniqueSym: Boolean = false + validFor = Period.allInRun(ctx.runId) + protected def newLikeThis(s: Symbol, i: Type, pre: Type, isRefinedMethod: Boolean): SingleDenotation = + this + } + + /** An error denotation that provides more info about the missing reference. + * Produced by staticRef, consumed by requiredSymbol. + */ + case class MissingRef(val owner: SingleDenotation, name: Name)(using DetachedContext) extends ErrorDenotation { + val ex: Exception = new Exception // DEBUG + } + + /** An error denotation that provides more info about alternatives + * that were found but that do not qualify. + * Produced by staticRef, consumed by requiredSymbol. + */ + case class NoQualifyingRef(alts: List[SingleDenotation])(using DetachedContext) extends ErrorDenotation + + /** A double definition + */ + def isDoubleDef(sym1: Symbol, sym2: Symbol)(using Context): Boolean = + (sym1.exists && sym2.exists && + (sym1 `ne` sym2) && (sym1.effectiveOwner `eq` sym2.effectiveOwner) && + !sym1.is(Bridge) && !sym2.is(Bridge)) + + // --- Overloaded denotations and predenotations ------------------------------------------------- + + trait MultiPreDenotation extends PreDenotation { + def denot1: PreDenotation + def denot2: PreDenotation + + assert(denot1.exists && denot2.exists, s"Union of non-existing denotations ($denot1) and ($denot2)") + def first: Denotation = denot1.first + def last: Denotation = denot2.last + def matches(other: SingleDenotation)(using Context): Boolean = + denot1.matches(other) || denot2.matches(other) + def mapInherited(owndenot: PreDenotation, prevdenot: PreDenotation, pre: Type)(using Context): PreDenotation = + derivedUnion(denot1.mapInherited(owndenot, prevdenot, pre), denot2.mapInherited(owndenot, prevdenot, pre)) + def filterWithPredicate(p: SingleDenotation => Boolean): PreDenotation = + derivedUnion(denot1 filterWithPredicate p, denot2 filterWithPredicate p) + def filterDisjoint(denot: PreDenotation)(using Context): PreDenotation = + 
derivedUnion(denot1 filterDisjoint denot, denot2 filterDisjoint denot) + def filterWithFlags(required: FlagSet, excluded: FlagSet)(using Context): PreDenotation = + derivedUnion(denot1.filterWithFlags(required, excluded), denot2.filterWithFlags(required, excluded)) + def aggregate[T](f: SingleDenotation => T, g: (T, T) => T): T = + g(denot1.aggregate(f, g), denot2.aggregate(f, g)) + protected def derivedUnion(denot1: PreDenotation, denot2: PreDenotation) = + if ((denot1 eq this.denot1) && (denot2 eq this.denot2)) this + else denot1 union denot2 + } + + final case class DenotUnion(denot1: PreDenotation, denot2: PreDenotation) extends MultiPreDenotation { + def exists: Boolean = true + def toDenot(pre: Type)(using Context): Denotation = + denot1.toDenot(pre).meet(denot2.toDenot(pre), pre) + def containsSym(sym: Symbol): Boolean = + (denot1 containsSym sym) || (denot2 containsSym sym) + type AsSeenFromResult = PreDenotation + def computeAsSeenFrom(pre: Type)(using Context): PreDenotation = + derivedUnion(denot1.asSeenFrom(pre), denot2.asSeenFrom(pre)) + } + + /** An overloaded denotation consisting of the alternatives of both given denotations. 
+ */ + case class MultiDenotation(denot1: Denotation, denot2: Denotation) extends Denotation(NoSymbol, NoType) with MultiPreDenotation { + final def infoOrCompleter: Type = multiHasNot("info") + final def validFor: Period = denot1.validFor & denot2.validFor + final def isType: Boolean = false + final def hasUniqueSym: Boolean = false + final def name(using Context): Name = denot1.name + final def signature(using Context): Signature = Signature.OverloadedSignature + def atSignature(sig: Signature, targetName: Name, site: Type, relaxed: Boolean)(using Context): Denotation = + if (sig eq Signature.OverloadedSignature) this + else derivedUnionDenotation( + denot1.atSignature(sig, targetName, site, relaxed), + denot2.atSignature(sig, targetName, site, relaxed)) + def current(using Context): Denotation = + derivedUnionDenotation(denot1.current, denot2.current) + def altsWith(p: Symbol => Boolean): List[SingleDenotation] = + denot1.altsWith(p) ++ denot2.altsWith(p) + def suchThat(p: Symbol => Boolean)(using Context): SingleDenotation = { + val sd1 = denot1.suchThat(p) + val sd2 = denot2.suchThat(p) + if sd1.exists then + if sd2.exists then + throw TypeError( + em"""Failure to disambiguate overloaded reference with + | ${denot1.symbol.showLocated}: ${denot1.info} and + | ${denot2.symbol.showLocated}: ${denot2.info}""") + else sd1 + else sd2 + } + override def filterWithPredicate(p: SingleDenotation => Boolean): Denotation = + derivedUnionDenotation(denot1.filterWithPredicate(p), denot2.filterWithPredicate(p)) + def hasAltWith(p: SingleDenotation => Boolean): Boolean = + denot1.hasAltWith(p) || denot2.hasAltWith(p) + def accessibleFrom(pre: Type, superAccess: Boolean)(using Context): Denotation = { + val d1 = denot1 accessibleFrom (pre, superAccess) + val d2 = denot2 accessibleFrom (pre, superAccess) + if (!d1.exists) d2 + else if (!d2.exists) d1 + else derivedUnionDenotation(d1, d2) + } + def mapInfo(f: Type => Type)(using Context): Denotation = + 
derivedUnionDenotation(denot1.mapInfo(f), denot2.mapInfo(f)) + def derivedUnionDenotation(d1: Denotation, d2: Denotation): Denotation = + if ((d1 eq denot1) && (d2 eq denot2)) this + else if (!d1.exists) d2 + else if (!d2.exists) d1 + else MultiDenotation(d1, d2) + type AsSeenFromResult = Denotation + def computeAsSeenFrom(pre: Type)(using Context): Denotation = + derivedUnionDenotation(denot1.asSeenFrom(pre), denot2.asSeenFrom(pre)) + override def toString: String = alternatives.mkString(" ") + + private def multiHasNot(op: String): Nothing = + throw new UnsupportedOperationException( + s"multi-denotation with alternatives $alternatives does not implement operation $op") + } + + /** The current denotation of the static reference given by path, + * or a MissingRef or NoQualifyingRef instance, if it does not exist. + * if generateStubs is set, generates stubs for missing top-level symbols + */ + def staticRef(path: Name, generateStubs: Boolean = true, isPackage: Boolean = false)(using Context): Denotation = { + def select(prefix: Denotation, selector: Name): Denotation = { + val owner = prefix.disambiguate(_.info.isParameterless) + def isPackageFromCoreLibMissing: Boolean = + // if the scala package is missing, the stdlib must be missing + owner.symbol == defn.RootClass && selector == nme.scala + if (owner.exists) { + val result = if (isPackage) owner.info.decl(selector) else owner.info.member(selector) + if (result.exists) result + else if (isPackageFromCoreLibMissing) throw new MissingCoreLibraryException(selector.toString) + else { + val alt = + if (generateStubs) missingHook(owner.symbol.moduleClass, selector) + else NoSymbol + if (alt.exists) alt.denot + else MissingRef(owner, selector) + } + } + else owner + } + def recur( + path: Name, + wrap: TermName -> Name = identity[Name] // !cc! 
default argument needs to be instantiated, error if [Name] is dropped + ): Denotation = path match { + case path: TypeName => + recur(path.toTermName, n => n.toTypeName) + case ModuleClassName(underlying) => + recur(underlying, n => wrap(ModuleClassName(n))) + case QualifiedName(prefix, selector) => + select(recur(prefix), wrap(selector)) + case qn @ AnyQualifiedName(prefix, _) => + recur(prefix, n => wrap(qn.info.mkString(n).toTermName)) + case path: SimpleName => + def recurSimple(len: Int, wrap: TermName -> Name): Denotation = { + val point = path.lastIndexOf('.', len - 1) + val selector = wrap(path.slice(point + 1, len).asTermName) + val prefix = + if (point > 0) recurSimple(point, identity) + else if (selector.isTermName) defn.RootClass.denot + else defn.EmptyPackageClass.denot + select(prefix, selector) + } + recurSimple(path.length, wrap) + } + + val run = ctx.run + if run == null then recur(path) + else run.staticRefs.getOrElseUpdate(path, recur(path)) + } + + /** If we are looking for a non-existing term name in a package, + * assume it is a package for which we do not have a directory and + * enter it. + */ + def missingHook(owner: Symbol, name: Name)(using Context): Symbol = + if (owner.is(Package) && name.isTermName) + newCompletePackageSymbol(owner, name.asTermName).entered + else + NoSymbol + + /** An exception for accessing symbols that are no longer valid in current run */ + class StaleSymbol(msg: -> String) extends Exception { + util.Stats.record("stale symbol") + override def getMessage(): String = msg + } +} diff --git a/tests/pos-with-compiler-cc/dotc/core/Flags.scala b/tests/pos-with-compiler-cc/dotc/core/Flags.scala new file mode 100644 index 000000000000..f23dce020f10 --- /dev/null +++ b/tests/pos-with-compiler-cc/dotc/core/Flags.scala @@ -0,0 +1,612 @@ +package dotty.tools.dotc +package core + +object Flags { + + object opaques { + + /** A FlagSet represents a set of flags. 
Flags are encoded as follows: + * The first two bits indicate whether a flag set applies to terms, + * to types, or to both. Bits 2..63 are available for properties + * and can be doubly used for terms and types. + */ + opaque type FlagSet = Long + def FlagSet(bits: Long): FlagSet = bits + def toBits(fs: FlagSet): Long = fs + + /** A flag set consisting of a single flag */ + opaque type Flag <: FlagSet = Long + private[Flags] def Flag(bits: Long): Flag = bits + } + export opaques.FlagSet + + type Flag = opaques.Flag + + extension (x: FlagSet) { + + inline def bits: Long = opaques.toBits(x) + + /** The union of the given flag sets. + * Combining two FlagSets with `|` will give a FlagSet + * that has the intersection of the applicability to terms/types + * of the two flag sets. It is checked that the intersection is not empty. + */ + def | (y: FlagSet): FlagSet = + if (x.bits == 0) y + else if (y.bits == 0) x + else { + val tbits = x.bits & y.bits & KINDFLAGS + if (tbits == 0) + assert(false, s"illegal flagset combination: ${x.flagsString} and ${y.flagsString}") + FlagSet(tbits | ((x.bits | y.bits) & ~KINDFLAGS)) + } + + /** The intersection of the given flag sets */ + def & (y: FlagSet): FlagSet = FlagSet(x.bits & y.bits) + + /** The intersection of a flag set with the complement of another flag set */ + def &~ (y: FlagSet): FlagSet = { + val tbits = x.bits & KINDFLAGS + if ((tbits & y.bits) == 0) x + else FlagSet(tbits | ((x.bits & ~y.bits) & ~KINDFLAGS)) + } + + def ^ (y: FlagSet) = + FlagSet((x.bits | y.bits) & KINDFLAGS | (x.bits ^ y.bits) & ~KINDFLAGS) + + /** Does the given flag set contain the given flag? + * This means that both the kind flags and the carrier bits have non-empty intersection. + */ + def is (flag: Flag): Boolean = { + val fs = x.bits & flag.bits + (fs & KINDFLAGS) != 0 && (fs & ~KINDFLAGS) != 0 + } + + /** Does the given flag set contain the given flag + * and at the same time contain none of the flags in the `butNot` set? 
+ */ + def is (flag: Flag, butNot: FlagSet): Boolean = x.is(flag) && !x.isOneOf(butNot) + + /** Does the given flag set have a non-empty intersection with another flag set? + * This means that both the kind flags and the carrier bits have non-empty intersection. + */ + def isOneOf (flags: FlagSet): Boolean = { + val fs = x.bits & flags.bits + (fs & KINDFLAGS) != 0 && (fs & ~KINDFLAGS) != 0 + } + + /** Does the given flag set have a non-empty intersection with another flag set, + * and at the same time contain none of the flags in the `butNot` set? + */ + def isOneOf (flags: FlagSet, butNot: FlagSet): Boolean = x.isOneOf(flags) && !x.isOneOf(butNot) + + /** Does a given flag set have all of the flags of another flag set? + * Pre: The intersection of the term/type flags of both sets must be non-empty. + */ + def isAllOf (flags: FlagSet): Boolean = { + val fs = x.bits & flags.bits + ((fs & KINDFLAGS) != 0 || flags.bits == 0) && + (fs >>> TYPESHIFT) == (flags.bits >>> TYPESHIFT) + } + + /** Does a given flag set have all of the flags in another flag set + * and at the same time contain none of the flags in the `butNot` set? + * Pre: The intersection of the term/type flags of both sets must be non-empty. + */ + def isAllOf (flags: FlagSet, butNot: FlagSet): Boolean = x.isAllOf(flags) && !x.isOneOf(butNot) + + def isEmpty: Boolean = (x.bits & ~KINDFLAGS) == 0 + + /** Is a given flag set a subset of another flag set? */ + def <= (y: FlagSet): Boolean = (x.bits & y.bits) == x.bits + + /** Does the given flag set apply to terms? */ + def isTermFlags: Boolean = (x.bits & TERMS) != 0 + + /** Does the given flag set apply to terms? 
*/ + def isTypeFlags: Boolean = (x.bits & TYPES) != 0 + + /** The given flag set with all flags transposed to be type flags */ + def toTypeFlags: FlagSet = if (x.bits == 0) x else FlagSet(x.bits & ~KINDFLAGS | TYPES) + + /** The given flag set with all flags transposed to be term flags */ + def toTermFlags: FlagSet = if (x.bits == 0) x else FlagSet(x.bits & ~KINDFLAGS | TERMS) + + /** The given flag set with all flags transposed to be common flags */ + def toCommonFlags: FlagSet = if (x.bits == 0) x else FlagSet(x.bits | KINDFLAGS) + + /** The number of non-kind flags in the given flag set */ + def numFlags: Int = java.lang.Long.bitCount(x.bits & ~KINDFLAGS) + + /** The lowest non-kind bit set in the given flag set */ + def firstBit: Int = java.lang.Long.numberOfTrailingZeros(x.bits & ~KINDFLAGS) + + /** The list of non-empty names of flags with given index idx that are set in the given flag set */ + private def flagString(idx: Int): List[String] = + if ((x.bits & (1L << idx)) == 0) Nil + else { + def halfString(kind: Int) = + if ((x.bits & (1L << kind)) != 0) flagName(idx)(kind) else "" + val termFS = halfString(TERMindex) + val typeFS = halfString(TYPEindex) + val strs = termFS :: (if (termFS == typeFS) Nil else typeFS :: Nil) + strs filter (_.nonEmpty) + } + + /** The list of non-empty names of flags that are set in the given flag set */ + def flagStrings(privateWithin: String = ""): Seq[String] = { + var rawStrings = (2 to MaxFlag).flatMap(x.flagString(_)) // DOTTY problem: cannot drop with (_) + if (!privateWithin.isEmpty && !x.is(Protected)) + rawStrings = rawStrings :+ "private" + val scopeStr = if (x.is(Local)) "this" else privateWithin + if (scopeStr != "") + rawStrings.filter(_ != "").map { + case "private" => s"private[$scopeStr]" + case "protected" => s"protected[$scopeStr]" + case str => str + } + else rawStrings + } + + /** The string representation of the given flag set */ + def flagsString: String = x.flagStrings("").mkString(" ") + } + + // 
Temporary while extension names are in flux + def or(x1: FlagSet, x2: FlagSet) = x1 | x2 + def and(x1: FlagSet, x2: FlagSet) = x1 & x2 + + def termFlagSet(x: Long) = FlagSet(TERMS | x) + + private inline val TYPESHIFT = 2 + private inline val TERMindex = 0 + private inline val TYPEindex = 1 + private inline val TERMS = 1 << TERMindex + private inline val TYPES = 1 << TYPEindex + private inline val KINDFLAGS = TERMS | TYPES + + private inline val FirstFlag = 2 + private inline val FirstNotPickledFlag = 48 + private inline val MaxFlag = 63 + + private val flagName = Array.fill(64, 2)("") + + private def isDefinedAsFlag(idx: Int) = flagName(idx).exists(_.nonEmpty) + + /** The flag set containing all defined flags of either kind whose bits + * lie in the given range + */ + private def flagRange(start: Int, end: Int) = + FlagSet((start until end).foldLeft(KINDFLAGS.toLong) ((bits, idx) => + if (isDefinedAsFlag(idx)) bits | (1L << idx) else bits)) + + /** The union of all flags in given flag set */ + def union(flagss: FlagSet*): FlagSet = { + var flag = EmptyFlags + for (f <- flagss) + flag |= f + flag + } + + def commonFlags(flagss: FlagSet*): FlagSet = union(flagss.map(_.toCommonFlags): _*) + + /** The empty flag set */ + val EmptyFlags: FlagSet = FlagSet(0) + + /** The undefined flag set */ + val UndefinedFlags: FlagSet = FlagSet(~KINDFLAGS) + + /** Three flags with given index between 2 and 63. + * The first applies to both terms and types. the second is a term flag, and + * the third is a type flag. Installs given name(s) as the name(s) of the flags. + * @param name The name to be used for the term flag + * @param typeName The name to be used for the type flag, if it is different from `name`. 
+ */ + private def newFlags(index: Int, name: String, typeName: String = ""): (Flag, Flag, Flag) = { + flagName(index)(TERMindex) = name + flagName(index)(TYPEindex) = if (typeName.isEmpty) name else typeName + val bits = 1L << index + (opaques.Flag(KINDFLAGS | bits), opaques.Flag(TERMS | bits), opaques.Flag(TYPES | bits)) + } + + // ----------------- Available flags ----------------------------------------------------- + + /** Labeled with `private` modifier */ + val (Private @ _, PrivateTerm @ _, PrivateType @ _) = newFlags(2, "private") + + /** Labeled with `protected` modifier */ + val (Protected @ _, _, _) = newFlags(3, "protected") + + /** Labeled with `override` modifier */ + val (Override @ _, _, _) = newFlags(4, "override") + + /** A declared, but not defined member */ + val (Deferred @ _, DeferredTerm @ _, DeferredType @ _) = newFlags(5, "") + + /** Labeled with `final` modifier */ + val (Final @ _, _, _) = newFlags(6, "final") + + /** A method symbol / a super trait */ + val (_, Method @ _, _) = newFlags(7, "") + + /** A (term or type) parameter to a class or method */ + val (Param @ _, TermParam @ _, TypeParam @ _) = newFlags(8, "") + + /** Labeled with `implicit` modifier (implicit value) */ + val (Implicit @ _, ImplicitVal @ _, _) = newFlags(9, "implicit") + + /** Labeled with `lazy` (a lazy val) / a trait */ + val (LazyOrTrait @ _, Lazy @ _, Trait @ _) = newFlags(10, "lazy", "") + + /** A value or variable accessor (getter or setter) */ + val (AccessorOrSealed @ _, Accessor @ _, Sealed @ _) = newFlags(11, "", "sealed") + + /** A mutable var, an open class */ + val (MutableOrOpen @ _, Mutable @ _, Open @ _) = newFlags(12, "mutable", "open") + + /** Symbol is local to current class (i.e. private[this] or protected[this] + * pre: Private or Protected are also set + */ + val (Local @ _, _, _) = newFlags(13, "") + + /** A field generated for a primary constructor parameter (no matter if it's a 'val' or not), + * or an accessor of such a field. 
+ */ + val (_, ParamAccessor @ _, _) = newFlags(14, "") + + /** A value or class implementing a module */ + val (Module @ _, ModuleVal @ _, ModuleClass @ _) = newFlags(15, "module") + + /** A value or class representing a package */ + val (Package @ _, PackageVal @ _, PackageClass @ _) = newFlags(16, "") + + /** A case class or its companion object + * Note: Case is also used to indicate that a symbol is bound by a pattern. + */ + val (Case @ _, CaseVal @ _, CaseClass @ _) = newFlags(17, "case") + + /** A compiler-generated symbol, which is visible for type-checking + * (compare with artifact) + */ + val (Synthetic @ _, _, _) = newFlags(18, "") + + /** Labelled with `inline` modifier */ + val (Inline @ _, _, _) = newFlags(19, "inline") + + /** An outer accessor / a covariant type variable */ + val (OuterOrCovariant @ _, OuterAccessor @ _, Covariant @ _) = newFlags(20, "", "") + + /** The label of a labeled block / a contravariant type variable */ + val (LabelOrContravariant @ _, Label @ _, Contravariant @ _) = newFlags(21, "